Anvil file support (blocks and biomes) (#145)

Adds the `valence_anvil` crate for loading Anvil worlds. Currently it can only read blocks and biomes; support for saving data will be added later. A brief usage sketch follows the commit metadata below.

Co-authored-by: Ryan <ryanj00a@gmail.com>
Terminator 2022-12-26 15:55:52 +01:00 committed by GitHub
parent 8a7782e16f
commit 6de5de57a5
12 changed files with 8078 additions and 119 deletions

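For orientation before the file-by-file diff, here is a minimal sketch of how the new crate is used, adapted from the bundled example and the world_parsing benchmark included in this PR. The function name `load_spawn_chunk`, the error type, and the choice of 24 sections with a section offset of 4 (matching a vanilla 1.19 overworld, as in the example) are illustrative assumptions rather than anything prescribed by the crate.

use std::path::PathBuf;

use valence::biome::BiomeId;
use valence::chunk::UnloadedChunk;
use valence_anvil::AnvilWorld;

fn load_spawn_chunk(world_dir: PathBuf) -> Result<(), Box<dyn std::error::Error>> {
    // `world_dir` must contain a `region` subdirectory with r.X.Z.mca files.
    let mut world = AnvilWorld::new(world_dir);

    // Raw NBT for chunk (0, 0); `Ok(None)` means the chunk was never generated.
    if let Some(anvil_chunk) = world.read_chunk(0, 0)? {
        let mut chunk = UnloadedChunk::new(24);
        valence_anvil::to_valence(&anvil_chunk.data, &mut chunk, 4, |_| BiomeId::default())?;
        // `chunk` can now be inserted into a world's chunk storage.
    }
    Ok(())
}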
.gitignore (1 addition)

@@ -13,3 +13,4 @@ Cargo.lock
flamegraph.svg
perf.data
perf.data.old
/valence_anvil/.asset_cache/


@@ -41,7 +41,7 @@ valence_protocol = { version = "0.1.0", path = "valence_protocol", features = ["
vek = "0.15.8"
[dependencies.tokio]
version = "1.21.1"
version = "1.21.2"
features = ["macros", "rt-multi-thread", "net", "io-util", "sync", "time"]
[dependencies.reqwest]
@@ -70,6 +70,7 @@ num = "0.4.0"
members = [
"valence_derive",
"valence_nbt",
"valence_anvil",
"valence_protocol",
"valence_spatial_index",
"packet_inspector",


@@ -171,7 +171,7 @@ impl Config for Game {
let mut in_terrain = false;
let mut depth = 0;
for y in (0..chunk.height()).rev() {
for y in (0..chunk.section_count() * 16).rev() {
let b = terrain_column(
self,
block_x,
@@ -184,7 +184,7 @@
}
// Add grass
for y in (0..chunk.height()).rev() {
for y in (0..chunk.section_count() * 16).rev() {
if chunk.block_state(x, y, z).is_air()
&& chunk.block_state(x, y - 1, z) == BlockState::GRASS_BLOCK
{

extracted/biomes.json (new file, 6994 lines)

File diff suppressed because it is too large.


@@ -38,6 +38,7 @@ public class Main implements ModInitializer {
LOGGER.info("Starting extractors...");
var extractors = new Extractor[]{
new Biomes(),
new Blocks(),
new Enchants(),
new Entities(),


@@ -0,0 +1,128 @@
package rs.valence.extractor.extractors;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import net.minecraft.entity.SpawnGroup;
import net.minecraft.util.Identifier;
import net.minecraft.util.collection.Weighted;
import net.minecraft.util.registry.BuiltinRegistries;
import net.minecraft.util.registry.Registry;
import net.minecraft.world.biome.BiomeParticleConfig;
import rs.valence.extractor.Main;
import java.lang.reflect.Field;
public class Biomes implements Main.Extractor {
public Biomes() {
}
@Override
public String fileName() {
return "biomes.json";
}
@Override
public JsonElement extract() {
// The biome particle probability field is private.
// We have to resort to reflection, unfortunately.
Field particleConfigProbabilityField;
try {
particleConfigProbabilityField = BiomeParticleConfig.class.getDeclaredField("probability");
particleConfigProbabilityField.setAccessible(true);
} catch (Exception e) {
throw new RuntimeException(e);
}
var biomesJson = new JsonArray();
for (var biome : BuiltinRegistries.BIOME) {
var biomeIdent = BuiltinRegistries.BIOME.getId(biome);
assert biomeIdent != null;
var biomeJson = new JsonObject();
biomeJson.addProperty("precipitation", biome.getPrecipitation().getName());
biomeJson.addProperty("temperature", biome.getTemperature());
biomeJson.addProperty("downfall", biome.getDownfall());
var effectJson = new JsonObject();
var biomeEffects = biome.getEffects();
effectJson.addProperty("sky_color", biomeEffects.getSkyColor());
effectJson.addProperty("water_fog_color", biomeEffects.getWaterFogColor());
effectJson.addProperty("fog_color", biomeEffects.getFogColor());
effectJson.addProperty("water_color", biomeEffects.getWaterColor());
biomeEffects.getFoliageColor().ifPresent(color -> effectJson.addProperty("foliage_color", color));
biomeEffects.getGrassColor().ifPresent(color -> effectJson.addProperty("grass_color", color));
effectJson.addProperty("grass_color_modifier", biomeEffects.getGrassColorModifier().getName());
biomeEffects.getMusic().ifPresent(biome_music -> {
var music = new JsonObject();
music.addProperty("replace_current_music", biome_music.shouldReplaceCurrentMusic());
music.addProperty("sound", biome_music.getSound().getId().getPath());
music.addProperty("max_delay", biome_music.getMaxDelay());
music.addProperty("min_delay", biome_music.getMinDelay());
effectJson.add("music", music);
});
biomeEffects.getLoopSound().ifPresent(soundEvent -> effectJson.addProperty("ambient_sound", soundEvent.getId().getPath()));
biomeEffects.getAdditionsSound().ifPresent(soundEvent -> {
var sound = new JsonObject();
sound.addProperty("sound", soundEvent.getSound().getId().getPath());
sound.addProperty("tick_chance", soundEvent.getChance());
effectJson.add("additions_sound", sound);
});
biomeEffects.getMoodSound().ifPresent(soundEvent -> {
var sound = new JsonObject();
sound.addProperty("sound", soundEvent.getSound().getId().getPath());
sound.addProperty("tick_delay", soundEvent.getCultivationTicks());
sound.addProperty("offset", soundEvent.getExtraDistance());
sound.addProperty("block_search_extent", soundEvent.getSpawnRange());
effectJson.add("mood_sound", sound);
});
biome.getParticleConfig().ifPresent(biomeParticleConfig -> {
try {
var particleConfig = new JsonObject();
// asString() returns the full resource identifier as a string, so we parse it into an Identifier to extract just the path.
Identifier id = new Identifier(biomeParticleConfig.getParticle().asString());
particleConfig.addProperty("kind", id.getPath());
particleConfig.addProperty("probability" ,particleConfigProbabilityField.getFloat(biomeParticleConfig));
biomeJson.add("particle", particleConfig);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
});
var spawnSettingsJson = new JsonObject();
var spawnSettings = biome.getSpawnSettings();
spawnSettingsJson.addProperty("probability", spawnSettings.getCreatureSpawnProbability());
var spawnGroupsJson = new JsonObject();
for (var spawnGroup : SpawnGroup.values()) {
var spawnGroupJson = new JsonArray();
for (var entry : spawnSettings.getSpawnEntries(spawnGroup).getEntries()) {
var groupEntryJson = new JsonObject();
groupEntryJson.addProperty("name", Registry.ENTITY_TYPE.getId(entry.type).getPath());
groupEntryJson.addProperty("min_group_size", entry.minGroupSize);
groupEntryJson.addProperty("max_group_size", entry.maxGroupSize);
groupEntryJson.addProperty("weight", ((Weighted) entry).getWeight().getValue());
spawnGroupJson.add(groupEntryJson);
}
spawnGroupsJson.add(spawnGroup.getName(), spawnGroupJson);
}
spawnSettingsJson.add("groups", spawnGroupsJson);
biomeJson.add("effects", effectJson);
biomeJson.add("spawn_settings", spawnSettingsJson);
var entryJson = new JsonObject();
entryJson.addProperty("name", biomeIdent.getPath());
entryJson.addProperty("id", BuiltinRegistries.BIOME.getRawId(biome));
entryJson.add("element", biomeJson);
biomesJson.add(entryJson);
}
return biomesJson;
}
}


@@ -110,7 +110,8 @@ impl<C: Config> Chunks<C> {
}
/// Returns the height of all loaded chunks in the world. This returns the
/// same value as [`Chunk::height`] for all loaded chunks.
/// same value as [`Chunk::section_count`] multiplied by 16 for all loaded
/// chunks.
pub fn height(&self) -> usize {
self.dimension_height as usize
}
@@ -204,7 +205,7 @@ impl<C: Config> Chunks<C> {
let y = pos.y.checked_sub(self.dimension_min_y)?.try_into().ok()?;
if y < chunk.height() {
if y < chunk.section_count() * 16 {
Some(chunk.block_state(
pos.x.rem_euclid(16) as usize,
y,
@@ -437,42 +438,46 @@ impl<C: Config, P: Into<ChunkPos>> IndexMut<P> for Chunks<C> {
/// Operations that can be performed on a chunk. [`LoadedChunk`] and
/// [`UnloadedChunk`] implement this trait.
pub trait Chunk {
/// Returns the height of this chunk in blocks. The result is always a
/// multiple of 16.
fn height(&self) -> usize;
/// Returns the number of sections in this chunk. To get the height of the
/// chunk in meters, multiply the result by 16.
fn section_count(&self) -> usize;
/// Gets the block state at the provided offsets in the chunk.
///
/// **Note**: The arguments to this function are offsets from the minimum
/// corner of the chunk in _chunk space_ rather than _world space_. You
/// might be looking for [`Chunks::block_state`] instead.
/// corner of the chunk in _chunk space_ rather than _world space_.
///
/// # Panics
///
/// Panics if the offsets are outside the bounds of the chunk.
/// Panics if the offsets are outside the bounds of the chunk. `x` and `z`
/// must be less than 16 while `y` must be less than `section_count() * 16`.
fn block_state(&self, x: usize, y: usize, z: usize) -> BlockState;
/// Sets the block state at the provided offsets in the chunk. The previous
/// block state at the position is returned.
///
/// **Note**: The arguments to this function are offsets from the minimum
/// corner of the chunk in _chunk space_ rather than _world space_. You
/// might be looking for [`Chunks::set_block_state`] instead.
/// corner of the chunk in _chunk space_ rather than _world space_.
///
/// # Panics
///
/// Panics if the offsets are outside the bounds of the chunk.
/// Panics if the offsets are outside the bounds of the chunk. `x` and `z`
/// must be less than 16 while `y` must be less than `section_count() * 16`.
fn set_block_state(&mut self, x: usize, y: usize, z: usize, block: BlockState) -> BlockState;
/// Sets every block state in this chunk to the given block state.
/// Sets every block in a section to the given block state.
///
/// This is semantically equivalent to calling [`set_block_state`] on every
/// block in the chunk followed by a call to [`optimize`] at the end.
/// However, this function may be implemented more efficiently.
/// This is semantically equivalent to setting every block in the section
/// with [`set_block_state`]. However, this function may be implemented more
/// efficiently.
///
/// # Panics
///
/// Panics if `sect_y` is out of bounds. `sect_y` must be less than the
/// section count.
///
/// [`set_block_state`]: Self::set_block_state
/// [`optimize`]: Self::optimize
fn fill_block_states(&mut self, block: BlockState);
fn fill_block_states(&mut self, sect_y: usize, block: BlockState);
/// Gets the biome at the provided biome offsets in the chunk.
///
@@ -481,7 +486,8 @@ pub trait Chunk {
///
/// # Panics
///
/// Panics if the offsets are outside the bounds of the chunk.
/// Panics if the offsets are outside the bounds of the chunk. `x` and `z`
/// must be less than 4 while `y` must be less than `section_count() * 4`.
fn biome(&self, x: usize, y: usize, z: usize) -> BiomeId;
/// Sets the biome at the provided offsets in the chunk. The previous
@@ -492,18 +498,23 @@
///
/// # Panics
///
/// Panics if the offsets are outside the bounds of the chunk.
/// Panics if the offsets are outside the bounds of the chunk. `x` and `z`
/// must be less than 4 while `y` must be less than `section_count() * 4`.
fn set_biome(&mut self, x: usize, y: usize, z: usize, biome: BiomeId) -> BiomeId;
/// Sets every biome in this chunk to the given biome.
/// Sets every biome in a section to the given biome.
///
/// This is semantically equivalent to calling [`set_biome`] on every
/// biome in the chunk followed by a call to [`optimize`] at the end.
/// However, this function may be implemented more efficiently.
/// This is semantically equivalent to setting every biome in the section
/// with [`set_biome`]. However, this function may be implemented more
/// efficiently.
///
/// # Panics
///
/// Panics if `sect_y` is out of bounds. `sect_y` must be less than the
/// section count.
///
/// [`set_biome`]: Self::set_biome
/// [`optimize`]: Self::optimize
fn fill_biomes(&mut self, biome: BiomeId);
fn fill_biomes(&mut self, sect_y: usize, biome: BiomeId);
/// Optimizes this chunk to use the minimum amount of memory possible. It
/// should have no observable effect on the contents of the chunk.
@@ -521,44 +532,30 @@ pub struct UnloadedChunk {
impl UnloadedChunk {
/// Constructs a new unloaded chunk containing only [`BlockState::AIR`] and
/// [`BiomeId::default()`] with the given height in blocks.
///
/// # Panics
///
/// Panics if the value of `height` does not meet the following criteria:
/// `height % 16 == 0 && height <= 4064`.
pub fn new(height: usize) -> Self {
/// [`BiomeId::default()`] with the given number of sections. A section is a
/// 16x16x16 meter volume.
pub fn new(section_count: usize) -> Self {
let mut chunk = Self { sections: vec![] };
chunk.resize(height);
chunk.resize(section_count);
chunk
}
/// Changes the height of the chunk to `new_height`. This is a potentially
/// expensive operation that may involve copying.
/// Changes the section count of the chunk to `new_section_count`. This is a
/// potentially expensive operation that may involve copying.
///
/// The chunk is extended and truncated from the top. New blocks are always
/// [`BlockState::AIR`] and biomes are [`BiomeId::default()`].
///
/// # Panics
///
/// The constraints on `new_height` are the same as [`UnloadedChunk::new`].
pub fn resize(&mut self, new_height: usize) {
assert!(
new_height % 16 == 0 && new_height <= 4064,
"invalid chunk height of {new_height}"
);
pub fn resize(&mut self, new_section_count: usize) {
let old_section_count = self.section_count();
let old_height = self.sections.len() * 16;
if new_height > old_height {
let additional = (new_height - old_height) / 16;
self.sections.reserve_exact(additional);
if new_section_count > old_section_count {
self.sections
.resize_with(new_height / 16, ChunkSection::default);
.reserve_exact(new_section_count - old_section_count);
self.sections
.resize_with(new_section_count, ChunkSection::default);
debug_assert_eq!(self.sections.capacity(), self.sections.len());
} else if new_height < old_height {
self.sections.truncate(new_height / 16);
} else {
self.sections.truncate(new_section_count);
}
}
}
@@ -571,13 +568,13 @@ impl Default for UnloadedChunk {
}
impl Chunk for UnloadedChunk {
fn height(&self) -> usize {
self.sections.len() * 16
fn section_count(&self) -> usize {
self.sections.len()
}
fn block_state(&self, x: usize, y: usize, z: usize) -> BlockState {
assert!(
x < 16 && y < self.height() && z < 16,
x < 16 && y < self.section_count() * 16 && z < 16,
"chunk block offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -588,7 +585,7 @@ impl Chunk for UnloadedChunk {
fn set_block_state(&mut self, x: usize, y: usize, z: usize, block: BlockState) -> BlockState {
assert!(
x < 16 && y < self.height() && z < 16,
x < 16 && y < self.section_count() * 16 && z < 16,
"chunk block offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -605,23 +602,26 @@ impl Chunk for UnloadedChunk {
old_block
}
fn fill_block_states(&mut self, block: BlockState) {
for sect in self.sections.iter_mut() {
// TODO: adjust motion blocking here.
fn fill_block_states(&mut self, sect_y: usize, block: BlockState) {
let Some(sect) = self.sections.get_mut(sect_y) else {
panic!(
"section index {sect_y} out of bounds for chunk with {} sections",
self.section_count()
)
};
if block.is_air() {
sect.non_air_count = 0;
} else {
sect.non_air_count = SECTION_BLOCK_COUNT as u16;
}
sect.block_states.fill(block);
if block.is_air() {
sect.non_air_count = 0;
} else {
sect.non_air_count = SECTION_BLOCK_COUNT as u16;
}
sect.block_states.fill(block);
}
fn biome(&self, x: usize, y: usize, z: usize) -> BiomeId {
assert!(
x < 4 && y < self.height() / 4 && z < 4,
x < 4 && y < self.section_count() * 4 && z < 4,
"chunk biome offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -630,7 +630,7 @@ impl Chunk for UnloadedChunk {
fn set_biome(&mut self, x: usize, y: usize, z: usize, biome: BiomeId) -> BiomeId {
assert!(
x < 4 && y < self.height() / 4 && z < 4,
x < 4 && y < self.section_count() * 4 && z < 4,
"chunk biome offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -639,10 +639,15 @@ impl Chunk for UnloadedChunk {
.set(x + z * 4 + y % 4 * 4 * 4, biome)
}
fn fill_biomes(&mut self, biome: BiomeId) {
for sect in self.sections.iter_mut() {
sect.biomes.fill(biome);
}
fn fill_biomes(&mut self, sect_y: usize, biome: BiomeId) {
let Some(sect) = self.sections.get_mut(sect_y) else {
panic!(
"section index {sect_y} out of bounds for chunk with {} sections",
self.section_count()
)
};
sect.biomes.fill(biome);
}
fn optimize(&mut self) {
@@ -727,7 +732,7 @@ impl ChunkSection {
impl<C: Config> LoadedChunk<C> {
fn new(mut chunk: UnloadedChunk, dimension_section_count: usize, state: C::ChunkState) -> Self {
chunk.resize(dimension_section_count * 16);
chunk.resize(dimension_section_count);
Self {
state,
@@ -870,13 +875,13 @@ impl<C: Config> LoadedChunk<C> {
}
impl<C: Config> Chunk for LoadedChunk<C> {
fn height(&self) -> usize {
self.sections.len() * 16
fn section_count(&self) -> usize {
self.sections.len()
}
fn block_state(&self, x: usize, y: usize, z: usize) -> BlockState {
assert!(
x < 16 && y < self.height() && z < 16,
x < 16 && y < self.section_count() * 16 && z < 16,
"chunk block offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -887,7 +892,7 @@ impl<C: Config> Chunk for LoadedChunk<C> {
fn set_block_state(&mut self, x: usize, y: usize, z: usize, block: BlockState) -> BlockState {
assert!(
x < 16 && y < self.height() && z < 16,
x < 16 && y < self.section_count() * 16 && z < 16,
"chunk block offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -909,37 +914,40 @@ impl<C: Config> Chunk for LoadedChunk<C> {
old_block
}
fn fill_block_states(&mut self, block: BlockState) {
for sect in self.sections.iter_mut() {
// Mark the appropriate blocks as modified.
// No need to iterate through all the blocks if we know they're all the same.
if let PalettedContainer::Single(single) = &sect.block_states {
if block != *single {
sect.mark_all_blocks_as_modified();
}
} else {
for i in 0..SECTION_BLOCK_COUNT {
if block != sect.block_states.get(i) {
sect.mark_block_as_modified(i);
}
fn fill_block_states(&mut self, sect_y: usize, block: BlockState) {
let Some(sect) = self.sections.get_mut(sect_y) else {
panic!(
"section index {sect_y} out of bounds for chunk with {} sections",
self.section_count()
)
};
// Mark the appropriate blocks as modified.
// No need to iterate through all the blocks if we know they're all the same.
if let PalettedContainer::Single(single) = &sect.block_states {
if block != *single {
sect.mark_all_blocks_as_modified();
}
} else {
for i in 0..SECTION_BLOCK_COUNT {
if block != sect.block_states.get(i) {
sect.mark_block_as_modified(i);
}
}
// TODO: adjust motion blocking here.
if block.is_air() {
sect.non_air_count = 0;
} else {
sect.non_air_count = SECTION_BLOCK_COUNT as u16;
}
sect.block_states.fill(block);
}
if block.is_air() {
sect.non_air_count = 0;
} else {
sect.non_air_count = SECTION_BLOCK_COUNT as u16;
}
sect.block_states.fill(block);
}
fn biome(&self, x: usize, y: usize, z: usize) -> BiomeId {
assert!(
x < 4 && y < self.height() / 4 && z < 4,
x < 4 && y < self.section_count() * 4 && z < 4,
"chunk biome offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -948,7 +956,7 @@ impl<C: Config> Chunk for LoadedChunk<C> {
fn set_biome(&mut self, x: usize, y: usize, z: usize, biome: BiomeId) -> BiomeId {
assert!(
x < 4 && y < self.height() / 4 && z < 4,
x < 4 && y < self.section_count() * 4 && z < 4,
"chunk biome offsets of ({x}, {y}, {z}) are out of bounds"
);
@@ -963,10 +971,15 @@ impl<C: Config> Chunk for LoadedChunk<C> {
old_biome
}
fn fill_biomes(&mut self, biome: BiomeId) {
for sect in self.sections.iter_mut() {
sect.biomes.fill(biome);
}
fn fill_biomes(&mut self, sect_y: usize, biome: BiomeId) {
let Some(sect) = self.sections.get_mut(sect_y) else {
panic!(
"section index {sect_y} out of bounds for chunk with {} sections",
self.section_count()
)
};
sect.biomes.fill(biome);
// TODO: this is set to true unconditionally, but it doesn't have to be.
self.any_biomes_modified = true;
@@ -983,13 +996,6 @@ impl<C: Config> Chunk for LoadedChunk<C> {
}
}
/*
fn is_motion_blocking(b: BlockState) -> bool {
// TODO: use is_solid || is_fluid ?
!b.is_air()
}
*/
fn compact_u64s_len(vals_count: usize, bits_per_val: usize) -> usize {
let vals_per_u64 = 64 / bits_per_val;
num::Integer::div_ceil(&vals_count, &vals_per_u64)
@@ -1080,8 +1086,14 @@ mod tests {
check_invariants(&loaded.sections);
check_invariants(&unloaded.sections);
loaded.fill_block_states(rand_block_state(&mut rng));
unloaded.fill_block_states(rand_block_state(&mut rng));
loaded.fill_block_states(
rng.gen_range(0..loaded.section_count()),
rand_block_state(&mut rng),
);
unloaded.fill_block_states(
rng.gen_range(0..loaded.section_count()),
rand_block_state(&mut rng),
);
check_invariants(&loaded.sections);
check_invariants(&unloaded.sections);

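To summarize the chunk API change above in one place: chunks are now sized and filled per 16x16x16 section instead of by a height in blocks. The sketch below contrasts the old and new calls; the import paths mirror modules used elsewhere in this diff, and `sections_demo` is just an illustrative name.

use valence::chunk::{Chunk, UnloadedChunk};
use valence::protocol::block::BlockState;

fn sections_demo() {
    // Old: UnloadedChunk::new(384) took a height in blocks.
    // New: the constructor takes a section count; 24 sections * 16 = 384 blocks.
    let mut chunk = UnloadedChunk::new(24);

    // Old: chunk.height(). New: derive the height in blocks from the section count.
    assert_eq!(chunk.section_count() * 16, 384);

    // Old: fill_block_states(block) filled the whole chunk. New: it fills a single
    // section, selected by its index counted from the bottom of the chunk.
    for sect_y in 0..chunk.section_count() {
        chunk.fill_block_states(sect_y, BlockState::STONE);
    }
}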
valence_anvil/Cargo.toml (new file, 38 lines)

@@ -0,0 +1,38 @@
[package]
name = "valence_anvil"
description = "A library for Minecraft's Anvil world format."
documentation = "https://docs.rs/valence_anvil/"
repository = "https://github.com/valence_anvil/valence/tree/main/valence_anvil"
readme = "README.md"
license = "MIT"
keywords = ["anvil", "minecraft", "deserialization"]
version = "0.1.0"
authors = ["Ryan Johnson <ryanj00a@gmail.com>", "TerminatorNL <TerminatorNL@users.noreply.github.com>"]
edition = "2021"
[dependencies]
byteorder = "1.4.3"
flate2 = "1.0.25"
thiserror = "1.0.37"
num-integer = "0.1.45" # TODO: remove when div_ceil is stabilized.
valence = { version = "0.1.0", path = "..", optional = true }
valence_nbt = { version = "0.5.0", path = "../valence_nbt" }
[dev-dependencies]
anyhow = "1.0.68"
criterion = "0.4.0"
fs_extra = "1.2.0"
tempfile = "3.3.0"
valence = { version = "0.1.0", path = ".." }
valence_anvil = { version = "0.1.0", path = ".", features = ["valence"] }
zip = "0.6.3"
[dev-dependencies.reqwest]
version = "0.11.12"
default-features = false
# Avoid OpenSSL dependency on Linux.
features = ["rustls-tls", "blocking", "stream"]
[[bench]]
name = "world_parsing"
harness = false


@@ -0,0 +1,137 @@
use std::fs::create_dir_all;
use std::path::{Path, PathBuf};
use anyhow::{ensure, Context};
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use fs_extra::dir::CopyOptions;
use reqwest::IntoUrl;
use valence::chunk::UnloadedChunk;
use valence_anvil::AnvilWorld;
use zip::ZipArchive;
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
fn criterion_benchmark(c: &mut Criterion) {
let world_dir = get_world_asset(
"https://github.com/valence-rs/valence-test-data/archive/refs/heads/asset/sp_world_1.19.2.zip",
"1.19.2 benchmark world",
true
).expect("failed to get world asset");
let mut world = AnvilWorld::new(world_dir);
c.bench_function("Load square 10x10", |b| {
b.iter(|| {
let world = black_box(&mut world);
for z in -5..5 {
for x in -5..5 {
let nbt = world
.read_chunk(x, z)
.expect("failed to read chunk")
.expect("missing chunk at position")
.data;
let mut chunk = UnloadedChunk::new(24);
valence_anvil::to_valence(&nbt, &mut chunk, 4, |_| Default::default()).unwrap();
black_box(chunk);
}
}
});
});
}
/// Loads the asset. If the asset is already present on the system due to a
/// prior run, the cached asset is used instead. If the asset is not
/// cached yet, this function downloads the asset using the current thread.
/// This will block until the download is complete.
///
/// Returns the path to the asset on the file system.
fn get_world_asset(
url: impl IntoUrl,
dest_path: impl AsRef<Path>,
remove_top_level_dir: bool,
) -> anyhow::Result<PathBuf> {
let url = url.into_url()?;
let dest_path = dest_path.as_ref();
let asset_cache_dir = Path::new(".asset_cache");
create_dir_all(asset_cache_dir).context("unable to create `.asset_cache` directory")?;
let final_path = asset_cache_dir.join(dest_path);
if final_path.exists() {
return Ok(final_path);
}
let mut response = reqwest::blocking::get(url.clone())?;
let cache_download_directory = asset_cache_dir.join("downloads");
create_dir_all(&cache_download_directory)
.context("unable to create `.asset_cache/downloads` directory")?;
let mut downloaded_zip_file =
tempfile::tempfile_in(&cache_download_directory).context("Could not create temp file")?;
println!("Downloading {dest_path:?} from {url}");
response
.copy_to(&mut downloaded_zip_file)
.context("could not write web contents to the temporary file")?;
let mut zip_archive = ZipArchive::new(downloaded_zip_file)
.context("unable to create zip archive from downloaded content")?;
if !remove_top_level_dir {
zip_archive
.extract(&final_path)
.context("unable to unzip downloaded contents")?;
return Ok(final_path);
}
let temp_dir = tempfile::tempdir_in(&cache_download_directory)
.context("unable to create temporary directory in `.asset_cache`")?;
zip_archive
.extract(&temp_dir)
.context("unable to unzip downloaded contents")?;
let mut entries = temp_dir.path().read_dir()?;
let top_level_dir = entries
.next()
.context("the downloaded zip file was empty")??;
ensure!(
entries.next().is_none(),
"found more than one entry in the top level directory of the Zip file"
);
ensure!(
top_level_dir.path().is_dir(),
"the only content in the zip archive is a file"
);
create_dir_all(&final_path).context("could not create a directory inside the asset cache")?;
let dir_entries = top_level_dir
.path()
.read_dir()?
.collect::<Result<Vec<_>, _>>()?;
let items_to_move: Vec<_> = dir_entries.into_iter().map(|d| d.path()).collect();
fs_extra::move_items(&items_to_move, &final_path, &CopyOptions::new())?;
// We keep the temporary directory around until we're done moving files out
// of it.
drop(temp_dir);
Ok(final_path)
}


@@ -0,0 +1,201 @@
//! # IMPORTANT
//!
//! Run this example with one argument containing the path to the world
//! directory you wish to load. Inside this directory you can commonly see the
//! `advancements`, `DIM1`, `DIM-1` and, most importantly, `region`
//! subdirectories. Only the `region` directory is accessed.
extern crate valence;
use std::env;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use valence::prelude::*;
use valence_anvil::AnvilWorld;
pub fn main() -> ShutdownResult {
let Some(world_dir) = env::args().nth(1) else {
return Err("please add the world directory as program argument.".into())
};
let world_dir = PathBuf::from(world_dir);
if !world_dir.exists() || !world_dir.is_dir() {
return Err("world argument must be a directory that exists".into());
}
if !world_dir.join("region").exists() {
return Err("could not find the \"region\" directory in the given world directory".into());
}
valence::start_server(
Game {
world_dir,
player_count: AtomicUsize::new(0),
},
None,
)
}
#[derive(Debug, Default)]
struct ClientData {
id: EntityId,
//block: valence::block::BlockKind
}
struct Game {
world_dir: PathBuf,
player_count: AtomicUsize,
}
const MAX_PLAYERS: usize = 10;
#[async_trait]
impl Config for Game {
type ServerState = Option<PlayerListId>;
type ClientState = ClientData;
type EntityState = ();
type WorldState = AnvilWorld;
/// Whether the chunk should stay loaded at the end of the tick.
type ChunkState = bool;
type PlayerListState = ();
type InventoryState = ();
async fn server_list_ping(
&self,
_server: &SharedServer<Self>,
_remote_addr: SocketAddr,
_protocol_version: i32,
) -> ServerListPing {
ServerListPing::Respond {
online_players: self.player_count.load(Ordering::SeqCst) as i32,
max_players: MAX_PLAYERS as i32,
player_sample: Default::default(),
description: "Hello Valence!".color(Color::AQUA),
favicon_png: Some(
include_bytes!("../../assets/logo-64x64.png")
.as_slice()
.into(),
),
}
}
fn init(&self, server: &mut Server<Self>) {
for (id, _) in server.shared.dimensions() {
server.worlds.insert(id, AnvilWorld::new(&self.world_dir));
}
server.state = Some(server.player_lists.insert(()).0);
}
fn update(&self, server: &mut Server<Self>) {
let (world_id, world) = server.worlds.iter_mut().next().unwrap();
server.clients.retain(|_, client| {
if client.created_this_tick() {
if self
.player_count
.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |count| {
(count < MAX_PLAYERS).then_some(count + 1)
})
.is_err()
{
client.disconnect("The server is full!".color(Color::RED));
return false;
}
match server
.entities
.insert_with_uuid(EntityKind::Player, client.uuid(), ())
{
Some((id, _)) => client.state.id = id,
None => {
client.disconnect("Conflicting UUID");
return false;
}
}
client.respawn(world_id);
client.set_flat(true);
client.set_game_mode(GameMode::Spectator);
client.teleport([0.0, 125.0, 0.0], 0.0, 0.0);
client.set_player_list(server.state.clone());
if let Some(id) = &server.state {
server.player_lists.get_mut(id).insert(
client.uuid(),
client.username(),
client.textures().cloned(),
client.game_mode(),
0,
None,
);
}
client.send_message("Welcome to the java chunk parsing example!");
client.send_message(
"Chunks with a single lava source block indicates that the chunk is not \
(fully) generated."
.italic(),
);
}
if client.is_disconnected() {
self.player_count.fetch_sub(1, Ordering::SeqCst);
if let Some(id) = &server.state {
server.player_lists.get_mut(id).remove(client.uuid());
}
server.entities.delete(client.id);
return false;
}
if let Some(entity) = server.entities.get_mut(client.state.id) {
while let Some(event) = client.next_event() {
event.handle_default(client, entity);
}
}
let dist = client.view_distance();
let p = client.position();
for pos in ChunkPos::at(p.x, p.z).in_view(dist) {
if let Some(existing) = world.chunks.get_mut(pos) {
existing.state = true;
} else {
match world.state.read_chunk(pos.x, pos.z) {
Ok(Some(anvil_chunk)) => {
let mut chunk = UnloadedChunk::new(24);
if let Err(e) =
valence_anvil::to_valence(&anvil_chunk.data, &mut chunk, 4, |_| {
BiomeId::default()
})
{
eprintln!("Failed to convert chunk at ({}, {}): {e}", pos.x, pos.z);
}
world.chunks.insert(pos, chunk, true);
}
Ok(None) => {
// No chunk at this position.
world.chunks.insert(pos, UnloadedChunk::default(), true);
}
Err(e) => {
eprintln!("Failed to read chunk at ({}, {}): {e}", pos.x, pos.z)
}
}
}
}
true
});
for (_, chunk) in world.chunks.iter_mut() {
if !chunk.state {
chunk.set_deleted(true)
}
}
}
}

valence_anvil/src/lib.rs (new file, 171 lines)

@@ -0,0 +1,171 @@
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::fs::File;
use std::io;
use std::io::{ErrorKind, Read, Seek, SeekFrom};
use std::path::PathBuf;
use byteorder::{BigEndian, ReadBytesExt};
use flate2::bufread::{GzDecoder, ZlibDecoder};
use thiserror::Error;
#[cfg(feature = "valence")]
pub use to_valence::*;
use valence_nbt::Compound;
#[cfg(feature = "valence")]
mod to_valence;
#[derive(Debug)]
pub struct AnvilWorld {
/// Path to the "region" subdirectory in the world root.
region_root: PathBuf,
/// Maps region (x, z) positions to region files.
regions: BTreeMap<(i32, i32), Region>,
}
#[derive(Clone, PartialEq, Debug)]
pub struct AnvilChunk {
/// This chunk's NBT data.
pub data: Compound,
/// The time this chunk was last modified measured in seconds since the
/// epoch.
pub timestamp: u32,
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ReadChunkError {
#[error(transparent)]
Io(#[from] io::Error),
#[error(transparent)]
Nbt(#[from] valence_nbt::Error),
#[error("invalid chunk sector offset")]
BadSectorOffset,
#[error("invalid chunk size")]
BadChunkSize,
#[error("unknown compression scheme number of {0}")]
UnknownCompressionScheme(u8),
#[error("not all chunk NBT data was read")]
IncompleteNbtRead,
}
#[derive(Debug)]
struct Region {
file: File,
/// The first 8 KiB in the file.
header: [u8; SECTOR_SIZE * 2],
}
const SECTOR_SIZE: usize = 4096;
impl AnvilWorld {
pub fn new(world_root: impl Into<PathBuf>) -> Self {
let mut region_root = world_root.into();
region_root.push("region");
Self {
region_root,
regions: BTreeMap::new(),
}
}
/// Reads a chunk from the file system with the given chunk coordinates. If
/// no chunk exists at the position, then `None` is returned.
pub fn read_chunk(
&mut self,
chunk_x: i32,
chunk_z: i32,
) -> Result<Option<AnvilChunk>, ReadChunkError> {
let region_x = chunk_x.div_euclid(32);
let region_z = chunk_z.div_euclid(32);
let region = match self.regions.entry((region_x, region_z)) {
Entry::Vacant(ve) => {
// Load the region file if it exists. Otherwise, the chunk is considered absent.
let path = self
.region_root
.join(format!("r.{region_x}.{region_z}.mca"));
let mut file = match File::options().read(true).write(true).open(path) {
Ok(file) => file,
Err(e) if e.kind() == ErrorKind::NotFound => return Ok(None),
Err(e) => return Err(e.into()),
};
let mut header = [0; SECTOR_SIZE * 2];
file.read_exact(&mut header)?;
ve.insert(Region { file, header })
}
Entry::Occupied(oe) => oe.into_mut(),
};
let chunk_idx = (chunk_x.rem_euclid(32) + chunk_z.rem_euclid(32) * 32) as usize;
let location_bytes = (&region.header[chunk_idx * 4..]).read_u32::<BigEndian>()?;
let timestamp = (&region.header[chunk_idx * 4 + SECTOR_SIZE..]).read_u32::<BigEndian>()?;
if location_bytes == 0 {
// No chunk exists at this position.
return Ok(None);
}
let sector_offset = (location_bytes >> 8) as u64;
let sector_count = (location_bytes & 0xff) as usize;
if sector_offset < 2 {
// If the sector offset was <2, then the chunk data would be inside the region
// header. That doesn't make any sense.
return Err(ReadChunkError::BadSectorOffset);
}
// Seek to the beginning of the chunk's data.
region
.file
.seek(SeekFrom::Start(sector_offset * SECTOR_SIZE as u64))?;
let exact_chunk_size = region.file.read_u32::<BigEndian>()? as usize;
if exact_chunk_size > sector_count * SECTOR_SIZE {
// Sector size of this chunk must always be >= the exact size.
return Err(ReadChunkError::BadChunkSize);
}
let mut data_buf = vec![0; exact_chunk_size].into_boxed_slice();
region.file.read_exact(&mut data_buf)?;
let mut r = data_buf.as_ref();
let mut decompress_buf = vec![];
// What compression does the chunk use?
let mut nbt_slice = match r.read_u8()? {
// GZip
1 => {
let mut z = GzDecoder::new(r);
z.read_to_end(&mut decompress_buf)?;
decompress_buf.as_slice()
}
// Zlib
2 => {
let mut z = ZlibDecoder::new(r);
z.read_to_end(&mut decompress_buf)?;
decompress_buf.as_slice()
}
// Uncompressed
3 => r,
// Unknown
b => return Err(ReadChunkError::UnknownCompressionScheme(b)),
};
let (data, _) = valence_nbt::from_binary_slice(&mut nbt_slice)?;
if !nbt_slice.is_empty() {
return Err(ReadChunkError::IncompleteNbtRead);
}
Ok(Some(AnvilChunk { data, timestamp }))
}
}

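As an aside on the header lookup in read_chunk above: the region file name and the index into the 8 KiB header follow directly from the div_euclid/rem_euclid arithmetic. A small standalone illustration (not part of the crate):

/// Which region file a chunk lives in, and its index into that region's header.
fn region_lookup(chunk_x: i32, chunk_z: i32) -> ((i32, i32), usize) {
    let region = (chunk_x.div_euclid(32), chunk_z.div_euclid(32));
    let index = (chunk_x.rem_euclid(32) + chunk_z.rem_euclid(32) * 32) as usize;
    (region, index)
}

#[test]
fn region_lookup_example() {
    // Chunk (33, -1) is stored in r.1.-1.mca at header index 993: its location entry
    // starts at header byte 993 * 4 and its timestamp entry at byte 993 * 4 + 4096.
    assert_eq!(region_lookup(33, -1), ((1, -1), 993));
}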

@@ -0,0 +1,275 @@
use num_integer::div_ceil;
use thiserror::Error;
use valence::biome::BiomeId;
use valence::chunk::Chunk;
use valence::protocol::block::{BlockKind, PropName, PropValue};
use valence::protocol::Ident;
use valence_nbt::{Compound, List, Value};
#[derive(Clone, Debug, Error)]
#[non_exhaustive]
pub enum ToValenceError {
#[error("missing chunk sections")]
MissingSections,
#[error("missing chunk section Y")]
MissingSectionY,
#[error("missing block states")]
MissingBlockStates,
#[error("missing block palette")]
MissingBlockPalette,
#[error("invalid block palette length")]
BadBlockPaletteLen,
#[error("missing block name in palette")]
MissingBlockName,
#[error("unknown block name of \"{0}\"")]
UnknownBlockName(String),
#[error("unknown property name of \"{0}\"")]
UnknownPropName(String),
#[error("property value of block is not a string")]
BadPropValueType,
#[error("unknown property value of \"{0}\"")]
UnknownPropValue(String),
#[error("missing packed block state data in section")]
MissingBlockStateData,
#[error("unexpected number of longs in block state data")]
BadBlockLongCount,
#[error("invalid block palette index")]
BadBlockPaletteIndex,
#[error("missing biomes")]
MissingBiomes,
#[error("missing biome palette")]
MissingBiomePalette,
#[error("invalid biome palette length")]
BadBiomePaletteLen,
#[error("biome name is not a valid resource identifier")]
BadBiomeName,
#[error("missing biome name")]
MissingBiomeName,
#[error("missing packed biome data in section")]
MissingBiomeData,
#[error("unexpected number of longs in biome data")]
BadBiomeLongCount,
#[error("invalid biome palette index")]
BadBiomePaletteIndex,
}
/// Reads an Anvil chunk in NBT form and writes its data to a Valence [`Chunk`].
/// An error is returned if the NBT data does not match the expected structure
/// for an Anvil chunk.
///
/// # Arguments
///
/// - `nbt`: The Anvil chunk to read from. This is usually the `data` field of the
/// value returned by [`read_chunk`].
/// - `chunk`: The Valence chunk to write to.
/// - `sect_offset`: A constant to add to all section Y positions in `nbt`. After
/// applying the offset, only the sections in the range
/// `0..chunk.section_count()` are written.
/// - `map_biome`: A function to map biome resource identifiers in the NBT data
/// to Valence [`BiomeId`]s.
///
/// [`read_chunk`]: crate::AnvilWorld::read_chunk
pub fn to_valence<C, F>(
nbt: &Compound,
chunk: &mut C,
sect_offset: i32,
mut map_biome: F,
) -> Result<(), ToValenceError>
where
C: Chunk,
F: FnMut(Ident<&str>) -> BiomeId,
{
let Some(Value::List(List::Compound(sections))) = nbt.get("sections") else {
return Err(ToValenceError::MissingSections)
};
let mut converted_block_palette = vec![];
let mut converted_biome_palette = vec![];
for section in sections {
let Some(Value::Byte(sect_y)) = section.get("Y") else {
return Err(ToValenceError::MissingSectionY)
};
let adjusted_sect_y = *sect_y as i32 + sect_offset;
if adjusted_sect_y < 0 || adjusted_sect_y as usize >= chunk.section_count() {
// Section is out of bounds. Skip it.
continue;
}
let Some(Value::Compound(block_states)) = section.get("block_states") else {
return Err(ToValenceError::MissingBlockStates)
};
let Some(Value::List(List::Compound(palette))) = block_states.get("palette") else {
return Err(ToValenceError::MissingBlockPalette)
};
if !(1..BLOCKS_PER_SECTION).contains(&palette.len()) {
return Err(ToValenceError::BadBlockPaletteLen);
}
converted_block_palette.clear();
for block in palette {
let Some(Value::String(name)) = block.get("Name") else {
return Err(ToValenceError::MissingBlockName)
};
let Some(block_kind) = BlockKind::from_str(ident_path(name)) else {
return Err(ToValenceError::UnknownBlockName(name.into()))
};
let mut state = block_kind.to_state();
if let Some(Value::Compound(properties)) = block.get("Properties") {
for (key, value) in properties {
let Value::String(value) = value else {
return Err(ToValenceError::BadPropValueType)
};
let Some(prop_name) = PropName::from_str(key) else {
return Err(ToValenceError::UnknownPropName(key.into()))
};
let Some(prop_value) = PropValue::from_str(value) else {
return Err(ToValenceError::UnknownPropValue(value.into()))
};
state = state.set(prop_name, prop_value);
}
}
converted_block_palette.push(state);
}
if converted_block_palette.len() == 1 {
chunk.fill_block_states(adjusted_sect_y as usize, converted_block_palette[0]);
} else {
debug_assert!(converted_block_palette.len() > 1);
let Some(Value::LongArray(data)) = block_states.get("data") else {
return Err(ToValenceError::MissingBlockStateData)
};
let bits_per_idx = bit_width(converted_block_palette.len() - 1).max(4);
let idxs_per_long = 64 / bits_per_idx;
let long_count = div_ceil(BLOCKS_PER_SECTION, idxs_per_long);
let mask = 2_u64.pow(bits_per_idx as u32) - 1;
if long_count != data.len() {
return Err(ToValenceError::BadBlockLongCount);
};
let mut i = 0;
for &long in data.iter() {
let u64 = long as u64;
for j in 0..idxs_per_long {
if i >= BLOCKS_PER_SECTION {
break;
}
let idx = (u64 >> (bits_per_idx * j)) & mask;
let Some(block) = converted_block_palette.get(idx as usize).cloned() else {
return Err(ToValenceError::BadBlockPaletteIndex)
};
let x = i % 16;
let z = i / 16 % 16;
let y = i / (16 * 16);
chunk.set_block_state(x, adjusted_sect_y as usize * 16 + y, z, block);
i += 1;
}
}
}
let Some(Value::Compound(biomes)) = section.get("biomes") else {
return Err(ToValenceError::MissingBiomes)
};
let Some(Value::List(List::String(palette))) = biomes.get("palette") else {
return Err(ToValenceError::MissingBiomePalette)
};
if !(1..BIOMES_PER_SECTION).contains(&palette.len()) {
return Err(ToValenceError::BadBiomePaletteLen);
}
converted_biome_palette.clear();
for biome_name in palette {
let Ok(ident) = Ident::new(biome_name.as_str()) else {
return Err(ToValenceError::BadBiomeName)
};
converted_biome_palette.push(map_biome(ident));
}
if converted_biome_palette.len() == 1 {
chunk.fill_biomes(adjusted_sect_y as usize, converted_biome_palette[0]);
} else {
debug_assert!(converted_biome_palette.len() > 1);
let Some(Value::LongArray(data)) = biomes.get("data") else {
return Err(ToValenceError::MissingBiomeData)
};
let bits_per_idx = bit_width(converted_biome_palette.len() - 1);
let idxs_per_long = 64 / bits_per_idx;
let long_count = div_ceil(BIOMES_PER_SECTION, idxs_per_long);
let mask = 2_u64.pow(bits_per_idx as u32) - 1;
if long_count != data.len() {
return Err(ToValenceError::BadBiomeLongCount);
};
let mut i = 0;
for &long in data.iter() {
let u64 = long as u64;
for j in 0..idxs_per_long {
if i >= BIOMES_PER_SECTION {
break;
}
let idx = (u64 >> (bits_per_idx * j)) & mask;
let Some(biome) = converted_biome_palette.get(idx as usize).cloned() else {
return Err(ToValenceError::BadBiomePaletteIndex)
};
let x = i % 4;
let z = i / 4 % 4;
let y = i / (4 * 4);
chunk.set_biome(x, adjusted_sect_y as usize * 4 + y, z, biome);
i += 1;
}
}
}
}
Ok(())
}
const BLOCKS_PER_SECTION: usize = 16 * 16 * 16;
const BIOMES_PER_SECTION: usize = 4 * 4 * 4;
/// Gets the path part of a resource identifier.
fn ident_path(ident: &str) -> &str {
match ident.rsplit_once(':') {
Some((_, after)) => after,
None => ident,
}
}
/// Returns the minimum number of bits needed to represent the integer `n`.
const fn bit_width(n: usize) -> usize {
(usize::BITS - n.leading_zeros()) as _
}
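Lastly, a worked example of the palette packing math used for both blocks and biomes above. It simply replays the bits_per_idx/long_count/mask computation for a hypothetical block palette with five entries (block indices are padded to a minimum of 4 bits; biome indices are not):

// Replays the packed-index math for a block palette with 5 entries.
fn block_packing_example() {
    let palette_len: usize = 5;

    // bit_width(5 - 1) == 3, padded up to the 4-bit minimum used for block indices.
    let bits_per_idx = ((usize::BITS - (palette_len - 1).leading_zeros()) as usize).max(4);
    let idxs_per_long = 64 / bits_per_idx; // 16 indices packed into each i64
    let long_count = (4096 + idxs_per_long - 1) / idxs_per_long; // expected length of "data"
    let mask = (1u64 << bits_per_idx) - 1;

    assert_eq!((bits_per_idx, idxs_per_long, long_count, mask), (4, 16, 256, 0b1111));

    // Index i within the section then maps to x = i % 16, z = i / 16 % 16, y = i / 256.
}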