From 5e96be3fda14fe87941c4e7fcc51cfae881b7e11 Mon Sep 17 00:00:00 2001
From: Matthias Schiffer
Date: Wed, 25 Jan 2023 21:41:08 +0100
Subject: [PATCH] Refactor logic from new dump tools into library crate

---
 Cargo.toml            |   1 +
 src/bin/nbtdump.rs    |  19 ++----
 src/bin/regiondump.rs | 135 +-------------------------------------
 src/io/data.rs        |  28 ++++++++
 src/io/mod.rs         |   2 +
 src/io/region.rs      | 147 ++++++++++++++++++++++++++++++++++++++++++
 src/lib.rs            |   2 +
 src/types.rs          |   9 +++
 8 files changed, 196 insertions(+), 147 deletions(-)
 create mode 100644 src/io/data.rs
 create mode 100644 src/io/mod.rs
 create mode 100644 src/io/region.rs
 create mode 100644 src/lib.rs
 create mode 100644 src/types.rs

diff --git a/Cargo.toml b/Cargo.toml
index 977525b..b704512 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,6 +3,7 @@ name = "minedmap"
 version = "0.1.0"
 edition = "2021"
 license = "BSD-2-Clause"
+default-run = "minedmap"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
diff --git a/src/bin/nbtdump.rs b/src/bin/nbtdump.rs
index 20236f7..b2f287e 100644
--- a/src/bin/nbtdump.rs
+++ b/src/bin/nbtdump.rs
@@ -1,8 +1,7 @@
-use std::{fs::File, io::prelude::*, path::PathBuf};
+use std::path::PathBuf;
 
-use anyhow::{Context, Result};
+use anyhow::Result;
 use clap::Parser;
-use flate2::read::GzDecoder;
 
 #[derive(Debug, Parser)]
 struct Args {
@@ -13,18 +12,8 @@ struct Args {
 fn main() -> Result<()> {
 	let args = Args::parse();
 
-	let file = File::open(&args.file).context("Failed to open file")?;
-
-	let mut decoder = GzDecoder::new(file);
-	let mut buf = vec![];
-	decoder
-		.read_to_end(&mut buf)
-		.context("Failed to read file")?;
-
-	let nbt: fastnbt::Value =
-		fastnbt::from_bytes(buf.as_slice()).context("Failed to decode NBT data")?;
-
-	println!("{:#x?}", nbt);
+	let value: fastnbt::Value = minedmap::io::data::from_file(&args.file)?;
+	println!("{:#x?}", value);
 
 	Ok(())
 }
diff --git a/src/bin/regiondump.rs b/src/bin/regiondump.rs
index d0e27ce..1a3b216 100644
--- a/src/bin/regiondump.rs
+++ b/src/bin/regiondump.rs
@@ -1,134 +1,7 @@
-use std::{
-	collections::HashMap,
-	fs::File,
-	io::{prelude::*, SeekFrom},
-	path::PathBuf,
-};
+use std::path::PathBuf;
 
-use anyhow::{bail, Context, Result};
+use anyhow::Result;
 use clap::Parser;
-use flate2::read::ZlibDecoder;
-use serde::de::DeserializeOwned;
-
-const BLOCKSIZE: usize = 4096;
-const CHUNKS_PER_REGION: u8 = 32;
-
-/// A chunk X coordinate relative to a region
-#[derive(Debug, Clone, Copy)]
-pub struct ChunkX(pub u8);
-
-/// A chunk Z coordinate relative to a region
-#[derive(Debug, Clone, Copy)]
-pub struct ChunkZ(pub u8);
-
-#[derive(Debug)]
-struct ChunkDesc {
-	x: ChunkX,
-	z: ChunkZ,
-	len: u8,
-}
-
-fn parse_header(header: &[u8; BLOCKSIZE]) -> HashMap<u32, ChunkDesc> {
-	let mut map = HashMap::new();
-
-	for z in 0..CHUNKS_PER_REGION {
-		for x in 0..CHUNKS_PER_REGION {
-			let chunk =
-				&header[(4 * (usize::from(CHUNKS_PER_REGION) * usize::from(z) + usize::from(x)))..];
-
-			let offset = u32::from(chunk[0]) << 16 | u32::from(chunk[1]) << 8 | u32::from(chunk[2]);
-			if offset == 0 {
-				continue;
-			}
-
-			let len = chunk[3];
-
-			map.insert(
-				offset,
-				ChunkDesc {
-					x: ChunkX(x),
-					z: ChunkZ(z),
-					len,
-				},
-			);
-		}
-	}
-
-	map
-}
-
-fn decode_chunk<T>(buf: &[u8]) -> Result<T>
-where
-	T: DeserializeOwned,
-{
-	let (len_bytes, buf) = buf.split_at(4);
-	let len = u32::from_be_bytes(
-		len_bytes
-			.try_into()
-			.context("Failed to decode chunk size")?,
-	) as usize;
-
-	let buf = &buf[..len];
-	let (format, buf) = buf.split_at(1);
-	if format.get(0) != Some(&2) {
-		bail!("Unknown chunk format");
-	}
-
-	let mut decoder = ZlibDecoder::new(&buf[..]);
-	let mut decode_buffer = vec![];
-	decoder
-		.read_to_end(&mut decode_buffer)
-		.context("Failed to decompress chunk data")?;
-
-	fastnbt::from_bytes(&decode_buffer).context("Failed to decode NBT data")
-}
-
-fn foreach_chunk<R, T, F>(mut reader: R, mut f: F) -> Result<()>
-where
-	R: Read + Seek,
-	T: DeserializeOwned,
-	F: FnMut(ChunkX, ChunkZ, T),
-{
-	let chunk_map = {
-		let mut header = [0u8; BLOCKSIZE];
-		reader
-			.read_exact(&mut header)
-			.context("Failed to read region header")?;
-
-		parse_header(&header)
-	};
-
-	let mut index = 1;
-	let mut count = 0;
-	let mut seen = [[false; CHUNKS_PER_REGION as usize]; CHUNKS_PER_REGION as usize];
-
-	while count < chunk_map.len() {
-		let Some(&ChunkDesc { x, z, len }) = chunk_map.get(&index) else {
-			reader.seek(SeekFrom::Current(BLOCKSIZE as i64)).context("Failed to seek chunk data")?;
-			index += 1;
-			continue;
-		};
-
-		let chunk_seen = &mut seen[x.0 as usize][z.0 as usize];
-		if *chunk_seen {
-			bail!("Duplicate chunk");
-		}
-
-		*chunk_seen = true;
-		count += 1;
-
-		let mut buffer = vec![0; (len as usize) * BLOCKSIZE];
-		reader
-			.read_exact(&mut buffer[..])
-			.context("Failed to read chunk data")?;
-
-		f(x, z, decode_chunk(&buffer[..])?);
-
-		index += len as u32;
-	}
-
-	Ok(())
-}
 
 #[derive(Debug, Parser)]
 struct Args {
@@ -139,9 +12,7 @@ struct Args {
 fn main() -> Result<()> {
 	let args = Args::parse();
 
-	let mut file = File::open(&args.file).context("Failed to open file")?;
-
-	foreach_chunk(&mut file, |x, z, value: fastnbt::Value| {
+	minedmap::io::region::from_file(&args.file)?.foreach_chunk(|x, z, value: fastnbt::Value| {
 		println!("Chunk({}, {}): {:#x?}", x.0, z.0, value);
 	})
 }
diff --git a/src/io/data.rs b/src/io/data.rs
new file mode 100644
index 0000000..a4e08b7
--- /dev/null
+++ b/src/io/data.rs
@@ -0,0 +1,28 @@
+use std::{fs::File, io::prelude::*, path::Path};
+
+use anyhow::{Context, Result};
+use flate2::read::GzDecoder;
+use serde::de::DeserializeOwned;
+
+pub fn from_reader<R, T>(reader: R) -> Result<T>
+where
+	R: Read,
+	T: DeserializeOwned,
+{
+	let mut decoder = GzDecoder::new(reader);
+	let mut buf = vec![];
+	decoder
+		.read_to_end(&mut buf)
+		.context("Failed to read file")?;
+
+	fastnbt::from_bytes(&buf[..]).context("Failed to decode NBT data")
+}
+
+pub fn from_file<P, T>(path: P) -> Result<T>
+where
+	P: AsRef<Path>,
+	T: DeserializeOwned,
+{
+	let file = File::open(path).context("Failed to open file")?;
+	from_reader(file)
+}
diff --git a/src/io/mod.rs b/src/io/mod.rs
new file mode 100644
index 0000000..f109d15
--- /dev/null
+++ b/src/io/mod.rs
@@ -0,0 +1,2 @@
+pub mod data;
+pub mod region;
diff --git a/src/io/region.rs b/src/io/region.rs
new file mode 100644
index 0000000..fad020c
--- /dev/null
+++ b/src/io/region.rs
@@ -0,0 +1,147 @@
+use std::{
+	collections::HashMap,
+	fs::File,
+	io::{prelude::*, SeekFrom},
+	path::Path,
+};
+
+use anyhow::{bail, Context, Result};
+use flate2::read::ZlibDecoder;
+use serde::de::DeserializeOwned;
+
+use crate::types::*;
+
+const BLOCKSIZE: usize = 4096;
+
+#[derive(Debug)]
+struct ChunkDesc {
+	x: ChunkX,
+	z: ChunkZ,
+	len: u8,
+}
+
+fn parse_header(header: &[u8; BLOCKSIZE]) -> HashMap<u32, ChunkDesc> {
+	let mut map = HashMap::new();
+
+	for z in 0..CHUNKS_PER_REGION {
+		for x in 0..CHUNKS_PER_REGION {
+			let chunk =
+				&header[(4 * (usize::from(CHUNKS_PER_REGION) * usize::from(z) + usize::from(x)))..];
+
+			let offset = u32::from(chunk[0]) << 16 | u32::from(chunk[1]) << 8 | u32::from(chunk[2]);
+			if offset == 0 {
+				continue;
+			}
+
+			let len = chunk[3];
+
+			map.insert(
+				offset,
+				ChunkDesc {
+					x: ChunkX(x),
+					z: ChunkZ(z),
+					len,
+				},
+			);
+		}
+	}
+
+	map
+}
+
+fn decode_chunk<T>(buf: &[u8]) -> Result<T>
+where
+	T: DeserializeOwned,
+{
+	let (len_bytes, buf) = buf.split_at(4);
+	let len = u32::from_be_bytes(
+		len_bytes
+			.try_into()
+			.context("Failed to decode chunk size")?,
+	) as usize;
+
+	let buf = &buf[..len];
+	let (format, buf) = buf.split_at(1);
+	if format.get(0) != Some(&2) {
+		bail!("Unknown chunk format");
+	}
+
+	let mut decoder = ZlibDecoder::new(&buf[..]);
+	let mut decode_buffer = vec![];
+	decoder
+		.read_to_end(&mut decode_buffer)
+		.context("Failed to decompress chunk data")?;
+
+	fastnbt::from_bytes(&decode_buffer).context("Failed to decode NBT data")
+}
+
+#[derive(Debug)]
+pub struct Region<R> {
+	reader: R,
+}
+
+impl<R> Region<R> {
+	pub fn foreach_chunk<T, F>(self, mut f: F) -> Result<()>
+	where
+		R: Read + Seek,
+		T: DeserializeOwned,
+		F: FnMut(ChunkX, ChunkZ, T),
+	{
+		let Region { mut reader } = self;
+
+		let chunk_map = {
+			let mut header = [0u8; BLOCKSIZE];
+			reader
+				.read_exact(&mut header)
+				.context("Failed to read region header")?;
+
+			parse_header(&header)
+		};
+
+		let mut index = 1;
+		let mut count = 0;
+		let mut seen = [[false; CHUNKS_PER_REGION as usize]; CHUNKS_PER_REGION as usize];
+
+		while count < chunk_map.len() {
+			let Some(&ChunkDesc { x, z, len }) = chunk_map.get(&index) else {
+				reader.seek(SeekFrom::Current(BLOCKSIZE as i64)).context("Failed to seek chunk data")?;
+				index += 1;
+				continue;
+			};
+
+			let chunk_seen = &mut seen[x.0 as usize][z.0 as usize];
+			if *chunk_seen {
+				bail!("Duplicate chunk");
+			}
+
+			*chunk_seen = true;
+			count += 1;
+
+			let mut buffer = vec![0; (len as usize) * BLOCKSIZE];
+			reader
+				.read_exact(&mut buffer[..])
+				.context("Failed to read chunk data")?;
+
+			f(x, z, decode_chunk(&buffer[..])?);
+
+			index += len as u32;
+		}
+
+		Ok(())
+	}
+}
+
+pub fn from_reader<R>(reader: R) -> Region<R>
+where
+	R: Read + Seek,
+{
+	Region { reader }
+}
+
+pub fn from_file<P>(path: P) -> Result<Region<File>>
+where
+	P: AsRef<Path>,
+{
+	let file = File::open(path).context("Failed to open file")?;
+	Ok(from_reader(file))
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..146b296
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,2 @@
+pub mod io;
+pub mod types;
diff --git a/src/types.rs b/src/types.rs
new file mode 100644
index 0000000..54593a4
--- /dev/null
+++ b/src/types.rs
@@ -0,0 +1,9 @@
+pub const CHUNKS_PER_REGION: u8 = 32;
+
+/// A chunk X coordinate relative to a region
+#[derive(Debug, Clone, Copy)]
+pub struct ChunkX(pub u8);
+
+/// A chunk Z coordinate relative to a region
+#[derive(Debug, Clone, Copy)]
+pub struct ChunkZ(pub u8);