Refactor logic from new dump tools into library crate

Matthias Schiffer 2023-01-25 21:41:08 +01:00
parent 5a364e2434
commit 5e96be3fda
Signed by: neocturne
GPG key ID: 16EF3F64CB201D9C
8 changed files with 196 additions and 147 deletions

Cargo.toml

@@ -3,6 +3,7 @@ name = "minedmap"
 version = "0.1.0"
 edition = "2021"
 license = "BSD-2-Clause"
+default-run = "minedmap"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
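(Note: with the dump tools now built alongside the map renderer, the crate contains more than one binary; `default-run = "minedmap"` keeps a plain `cargo run` pointed at the main binary, while the other binaries are selected with `cargo run --bin <name>`.)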

src/bin/nbtdump.rs

@@ -1,8 +1,7 @@
-use std::{fs::File, io::prelude::*, path::PathBuf};
+use std::path::PathBuf;

-use anyhow::{Context, Result};
+use anyhow::Result;
 use clap::Parser;
-use flate2::read::GzDecoder;

 #[derive(Debug, Parser)]
 struct Args {
@@ -13,18 +12,8 @@ struct Args {
 fn main() -> Result<()> {
 	let args = Args::parse();

-	let file = File::open(&args.file).context("Failed to open file")?;
-
-	let mut decoder = GzDecoder::new(file);
-	let mut buf = vec![];
-	decoder
-		.read_to_end(&mut buf)
-		.context("Failed to read file")?;
-
-	let nbt: fastnbt::Value =
-		fastnbt::from_bytes(buf.as_slice()).context("Failed to decode NBT data")?;
-
-	println!("{:#x?}", nbt);
+	let value: fastnbt::Value = minedmap::io::data::from_file(&args.file)?;
+	println!("{:#x?}", value);

 	Ok(())
 }

src/bin/regiondump.rs

@@ -1,134 +1,7 @@
-use std::{
-	collections::HashMap,
-	fs::File,
-	io::{prelude::*, SeekFrom},
-	path::PathBuf,
-};
+use std::path::PathBuf;

-use anyhow::{bail, Context, Result};
+use anyhow::Result;
 use clap::Parser;
-use flate2::read::ZlibDecoder;
-use serde::de::DeserializeOwned;
-
-const BLOCKSIZE: usize = 4096;
-const CHUNKS_PER_REGION: u8 = 32;
-
-/// A chunk X coordinate relative to a region
-#[derive(Debug, Clone, Copy)]
-pub struct ChunkX(pub u8);
-
-/// A chunk Z coordinate relative to a region
-#[derive(Debug, Clone, Copy)]
-pub struct ChunkZ(pub u8);
-
-#[derive(Debug)]
-struct ChunkDesc {
-	x: ChunkX,
-	z: ChunkZ,
-	len: u8,
-}
-
-fn parse_header(header: &[u8; BLOCKSIZE]) -> HashMap<u32, ChunkDesc> {
-	let mut map = HashMap::new();
-
-	for z in 0..CHUNKS_PER_REGION {
-		for x in 0..CHUNKS_PER_REGION {
-			let chunk =
-				&header[(4 * (usize::from(CHUNKS_PER_REGION) * usize::from(z) + usize::from(x)))..];
-
-			let offset = u32::from(chunk[0]) << 16 | u32::from(chunk[1]) << 8 | u32::from(chunk[2]);
-			if offset == 0 {
-				continue;
-			}
-
-			let len = chunk[3];
-
-			map.insert(
-				offset,
-				ChunkDesc {
-					x: ChunkX(x),
-					z: ChunkZ(z),
-					len,
-				},
-			);
-		}
-	}
-
-	map
-}
-
-fn decode_chunk<T>(buf: &[u8]) -> Result<T>
-where
-	T: DeserializeOwned,
-{
-	let (len_bytes, buf) = buf.split_at(4);
-	let len = u32::from_be_bytes(
-		len_bytes
-			.try_into()
-			.context("Failed to decode chunk size")?,
-	) as usize;
-	let buf = &buf[..len];
-
-	let (format, buf) = buf.split_at(1);
-	if format.get(0) != Some(&2) {
-		bail!("Unknown chunk format");
-	}
-
-	let mut decoder = ZlibDecoder::new(&buf[..]);
-	let mut decode_buffer = vec![];
-	decoder
-		.read_to_end(&mut decode_buffer)
-		.context("Failed to decompress chunk data")?;
-
-	fastnbt::from_bytes(&decode_buffer).context("Failed to decode NBT data")
-}
-
-fn foreach_chunk<R, T, F>(mut reader: R, mut f: F) -> Result<()>
-where
-	R: Read + Seek,
-	T: DeserializeOwned,
-	F: FnMut(ChunkX, ChunkZ, T),
-{
-	let chunk_map = {
-		let mut header = [0u8; BLOCKSIZE];
-		reader
-			.read_exact(&mut header)
-			.context("Failed to read region header")?;
-
-		parse_header(&header)
-	};
-
-	let mut index = 1;
-	let mut count = 0;
-	let mut seen = [[false; CHUNKS_PER_REGION as usize]; CHUNKS_PER_REGION as usize];
-
-	while count < chunk_map.len() {
-		let Some(&ChunkDesc { x, z, len }) = chunk_map.get(&index) else {
-			reader.seek(SeekFrom::Current(BLOCKSIZE as i64)).context("Failed to seek chunk data")?;
-			index += 1;
-			continue;
-		};
-
-		let chunk_seen = &mut seen[x.0 as usize][z.0 as usize];
-		if *chunk_seen {
-			bail!("Duplicate chunk");
-		}
-		*chunk_seen = true;
-
-		count += 1;
-
-		let mut buffer = vec![0; (len as usize) * BLOCKSIZE];
-		reader
-			.read_exact(&mut buffer[..])
-			.context("Failed to read chunk data")?;
-
-		f(x, z, decode_chunk(&buffer[..])?);
-
-		index += len as u32;
-	}
-
-	Ok(())
-}
-
 #[derive(Debug, Parser)]
 struct Args {
@@ -139,9 +12,7 @@ struct Args {
 fn main() -> Result<()> {
 	let args = Args::parse();

-	let mut file = File::open(&args.file).context("Failed to open file")?;
-
-	foreach_chunk(&mut file, |x, z, value: fastnbt::Value| {
+	minedmap::io::region::from_file(&args.file)?.foreach_chunk(|x, z, value: fastnbt::Value| {
 		println!("Chunk({}, {}): {:#x?}", x.0, z.0, value);
 	})
 }

src/io/data.rs (new file, 28 lines)

@@ -0,0 +1,28 @@
+use std::{fs::File, io::prelude::*, path::Path};
+
+use anyhow::{Context, Result};
+use flate2::read::GzDecoder;
+use serde::de::DeserializeOwned;
+
+pub fn from_reader<R, T>(reader: R) -> Result<T>
+where
+	R: Read,
+	T: DeserializeOwned,
+{
+	let mut decoder = GzDecoder::new(reader);
+	let mut buf = vec![];
+	decoder
+		.read_to_end(&mut buf)
+		.context("Failed to read file")?;
+
+	fastnbt::from_bytes(&buf[..]).context("Failed to decode NBT data")
+}
+
+pub fn from_file<P, T>(path: P) -> Result<T>
+where
+	P: AsRef<Path>,
+	T: DeserializeOwned,
+{
+	let file = File::open(path).context("Failed to open file")?;
+	from_reader(file)
+}
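For reference, a minimal sketch of calling the new helper from outside the crate, mirroring what the NBT dump tool's main() now does; the level.dat path is an assumption, any gzip-compressed NBT file works:

use anyhow::Result;

fn dump_gzipped_nbt() -> Result<()> {
	// from_file opens the file, gunzips it, and decodes the NBT payload into
	// any DeserializeOwned target; fastnbt::Value preserves the full tag tree.
	let value: fastnbt::Value = minedmap::io::data::from_file("world/level.dat")?;
	println!("{:#x?}", value);
	Ok(())
}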

src/io/mod.rs (new file, 2 lines)

@@ -0,0 +1,2 @@
+pub mod data;
+pub mod region;

src/io/region.rs (new file, 147 lines)

@@ -0,0 +1,147 @@
+use std::{
+	collections::HashMap,
+	fs::File,
+	io::{prelude::*, SeekFrom},
+	path::Path,
+};
+
+use anyhow::{bail, Context, Result};
+use flate2::read::ZlibDecoder;
+use serde::de::DeserializeOwned;
+
+use crate::types::*;
+
+const BLOCKSIZE: usize = 4096;
+
+#[derive(Debug)]
+struct ChunkDesc {
+	x: ChunkX,
+	z: ChunkZ,
+	len: u8,
+}
+
+fn parse_header(header: &[u8; BLOCKSIZE]) -> HashMap<u32, ChunkDesc> {
+	let mut map = HashMap::new();
+
+	for z in 0..CHUNKS_PER_REGION {
+		for x in 0..CHUNKS_PER_REGION {
+			let chunk =
+				&header[(4 * (usize::from(CHUNKS_PER_REGION) * usize::from(z) + usize::from(x)))..];
+
+			let offset = u32::from(chunk[0]) << 16 | u32::from(chunk[1]) << 8 | u32::from(chunk[2]);
+			if offset == 0 {
+				continue;
+			}
+
+			let len = chunk[3];
+
+			map.insert(
+				offset,
+				ChunkDesc {
+					x: ChunkX(x),
+					z: ChunkZ(z),
+					len,
+				},
+			);
+		}
+	}
+
+	map
+}
+
+fn decode_chunk<T>(buf: &[u8]) -> Result<T>
+where
+	T: DeserializeOwned,
+{
+	let (len_bytes, buf) = buf.split_at(4);
+	let len = u32::from_be_bytes(
+		len_bytes
+			.try_into()
+			.context("Failed to decode chunk size")?,
+	) as usize;
+	let buf = &buf[..len];
+
+	let (format, buf) = buf.split_at(1);
+	if format.get(0) != Some(&2) {
+		bail!("Unknown chunk format");
+	}
+
+	let mut decoder = ZlibDecoder::new(&buf[..]);
+	let mut decode_buffer = vec![];
+	decoder
+		.read_to_end(&mut decode_buffer)
+		.context("Failed to decompress chunk data")?;
+
+	fastnbt::from_bytes(&decode_buffer).context("Failed to decode NBT data")
+}
+
+#[derive(Debug)]
+pub struct Region<R: Read + Seek> {
+	reader: R,
+}
+
+impl<R: Read + Seek> Region<R> {
+	pub fn foreach_chunk<T, F>(self, mut f: F) -> Result<()>
+	where
+		R: Read + Seek,
+		T: DeserializeOwned,
+		F: FnMut(ChunkX, ChunkZ, T),
+	{
+		let Region { mut reader } = self;
+
+		let chunk_map = {
+			let mut header = [0u8; BLOCKSIZE];
+			reader
+				.read_exact(&mut header)
+				.context("Failed to read region header")?;
+
+			parse_header(&header)
+		};
+
+		let mut index = 1;
+		let mut count = 0;
+		let mut seen = [[false; CHUNKS_PER_REGION as usize]; CHUNKS_PER_REGION as usize];
+
+		while count < chunk_map.len() {
+			let Some(&ChunkDesc { x, z, len }) = chunk_map.get(&index) else {
+				reader.seek(SeekFrom::Current(BLOCKSIZE as i64)).context("Failed to seek chunk data")?;
+				index += 1;
+				continue;
+			};
+
+			let chunk_seen = &mut seen[x.0 as usize][z.0 as usize];
+			if *chunk_seen {
+				bail!("Duplicate chunk");
+			}
+			*chunk_seen = true;
+
+			count += 1;
+
+			let mut buffer = vec![0; (len as usize) * BLOCKSIZE];
+			reader
+				.read_exact(&mut buffer[..])
+				.context("Failed to read chunk data")?;
+
+			f(x, z, decode_chunk(&buffer[..])?);
+
+			index += len as u32;
+		}
+
+		Ok(())
+	}
+}
+
+pub fn from_reader<R>(reader: R) -> Region<R>
+where
+	R: Read + Seek,
+{
+	Region { reader }
+}
+
+pub fn from_file<P>(path: P) -> Result<Region<File>>
+where
+	P: AsRef<Path>,
+{
+	let file = File::open(path).context("Failed to open file")?;
+	Ok(from_reader(file))
+}
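As parse_header encodes, the first 4096-byte block of a region file holds one 4-byte entry per chunk of the 32×32 grid: a 3-byte big-endian offset counted in 4096-byte blocks, then a 1-byte length in blocks, with a zero offset marking an absent chunk. decode_chunk then expects each chunk payload to begin with a 4-byte big-endian data length and a format byte, where 2 denotes zlib compression. A minimal usage sketch of the moved API, matching the regiondump change above (the region path is a placeholder):

use anyhow::Result;

fn dump_region_chunks() -> Result<()> {
	// The closure's type annotation selects the per-chunk deserialization target.
	minedmap::io::region::from_file("world/region/r.0.0.mca")?
		.foreach_chunk(|x, z, chunk: fastnbt::Value| {
			println!("Chunk({}, {}): {:#x?}", x.0, z.0, chunk);
		})
}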

src/lib.rs (new file, 2 lines)

@@ -0,0 +1,2 @@
+pub mod io;
+pub mod types;

src/types.rs (new file, 9 lines)

@@ -0,0 +1,9 @@
+pub const CHUNKS_PER_REGION: u8 = 32;
+
+/// A chunk X coordinate relative to a region
+#[derive(Debug, Clone, Copy)]
+pub struct ChunkX(pub u8);
+
+/// A chunk Z coordinate relative to a region
+#[derive(Debug, Clone, Copy)]
+pub struct ChunkZ(pub u8);
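The ChunkX/ChunkZ newtypes exist so region-relative coordinates cannot be swapped silently. A hypothetical helper (not part of this commit) repeating parse_header's indexing arithmetic shows the effect:

use minedmap::types::{ChunkX, ChunkZ, CHUNKS_PER_REGION};

// Byte offset of a chunk's 4-byte entry in the region header table; passing
// the arguments in the wrong order is now a type error, not a silent bug.
fn header_entry_offset(x: ChunkX, z: ChunkZ) -> usize {
	4 * (usize::from(CHUNKS_PER_REGION) * usize::from(z.0) + usize::from(x.0))
}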