Mirror of https://github.com/neocturne/MinedMap.git, synced 2025-03-05 17:44:52 +01:00
io/region: return sorted Vec instead of HashMap from parse_header()
Instead of counting and looking up indices in the HashMap, we can just iterate over all offsets in the returned Vec.
parent 6186a0d916
commit 8c34f74952
1 changed file with 29 additions and 25 deletions
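For context: each entry in a region file's header is a big-endian u32 packing a 24-bit sector offset (upper bits) and an 8-bit sector count (lower bits), which is what the `u32::from_be`, `>> 8`, and `as u8` steps below decode. A minimal standalone sketch of the pattern this commit adopts — collect the occupied entries into a Vec and sort by offset — where `Entry` and `parse` are hypothetical stand-ins for MinedMap's `ChunkDesc` and `parse_header()`:

	// Illustrative sketch only: `Entry` and `parse` are hypothetical
	// stand-ins, not MinedMap's actual API.
	#[derive(Debug)]
	struct Entry {
		offset: u32, // upper 24 bits: first 4 KiB sector of the chunk data
		len: u8,     // lower 8 bits: length in sectors
	}

	fn parse(raw: &[u32]) -> Vec<Entry> {
		let mut entries: Vec<_> = raw
			.iter()
			.filter_map(|&word| {
				let offset_len = u32::from_be(word); // header entries are big-endian
				let offset = offset_len >> 8;
				let len = offset_len as u8;
				// Offset 0 / length 0 marks an absent chunk.
				(offset != 0 && len != 0).then(|| Entry { offset, len })
			})
			.collect();

		// Sorted by offset, the caller can read all chunk data in one
		// sequential pass instead of probing successive indices.
		entries.sort_by_key(|entry| entry.offset);
		entries
	}

	fn main() {
		// Two occupied slots (offset 3/len 2, offset 2/len 1) and one empty slot.
		let raw = [0x0000_0302u32.to_be(), 0, 0x0000_0201u32.to_be()];
		for entry in parse(&raw) {
			println!("chunk at sector {}, {} sectors", entry.offset, entry.len);
		}
	}

Because the Vec comes back sorted by offset, the read loop in the diff below visits chunk data in file order, which is what makes dropping the HashMap index bookkeeping possible.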
@@ -1,5 +1,4 @@
 use std::{
-	collections::HashMap,
 	fs::File,
 	io::{prelude::*, SeekFrom},
 	path::Path,
@@ -15,27 +14,35 @@ const BLOCKSIZE: usize = 4096;
 
 #[derive(Debug)]
 struct ChunkDesc {
-	coords: ChunkCoords,
+	offset: u32,
 	len: u8,
+	coords: ChunkCoords,
 }
 
-fn parse_header(header: &ChunkArray<u32>) -> HashMap<u32, ChunkDesc> {
-	let mut map = HashMap::new();
-
-	for (coords, &chunk) in header.iter() {
-		let offset_len = u32::from_be(chunk);
-
-		let offset = offset_len >> 8;
-		let len = offset_len as u8;
-
-		if offset == 0 || len == 0 {
-			continue;
-		}
-
-		map.insert(offset, ChunkDesc { coords, len });
-	}
-
-	map
+fn parse_header(header: &ChunkArray<u32>) -> Vec<ChunkDesc> {
+	let mut chunks: Vec<_> = header
+		.iter()
+		.filter_map(|(coords, &chunk)| {
+			let offset_len = u32::from_be(chunk);
+
+			let offset = offset_len >> 8;
+			let len = offset_len as u8;
+
+			if offset == 0 || len == 0 {
+				return None;
+			}
+
+			Some(ChunkDesc {
+				offset,
+				len,
+				coords,
+			})
+		})
+		.collect();
+
+	chunks.sort_by_key(|chunk| chunk.offset);
+
+	chunks
 }
 
 fn decode_chunk<T>(buf: &[u8]) -> Result<T>
@@ -70,7 +77,7 @@ impl<R: Read + Seek> Region<R> {
 	{
 		let Region { mut reader } = self;
 
-		let mut chunk_map = {
+		let chunks = {
 			let mut header = ChunkArray::<u32>::default();
 			reader
 				.read_exact(bytemuck::cast_mut::<_, [u8; BLOCKSIZE]>(&mut header.0))
@@ -79,22 +86,21 @@ impl<R: Read + Seek> Region<R> {
 			parse_header(&header)
 		};
 
-		let mut index = 1;
 		let mut seen = ChunkArray::<bool>::default();
 
-		while !chunk_map.is_empty() {
-			let Some(ChunkDesc { coords, len }) = chunk_map.remove(&index) else {
-				index += 1;
-				continue;
-			};
-
+		for ChunkDesc {
+			offset,
+			len,
+			coords,
+		} in chunks
+		{
 			if seen[coords] {
 				bail!("Duplicate chunk {:?}", coords);
 			}
 			seen[coords] = true;
 
 			reader
-				.seek(SeekFrom::Start(index as u64 * BLOCKSIZE as u64))
+				.seek(SeekFrom::Start(offset as u64 * BLOCKSIZE as u64))
 				.context("Failed to seek chunk data")?;
 
 			let mut len_buf = [0u8; 4];
@@ -114,8 +120,6 @@ impl<R: Read + Seek> Region<R> {
 				.with_context(|| format!("Failed to decode data for chunk {:?}", coords))?;
 
 			f(coords, chunk);
-
-			index += len as u32;
 		}
 
 		Ok(())