Mirror of https://github.com/neocturne/MinedMap.git
treewide: update to bincode 2
Consistently use bincode's Encode/Decode to avoid issues with incompatible serde features. Support for storing some temporary files as JSON is removed.

The size of the "processed" directory is reduced by ~8% with the new default encoding of bincode 2. Performance is more or less unaffected.
parent 404ad74235
commit 53a0f24600

16 changed files with 133 additions and 81 deletions
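The change follows one pattern throughout: replace serde's Serialize/Deserialize derives with bincode's own Encode/Decode, and encode with bincode 2's default configuration instead of selecting a format per call. A minimal, self-contained sketch of that round trip, assuming bincode 2.x with the derive feature enabled (the Example type is a stand-in, not MinedMap code):

use bincode::{config, Decode, Encode};

// Stand-in struct illustrating the same derive swap the commit applies
// to ProcessedChunk, ProcessedRegion, and ProcessedEntities.
#[derive(Debug, PartialEq, Encode, Decode)]
struct Example {
	blocks: Vec<u16>,
	depth: Option<u8>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
	let value = Example {
		blocks: vec![1, 2, 3],
		depth: Some(4),
	};

	// config::standard() is bincode 2's default encoding; its variable-length
	// integer representation is a plausible source of the ~8% size reduction
	// mentioned in the commit message.
	let encoded = bincode::encode_to_vec(&value, config::standard())?;
	let (decoded, _len): (Example, usize) =
		bincode::decode_from_slice(&encoded, config::standard())?;
	assert_eq!(value, decoded);
	Ok(())
}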
@@ -3,13 +3,15 @@
 use std::{
 	collections::{BTreeMap, BTreeSet},
+	fmt::Debug,
 	hash::Hash,
 	path::{Path, PathBuf},
 };
 
 use anyhow::{Context, Result};
+use bincode::{Decode, Encode};
 use clap::ValueEnum;
 use regex::{Regex, RegexSet};
-use serde::{Deserialize, Serialize};
+use serde::Serialize;
 
 use crate::{
 	io::fs::FileMetaVersion,
@@ -24,7 +26,7 @@ use crate::{
 ///
 /// Increase when the generation of processed regions from region data changes
 /// (usually because of updated resource data)
-pub const REGION_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(5);
+pub const REGION_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(6);
 
 /// MinedMap map tile data version number
 ///
@@ -46,7 +48,7 @@ pub const MIPMAP_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(0);
 /// MinedMap processed entity data version number
 ///
 /// Increase when entity collection changes because of code changes.
-pub const ENTITIES_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(1);
+pub const ENTITIES_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(2);
 
 /// Coordinate pair of a generated tile
 ///
@@ -85,7 +87,7 @@ impl TileCoordMap {
 }
 
 /// Data structure for storing chunk data between processing and rendering steps
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Encode, Decode)]
 pub struct ProcessedChunk {
 	/// Block type data
 	pub blocks: Box<layer::BlockArray>,
@@ -96,7 +98,7 @@ pub struct ProcessedChunk {
 }
 
 /// Data structure for storing region data between processing and rendering steps
-#[derive(Debug, Default, Serialize, Deserialize)]
+#[derive(Debug, Default, Encode, Decode)]
 pub struct ProcessedRegion {
 	/// List of biomes used in the region
 	///
@@ -107,7 +109,7 @@ pub struct ProcessedRegion {
 }
 
 /// Data structure for storing entity data between processing and collection steps
-#[derive(Debug, Default, Serialize, Deserialize)]
+#[derive(Debug, Default, Encode, Decode)]
 pub struct ProcessedEntities {
 	/// List of block entities
 	pub block_entities: Vec<BlockEntity>,
@@ -78,23 +78,22 @@ impl<'a> EntityCollector<'a> {
 		let mut output = ProcessedEntities::default();
 
 		for source_path in sources {
-			let mut source: ProcessedEntities =
-				match storage::read_file(source_path.as_ref(), storage::Format::Json) {
-					Ok(source) => source,
-					Err(err) => {
-						warn!(
-							"Failed to read entity data file {}: {:?}",
-							source_path.as_ref().display(),
-							err,
-						);
-						continue;
-					}
-				};
+			let mut source: ProcessedEntities = match storage::read_file(source_path.as_ref()) {
+				Ok(source) => source,
+				Err(err) => {
+					warn!(
+						"Failed to read entity data file {}: {:?}",
+						source_path.as_ref().display(),
+						err,
+					);
+					continue;
+				}
+			};
 
 			output.block_entities.append(&mut source.block_entities);
 		}
 
-		storage::write(file, &output, storage::Format::Json).context("Failed to write entity data")
+		storage::write(file, &output).context("Failed to write entity data")
 	}
 
 	/// Runs the mipmap generation
@@ -179,9 +179,8 @@ impl<'a> MetadataWriter<'a> {
 
 	/// Generates [Entities] data from collected entity lists
 	fn entities(&self) -> Result<Entities> {
-		let data: ProcessedEntities =
-			storage::read_file(&self.config.entities_path_final, storage::Format::Json)
-				.context("Failed to read entity data file")?;
+		let data: ProcessedEntities = storage::read_file(&self.config.entities_path_final)
+			.context("Failed to read entity data file")?;
 
 		let ret = Entities {
 			signs: data
@@ -168,7 +168,6 @@ impl<'a> SingleRegionProcessor<'a> {
 		storage::write_file(
 			&self.output_path,
 			&self.processed_region,
-			storage::Format::Bincode,
 			REGION_FILE_META_VERSION,
 			self.input_timestamp,
 		)
@@ -207,7 +206,6 @@ impl<'a> SingleRegionProcessor<'a> {
 		storage::write_file(
 			&self.entities_path,
 			&self.entities,
-			storage::Format::Json,
 			ENTITIES_FILE_META_VERSION,
 			self.input_timestamp,
 		)
@@ -105,8 +105,7 @@ impl<'a> TileRenderer<'a> {
 
 		region_loader
 			.get_or_try_init(|| async {
-				storage::read_file(&processed_path, storage::Format::Bincode)
-					.context("Failed to load processed region data")
+				storage::read_file(&processed_path).context("Failed to load processed region data")
 			})
 			.await
 			.cloned()
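With the Format argument gone from every call site above, the storage helpers presumably encode and decode with bincode unconditionally. A hypothetical sketch of the simplified helpers, matching the call shapes seen in the hunks; the actual storage module is not part of this excerpt and may differ (for instance by compressing the payload or writing metadata headers):

use std::{fs::File, io::Write, path::Path};

use anyhow::{Context, Result};
use bincode::{config, Decode, Encode};

/// Encodes a value with bincode's standard configuration and writes it out.
/// Hypothetical signature inferred from `storage::write(file, &output)`.
pub fn write<T: Encode>(file: &mut File, value: &T) -> Result<()> {
	let data = bincode::encode_to_vec(value, config::standard())
		.context("Failed to encode data")?;
	file.write_all(&data).context("Failed to write data")
}

/// Reads a whole file and decodes it with the same configuration.
/// Hypothetical signature inferred from `storage::read_file(&path)`.
pub fn read_file<T: Decode<()>>(path: &Path) -> Result<T> {
	let data = std::fs::read(path)
		.with_context(|| format!("Failed to read {}", path.display()))?;
	let (value, _len) = bincode::decode_from_slice(&data, config::standard())
		.context("Failed to decode data")?;
	Ok(value)
}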