io/storage: add JSON support

Bincode can't deal with a number of serde's features that would be
desirable if we're using the same Serialize impls for generating JSON
data for the viewer.
This commit is contained in:
Matthias Schiffer 2023-11-26 14:12:51 +01:00
parent 5d40d061a4
commit c44f6ab859
Signed by: neocturne
GPG key ID: 16EF3F64CB201D9C
3 changed files with 30 additions and 8 deletions

View file

@@ -148,6 +148,7 @@ impl<'a> SingleRegionProcessor<'a> {
storage::write_file( storage::write_file(
&self.output_path, &self.output_path,
&self.processed_region, &self.processed_region,
storage::Format::Bincode,
REGION_FILE_META_VERSION, REGION_FILE_META_VERSION,
self.input_timestamp, self.input_timestamp,
) )

View file

@@ -105,7 +105,8 @@ impl<'a> TileRenderer<'a> {
region_loader region_loader
.get_or_try_init(|| async { .get_or_try_init(|| async {
storage::read_file(&processed_path).context("Failed to load processed region data") storage::read_file(&processed_path, storage::Format::Bincode)
.context("Failed to load processed region data")
}) })
.await .await
.cloned() .cloned()

View file

@@ -14,9 +14,24 @@ use serde::{de::DeserializeOwned, Serialize};
use super::fs; use super::fs;
/// Storage format for serialized data
///
/// Selects the encoding applied before zstd compression when reading
/// or writing processed data.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Format {
	/// Encode as Bincode
	///
	/// Bincode is more efficient than JSON, but cannot handle many of
	/// serde's features like flatten, conditional skipping, ...
	Bincode,
	/// Encode as JSON
	Json,
}
/// Serializes data and writes it to a writer /// Serializes data and writes it to a writer
pub fn write<W: Write, T: Serialize>(writer: &mut W, value: &T) -> Result<()> { pub fn write<W: Write, T: Serialize>(writer: &mut W, value: &T, format: Format) -> Result<()> {
let data = bincode::serialize(value)?; let data = match format {
Format::Bincode => bincode::serialize(value)?,
Format::Json => serde_json::to_vec(value)?,
};
let len = u32::try_from(data.len())?; let len = u32::try_from(data.len())?;
let compressed = zstd::bulk::compress(&data, 1)?; let compressed = zstd::bulk::compress(&data, 1)?;
drop(data); drop(data);
@@ -33,14 +48,15 @@ pub fn write<W: Write, T: Serialize>(writer: &mut W, value: &T) -> Result<()> {
pub fn write_file<T: Serialize>( pub fn write_file<T: Serialize>(
path: &Path, path: &Path,
value: &T, value: &T,
format: Format,
version: fs::FileMetaVersion, version: fs::FileMetaVersion,
timestamp: SystemTime, timestamp: SystemTime,
) -> Result<()> { ) -> Result<()> {
fs::create_with_timestamp(path, version, timestamp, |file| write(file, value)) fs::create_with_timestamp(path, version, timestamp, |file| write(file, value, format))
} }
/// Reads data from a reader and deserializes it /// Reads data from a reader and deserializes it
pub fn read<R: Read, T: DeserializeOwned>(reader: &mut R) -> Result<T> { pub fn read<R: Read, T: DeserializeOwned>(reader: &mut R, format: Format) -> Result<T> {
let mut len_buf = [0u8; 4]; let mut len_buf = [0u8; 4];
reader.read_exact(&mut len_buf)?; reader.read_exact(&mut len_buf)?;
let len = usize::try_from(u32::from_be_bytes(len_buf))?; let len = usize::try_from(u32::from_be_bytes(len_buf))?;
@@ -50,14 +66,18 @@ pub fn read<R: Read, T: DeserializeOwned>(reader: &mut R) -> Result<T> {
let data = zstd::bulk::decompress(&compressed, len)?; let data = zstd::bulk::decompress(&compressed, len)?;
drop(compressed); drop(compressed);
Ok(bincode::deserialize(&data)?) let value = match format {
Format::Bincode => bincode::deserialize(&data)?,
Format::Json => serde_json::from_slice(&data)?,
};
Ok(value)
} }
/// Reads data from a file and deserializes it /// Reads data from a file and deserializes it
pub fn read_file<T: DeserializeOwned>(path: &Path) -> Result<T> { pub fn read_file<T: DeserializeOwned>(path: &Path, format: Format) -> Result<T> {
(|| -> Result<T> { (|| -> Result<T> {
let mut file = File::open(path)?; let mut file = File::open(path)?;
read(&mut file) read(&mut file, format)
})() })()
.with_context(|| format!("Failed to read file {}", path.display())) .with_context(|| format!("Failed to read file {}", path.display()))
} }