Restructure crates

Get rid of the arbitrary bin/lib split and instead move as much as
possible into the bin crate, which becomes the main crate again.

The types and NBT handling are moved into separate crates, so they can
be reused by nbtdump and regiondump.
Matthias Schiffer 2023-08-20 16:28:10 +02:00
parent 09399f5ae9
commit 248a641035
Signed by: neocturne
GPG key ID: 16EF3F64CB201D9C
21 changed files with 121 additions and 62 deletions
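The split makes the NBT layer reusable outside the main binary; below is a minimal sketch of what a regiondump/nbtdump-style consumer could look like, assuming the extracted crate keeps the generic `data::from_file` entry point that the MetadataWriter below still calls as `crate::nbt::data::from_file` (the crate name `minedmap_nbt` and the pared-down `LevelDat` type here are hypothetical):

use anyhow::{Context, Result};
use serde::Deserialize;

/// A dump tool only needs to model the NBT fields it wants to print.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct LevelData {
	spawn_x: i32,
	spawn_z: i32,
}

/// `level.dat` root compound (only the `Data` entry is modelled here).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct LevelDat {
	data: LevelData,
}

fn dump_spawn(path: &std::path::Path) -> Result<()> {
	// Hypothetical reuse of the extracted NBT crate's generic deserializer
	let level_dat: LevelDat =
		minedmap_nbt::data::from_file(path).context("Failed to read level.dat")?;
	println!("spawn: ({}, {})", level_dat.data.spawn_x, level_dat.data.spawn_z);
	Ok(())
}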

src/core/common.rs (new file, 175 lines)

@@ -0,0 +1,175 @@
//! Common data types and functions used by multiple generation steps
use std::{
collections::{BTreeMap, BTreeSet},
fmt::Debug,
path::{Path, PathBuf},
};
use indexmap::IndexSet;
use serde::{Deserialize, Serialize};
use crate::{io::fs::FileMetaVersion, resource::Biome, types::*, world::layer};
/// MinedMap data version number
///
/// Increase to force regeneration of all output files
pub const FILE_META_VERSION: FileMetaVersion = FileMetaVersion(0);
/// Coordinate pair of a generated tile
///
/// Each tile corresponds to one Minecraft region file
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TileCoords {
/// The X coordinate
pub x: i32,
/// The Z coordinate
pub z: i32,
}
impl Debug for TileCoords {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "({}, {})", self.x, self.z)
}
}
/// Set of tile coordinates
///
/// Used to store the list of populated tiles for each mipmap level in the
/// viewer metadata file.
#[derive(Debug, Clone, Default, Serialize)]
#[serde(transparent)]
pub struct TileCoordMap(pub BTreeMap<i32, BTreeSet<i32>>);
impl TileCoordMap {
/// Checks whether the map contains a given coordinate pair
pub fn contains(&self, coords: TileCoords) -> bool {
let Some(xs) = self.0.get(&coords.z) else {
return false;
};
xs.contains(&coords.x)
}
}
/// Data structure for storing chunk data between processing and rendering steps
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProcessedChunk {
/// Block type data
pub blocks: Box<layer::BlockArray>,
/// Biome data
pub biomes: Box<layer::BiomeArray>,
/// Block height/depth data
pub depths: Box<layer::DepthArray>,
}
/// Data structure for storing region data between processing and rendering steps
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ProcessedRegion {
/// List of biomes used in the region
///
/// Indexed by [ProcessedChunk] biome data
pub biome_list: IndexSet<Biome>,
/// Processed chunk data
pub chunks: ChunkArray<Option<Box<ProcessedChunk>>>,
}
/// Derives a filename from region coordinates and a file extension
///
/// Can be used for input regions, processed data or rendered tiles
fn coord_filename(coords: TileCoords, ext: &str) -> String {
format!("r.{}.{}.{}", coords.x, coords.z, ext)
}
/// Tile kind corresponding to a map layer
#[derive(Debug, Clone, Copy)]
pub enum TileKind {
/// Regular map tile contains block colors
Map,
/// Lightmap tile for illumination layer
Lightmap,
}
/// Common configuration based on command line arguments
pub struct Config {
/// Number of threads for parallel processing
pub num_threads: usize,
/// Path of input region directory
pub region_dir: PathBuf,
/// Path of input `level.dat` file
pub level_dat_path: PathBuf,
/// Base path for storage of rendered tile data
pub output_dir: PathBuf,
/// Path for storage of intermediate processed data files
pub processed_dir: PathBuf,
/// Path of viewer metadata file
pub metadata_path: PathBuf,
}
impl Config {
/// Creates a new [Config] from [command line arguments](super::Args)
pub fn new(args: &super::Args) -> Self {
let num_threads = match args.jobs {
Some(0) => num_cpus::get(),
Some(threads) => threads,
None => 1,
};
let region_dir = [&args.input_dir, Path::new("region")].iter().collect();
let level_dat_path = [&args.input_dir, Path::new("level.dat")].iter().collect();
let processed_dir = [&args.output_dir, Path::new("processed")].iter().collect();
let metadata_path = [&args.output_dir, Path::new("info.json")].iter().collect();
Config {
num_threads,
region_dir,
level_dat_path,
output_dir: args.output_dir.clone(),
processed_dir,
metadata_path,
}
}
/// Constructs the path to an input region file
pub fn region_path(&self, coords: TileCoords) -> PathBuf {
let filename = coord_filename(coords, "mca");
[&self.region_dir, Path::new(&filename)].iter().collect()
}
/// Constructs the path of an intermediate processed region file
pub fn processed_path(&self, coords: TileCoords) -> PathBuf {
let filename = coord_filename(coords, "bin");
[&self.processed_dir, Path::new(&filename)].iter().collect()
}
/// Constructs the base output path for a [TileKind] and mipmap level
pub fn tile_dir(&self, kind: TileKind, level: usize) -> PathBuf {
let prefix = match kind {
TileKind::Map => "map",
TileKind::Lightmap => "light",
};
let dir = format!("{}/{}", prefix, level);
[&self.output_dir, Path::new(&dir)].iter().collect()
}
/// Constructs the path of an output tile image
pub fn tile_path(&self, kind: TileKind, level: usize, coords: TileCoords) -> PathBuf {
let filename = coord_filename(coords, "png");
let dir = self.tile_dir(kind, level);
[Path::new(&dir), Path::new(&filename)].iter().collect()
}
}
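// Path layout sketch for TileCoords { x: -1, z: 2 } (directories as configured above):
//   region_path:             <input_dir>/region/r.-1.2.mca
//   processed_path:          <output_dir>/processed/r.-1.2.bin
//   tile_path(Map, 0):       <output_dir>/map/0/r.-1.2.png
//   tile_path(Lightmap, 3):  <output_dir>/light/3/r.-1.2.png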
/// Copies a chunk image into a region tile
pub fn overlay_chunk<I, J>(image: &mut I, chunk: &J, coords: ChunkCoords)
where
I: image::GenericImage,
J: image::GenericImageView<Pixel = I::Pixel>,
{
image::imageops::overlay(
image,
chunk,
coords.x.0 as i64 * BLOCKS_PER_CHUNK as i64,
coords.z.0 as i64 * BLOCKS_PER_CHUNK as i64,
);
}
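// Placement sketch: assuming BLOCKS_PER_CHUNK == 16 and CHUNKS_PER_REGION == 32 (the
// vanilla values), the chunk at ChunkCoords { x: 3, z: 5 } is copied to pixel offset
// (48, 80) of the 512x512 region tile.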

src/core/metadata_writer.rs (new file, 129 lines)

@@ -0,0 +1,129 @@
//! The [MetadataWriter] and related types
use anyhow::{Context, Result};
use serde::Serialize;
use crate::{core::common::*, io::fs, world::de};
/// Minimum and maximum X and Z tile coordinates for a mipmap level
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct Bounds {
/// Minimum X coordinate
min_x: i32,
/// Maximum X coordinate
max_x: i32,
/// Minimum Z coordinate
min_z: i32,
/// Maximum Z coordinate
max_z: i32,
}
/// Mipmap level information in viewer metadata file
#[derive(Debug, Serialize)]
struct Mipmap<'t> {
/// Minimum and maximum tile coordinates of the mipmap level
bounds: Bounds,
/// Map of populated tiles for the mipmap level
regions: &'t TileCoordMap,
}
/// Initial spawn point for new players
#[derive(Debug, Serialize)]
struct Spawn {
/// Spawn X coordinate
x: i32,
/// Spawn Z coordinate
z: i32,
}
/// Viewer metadata JSON data structure
#[derive(Debug, Serialize)]
struct Metadata<'t> {
/// Tile information for each mipmap level
mipmaps: Vec<Mipmap<'t>>,
/// Initial spawn point for new players
spawn: Spawn,
}
/// The MetadataWriter is used to generate the viewer metadata file
pub struct MetadataWriter<'a> {
/// Common MinedMap configuration from command line
config: &'a Config,
/// Map of generated tiles for each mipmap level
tiles: &'a [TileCoordMap],
}
impl<'a> MetadataWriter<'a> {
/// Creates a new MetadataWriter
pub fn new(config: &'a Config, tiles: &'a [TileCoordMap]) -> Self {
MetadataWriter { config, tiles }
}
/// Helper to construct a [Mipmap] data structure from a [TileCoordMap]
fn mipmap_entry(regions: &TileCoordMap) -> Mipmap {
let mut min_x = i32::MAX;
let mut max_x = i32::MIN;
let mut min_z = i32::MAX;
let mut max_z = i32::MIN;
for (&z, xs) in &regions.0 {
if z < min_z {
min_z = z;
}
if z > max_z {
max_z = z;
}
for &x in xs {
if x < min_x {
min_x = x;
}
if x > max_x {
max_x = x;
}
}
}
Mipmap {
bounds: Bounds {
min_x,
max_x,
min_z,
max_z,
},
regions,
}
}
/// Reads and deserializes the `level.dat` of the Minecraft save data
fn read_level_dat(&self) -> Result<de::LevelDat> {
crate::nbt::data::from_file(&self.config.level_dat_path).context("Failed to read level.dat")
}
/// Generates [Spawn] data from a [de::LevelDat]
fn spawn(level_dat: &de::LevelDat) -> Spawn {
Spawn {
x: level_dat.data.spawn_x,
z: level_dat.data.spawn_z,
}
}
/// Runs the viewer metadata file generation
pub fn run(self) -> Result<()> {
let level_dat = self.read_level_dat()?;
let mut metadata = Metadata {
mipmaps: Vec::new(),
spawn: Self::spawn(&level_dat),
};
for tile_map in self.tiles.iter() {
metadata.mipmaps.push(Self::mipmap_entry(tile_map));
}
fs::create_with_tmpfile(&self.config.metadata_path, |file| {
serde_json::to_writer(file, &metadata).context("Failed to write metadata")
})
}
}
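// Shape of the resulting viewer metadata JSON, sketched for a single mipmap level
// (field names follow the serde attributes above: `rename_all = "camelCase"` on
// Bounds, and the transparent TileCoordMap serializing as a map from Z to a list of
// X coordinates):
//
//   {
//     "mipmaps": [
//       {
//         "bounds": { "minX": -1, "maxX": 0, "minZ": -1, "maxZ": 1 },
//         "regions": { "-1": [0], "0": [-1, 0], "1": [0] }
//       }
//     ],
//     "spawn": { "x": 112, "z": -48 }
//   }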

src/core/mod.rs (new file, 61 lines)

@@ -0,0 +1,61 @@
//! Core functions of the MinedMap CLI
mod common;
mod metadata_writer;
mod region_group;
mod region_processor;
mod tile_mipmapper;
mod tile_renderer;
use std::path::PathBuf;
use anyhow::{Context, Result};
use clap::Parser;
use common::Config;
use metadata_writer::MetadataWriter;
use region_processor::RegionProcessor;
use tile_mipmapper::TileMipmapper;
use tile_renderer::TileRenderer;
/// Command line arguments for minedmap
#[derive(Debug, Parser)]
pub struct Args {
/// Number of parallel threads to use for processing
///
/// If not given, only a single thread is used. Pass 0 to
/// use one thread per logical CPU core.
#[arg(short, long)]
pub jobs: Option<usize>,
/// Minecraft save directory
pub input_dir: PathBuf,
/// MinedMap data directory
pub output_dir: PathBuf,
}
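// Example invocation (paths are placeholders): `minedmap -j 0 ~/.minecraft/saves/World /srv/map`
// processes the save with one thread per logical CPU core and writes the map/, light/
// and processed/ directories plus info.json below /srv/map.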
/// Configures the Rayon thread pool for parallel processing
fn setup_threads(num_threads: usize) -> Result<()> {
rayon::ThreadPoolBuilder::new()
.num_threads(num_threads)
.build_global()
.context("Failed to configure thread pool")
}
/// MinedMap CLI main function
pub fn cli() -> Result<()> {
let args = Args::parse();
let config = Config::new(&args);
setup_threads(config.num_threads)?;
let rt = tokio::runtime::Builder::new_current_thread()
.build()
.unwrap();
let regions = RegionProcessor::new(&config).run()?;
TileRenderer::new(&config, &rt, &regions).run()?;
let tiles = TileMipmapper::new(&config, &regions).run()?;
MetadataWriter::new(&config, &tiles).run()?;
Ok(())
}

src/core/region_group.rs (new file, 143 lines)

@@ -0,0 +1,143 @@
//! The generic [RegionGroup] data structure
use std::{future::Future, iter};
use anyhow::Result;
use futures_util::future::OptionFuture;
/// A generic array of 3x3 elements
///
/// A RegionGroup is used to store information about a 3x3 neighborhood of
/// regions.
///
/// The center element is always populated, while the 8 adjacent elements may be None.
#[derive(Debug, Clone, Copy)]
pub struct RegionGroup<T> {
/// The element corresponding to the center of the 3x3 neighborhood
center: T,
/// The remaining elements, stored in row-first order
///
/// The center element is always None.
neighs: [Option<T>; 9],
}
impl<T> RegionGroup<T> {
/// Constructs a new RegionGroup from a closure called for each element
///
/// The X and Z coordinates relative to the center (in the range -1..1)
/// are passed to the closure.
///
/// Panics if the closure returns None for the center element.
pub fn new<F>(f: F) -> Self
where
F: Fn(i8, i8) -> Option<T>,
{
RegionGroup {
center: f(0, 0).expect("Center element of RegionGroup must not be None"),
neighs: [
f(-1, -1),
f(-1, 0),
f(-1, 1),
f(0, -1),
None,
f(0, 1),
f(1, -1),
f(1, 0),
f(1, 1),
],
}
}
/// Returns a reference to the center element
pub fn center(&self) -> &T {
&self.center
}
/// Returns a reference to an element of the RegionGroup, if populated
///
/// Always returns None for X and Z coordinates outside of the -1..1 range.
pub fn get(&self, x: i8, z: i8) -> Option<&T> {
if (x, z) == (0, 0) {
return Some(&self.center);
}
if !(-1..=1).contains(&x) || !(-1..=1).contains(&z) {
return None;
}
self.neighs.get((3 * x + z + 4) as usize)?.as_ref()
}
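// Index sketch for `neighs` (row-first, center excluded): for x, z in -1..=1 the
// expression 3 * x + z + 4 maps (-1, -1) -> 0, (-1, 1) -> 2, (1, -1) -> 6 and
// (1, 1) -> 8; (0, 0) would map to 4, which is kept as None and short-circuited above.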
/// Runs a closure on each element to construct a new RegionGroup
pub fn map<U, F>(self, mut f: F) -> RegionGroup<U>
where
F: FnMut(T) -> U,
{
RegionGroup {
center: f(self.center),
neighs: self.neighs.map(|entry| entry.map(&mut f)),
}
}
/// Runs a fallible closure on each element to construct a new RegionGroup
///
/// [Err] return values for the center element are passed up. Outer elements
/// become unpopulated when the closure fails.
pub fn try_map<U, F>(self, mut f: F) -> Result<RegionGroup<U>>
where
F: FnMut(T) -> Result<U>,
{
let RegionGroup { center, neighs } = self;
let center = f(center)?;
let neighs = neighs.map(|entry| entry.and_then(|value| f(value).ok()));
Ok(RegionGroup { center, neighs })
}
/// Runs an asynchronous closure on each element to construct a new RegionGroup
#[allow(dead_code)]
pub async fn async_map<U, F, Fut>(self, mut f: F) -> RegionGroup<U>
where
Fut: Future<Output = U>,
F: FnMut(T) -> Fut,
{
let center = f(self.center);
let neighs = futures_util::future::join_all(
self.neighs
.map(|entry| OptionFuture::from(entry.map(&mut f))),
);
let (center, neighs) = futures_util::join!(center, neighs);
RegionGroup {
center,
neighs: <[Option<_>; 9]>::try_from(neighs).ok().unwrap(),
}
}
/// Runs a fallible asynchronous closure on each element to construct a new RegionGroup
///
/// [Err] return values for the center element are passed up. Outer elements
/// become unpopulated when the closure fails.
pub async fn async_try_map<U, F, Fut>(self, mut f: F) -> Result<RegionGroup<U>>
where
Fut: Future<Output = Result<U>>,
F: FnMut(T) -> Fut,
{
let center = f(self.center);
let neighs = futures_util::future::join_all(
self.neighs
.map(|entry| OptionFuture::from(entry.map(&mut f))),
);
let (center, neighs) = futures_util::join!(center, neighs);
let center = center?;
let neighs: Vec<_> = neighs
.into_iter()
.map(|entry| entry.and_then(Result::ok))
.collect();
let neighs = <[Option<_>; 9]>::try_from(neighs).ok().unwrap();
Ok(RegionGroup { center, neighs })
}
/// Returns an [Iterator] over all populated elements
pub fn iter(&self) -> impl Iterator<Item = &T> {
iter::once(&self.center).chain(self.neighs.iter().filter_map(Option::as_ref))
}
}

src/core/region_processor.rs (new file, 213 lines)

@@ -0,0 +1,213 @@
//! The [RegionProcessor] and related functions
use std::{ffi::OsStr, path::Path, time::SystemTime};
use anyhow::{Context, Result};
use indexmap::IndexSet;
use rayon::prelude::*;
use super::common::*;
use crate::{
io::{fs, storage},
resource::{self, Biome},
types::*,
world::{
self,
layer::{self, LayerData},
},
};
/// Parses a filename in the format r.X.Z.mca into the contained X and Z values
fn parse_region_filename(file_name: &OsStr) -> Option<TileCoords> {
let parts: Vec<_> = file_name.to_str()?.split('.').collect();
let &["r", x, z, "mca"] = parts.as_slice() else {
return None;
};
Some(TileCoords {
x: x.parse().ok()?,
z: z.parse().ok()?,
})
}
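// e.g. "r.-3.12.mca" parses to TileCoords { x: -3, z: 12 }; names with a different
// extension, extra dots or non-numeric coordinates yield None.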
/// Type with methods for processing the regions of a Minecraft save directory
///
/// The RegionProcessor builds lightmap tiles as well as processed region data
/// consumed by subsequent generation steps.
pub struct RegionProcessor<'a> {
/// Registry of known block types
block_types: resource::BlockTypes,
/// Registry of known biome types
biome_types: resource::BiomeTypes,
/// Common MinedMap configuration from command line
config: &'a Config,
}
impl<'a> RegionProcessor<'a> {
/// Constructs a new RegionProcessor
pub fn new(config: &'a Config) -> Self {
RegionProcessor {
block_types: resource::BlockTypes::default(),
biome_types: resource::BiomeTypes::default(),
config,
}
}
/// Generates a list of all regions of the input Minecraft save data
fn collect_regions(&self) -> Result<Vec<TileCoords>> {
Ok(self
.config
.region_dir
.read_dir()
.with_context(|| {
format!(
"Failed to read directory {}",
self.config.region_dir.display()
)
})?
.filter_map(|entry| entry.ok())
.filter(|entry| {
// We are only interested in regular files
matches!(
entry.file_type().map(|file_type| file_type.is_file()),
Ok(true)
)
})
.filter_map(|entry| parse_region_filename(&entry.file_name()))
.collect())
}
/// Processes a single chunk
fn process_chunk(
&self,
biome_list: &mut IndexSet<Biome>,
data: world::de::Chunk,
) -> Result<Option<LayerData>> {
let chunk = world::chunk::Chunk::new(&data, &self.block_types, &self.biome_types)?;
world::layer::top_layer(biome_list, &chunk)
}
/// Renders a lightmap subtile from chunk block light data
fn render_chunk_lightmap(
block_light: Box<world::layer::BlockLightArray>,
) -> image::GrayAlphaImage {
/// Width/height of generated chunk lightmap
const N: u32 = BLOCKS_PER_CHUNK as u32;
image::GrayAlphaImage::from_fn(N, N, |x, z| {
let v: f32 = block_light[LayerBlockCoords {
x: BlockX::new(x),
z: BlockZ::new(z),
}]
.into();
image::LumaA([0, (192.0 * (1.0 - v / 15.0)) as u8])
})
}
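// Alpha sketch: block light 15 (full light) maps to 192 * (1 - 15/15) = 0 (fully
// transparent), light 0 to 192 (the darkest overlay) and light 8 to roughly 90.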
/// Saves processed region data
///
/// The timestamp is the time of the last modification of the input region data.
fn save_region(
path: &Path,
processed_region: &ProcessedRegion,
timestamp: SystemTime,
) -> Result<()> {
storage::write(path, processed_region, FILE_META_VERSION, timestamp)
}
/// Saves a lightmap tile
///
/// The timestamp is the time of the last modification of the input region data.
fn save_lightmap(
path: &Path,
lightmap: &image::GrayAlphaImage,
timestamp: SystemTime,
) -> Result<()> {
fs::create_with_timestamp(path, FILE_META_VERSION, timestamp, |file| {
lightmap
.write_to(file, image::ImageFormat::Png)
.context("Failed to save image")
})
}
/// Processes a single region file
fn process_region(&self, coords: TileCoords) -> Result<()> {
/// Width/height of the region data
const N: u32 = (BLOCKS_PER_CHUNK * CHUNKS_PER_REGION) as u32;
let mut processed_region = ProcessedRegion::default();
let mut lightmap = image::GrayAlphaImage::new(N, N);
let path = self.config.region_path(coords);
let input_timestamp = fs::modified_timestamp(&path)?;
let output_path = self.config.processed_path(coords);
let output_timestamp = fs::read_timestamp(&output_path, FILE_META_VERSION);
let lightmap_path = self.config.tile_path(TileKind::Lightmap, 0, coords);
let lightmap_timestamp = fs::read_timestamp(&lightmap_path, FILE_META_VERSION);
if Some(input_timestamp) <= output_timestamp && Some(input_timestamp) <= lightmap_timestamp
{
println!("Skipping unchanged region r.{}.{}.mca", coords.x, coords.z);
return Ok(());
}
println!("Processing region r.{}.{}.mca", coords.x, coords.z);
crate::nbt::region::from_file(path)?.foreach_chunk(
|chunk_coords, data: world::de::Chunk| {
let Some(layer::LayerData {
blocks,
biomes,
block_light,
depths,
}) = self
.process_chunk(&mut processed_region.biome_list, data)
.with_context(|| format!("Failed to process chunk {:?}", chunk_coords))?
else {
return Ok(());
};
processed_region.chunks[chunk_coords] = Some(Box::new(ProcessedChunk {
blocks,
biomes,
depths,
}));
let chunk_lightmap = Self::render_chunk_lightmap(block_light);
overlay_chunk(&mut lightmap, &chunk_lightmap, chunk_coords);
Ok(())
},
)?;
if Some(input_timestamp) > output_timestamp {
Self::save_region(&output_path, &processed_region, input_timestamp)?;
}
if Some(input_timestamp) > lightmap_timestamp {
Self::save_lightmap(&lightmap_path, &lightmap, input_timestamp)?;
}
Ok(())
}
/// Iterates over all region files of a Minecraft save directory
///
/// Returns a list of the coordinates of all processed regions
pub fn run(self) -> Result<Vec<TileCoords>> {
let mut regions = self.collect_regions()?;
// Sort regions in a zig-zag pattern to optimize cache usage
regions.sort_unstable_by_key(|&TileCoords { x, z }| (x, if x % 2 == 0 { z } else { -z }));
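// e.g. for x and z in -1..=1 this visits (-1,1), (-1,0), (-1,-1), (0,-1), (0,0),
// (0,1), (1,1), (1,0), (1,-1): columns in increasing x with alternating z direction,
// so consecutive regions are usually direct neighbors.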
fs::create_dir_all(&self.config.processed_dir)?;
fs::create_dir_all(&self.config.tile_dir(TileKind::Lightmap, 0))?;
regions.par_iter().for_each(|&coords| {
if let Err(err) = self.process_region(coords) {
eprintln!("Failed to process region {:?}: {:?}", coords, err);
}
});
Ok(regions)
}
}

src/core/tile_mipmapper.rs (new file, 191 lines)

@@ -0,0 +1,191 @@
//! The [TileMipmapper]
use anyhow::{Context, Result};
use rayon::prelude::*;
use super::common::*;
use crate::{io::fs, types::*};
/// Generates mipmap tiles from full-resolution tile images
pub struct TileMipmapper<'a> {
/// Common MinedMap configuration from command line
config: &'a Config,
/// List of populated tiles for base mipmap level (level 0)
regions: &'a [TileCoords],
}
impl<'a> TileMipmapper<'a> {
/// Constructs a new TileMipmapper
pub fn new(config: &'a Config, regions: &'a [TileCoords]) -> Self {
TileMipmapper { config, regions }
}
/// Helper to determine if no further mipmap levels are needed
///
/// If all tile coordinates are -1 or 0, further mipmap levels will not
/// decrease the number of tiles, and mipmap generation is considered finished.
fn done(tiles: &TileCoordMap) -> bool {
tiles
.0
.iter()
.all(|(z, xs)| (-1..=0).contains(z) && xs.iter().all(|x| (-1..=0).contains(x)))
}
/// Derives the map of populated tile coordinates for the next mipmap level
fn map_coords(tiles: &TileCoordMap) -> TileCoordMap {
let mut ret = TileCoordMap::default();
for (&z, xs) in &tiles.0 {
for &x in xs {
let xt = x >> 1;
let zt = z >> 1;
ret.0.entry(zt).or_default().insert(xt);
}
}
ret
}
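// `>> 1` is an arithmetic shift, so x = -1 and x = -2 both map to -1 while x = 0 and
// x = 1 map to 0: the four level-N tiles (2x, 2x+1) x (2z, 2z+1) collapse into the
// level-N+1 tile (x, z), and done() above stops once only coordinates -1 and 0 remain.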
/// Renders and saves a single mipmap tile image
///
/// Each mipmap tile is rendered by taking 2x2 tiles from the
/// previous level and scaling them down by 50%.
fn render_mipmap<P: image::PixelWithColorType>(
&self,
kind: TileKind,
level: usize,
coords: TileCoords,
prev: &TileCoordMap,
) -> Result<()>
where
[P::Subpixel]: image::EncodableLayout,
image::ImageBuffer<P, Vec<P::Subpixel>>: Into<image::DynamicImage>,
{
/// Tile width/height
const N: u32 = (BLOCKS_PER_CHUNK * CHUNKS_PER_REGION) as u32;
let output_path = self.config.tile_path(kind, level, coords);
let output_timestamp = fs::read_timestamp(&output_path, FILE_META_VERSION);
let sources: Vec<_> = [(0, 0), (0, 1), (1, 0), (1, 1)]
.into_iter()
.filter_map(|(dx, dz)| {
let source_coords = TileCoords {
x: 2 * coords.x + dx,
z: 2 * coords.z + dz,
};
if !prev.contains(source_coords) {
return None;
}
let source_path = self.config.tile_path(kind, level - 1, source_coords);
let timestamp = match fs::modified_timestamp(&source_path) {
Ok(timestamp) => timestamp,
Err(err) => {
eprintln!("{}", err);
return None;
}
};
Some(((dx, dz), source_path, timestamp))
})
.collect();
let Some(input_timestamp) = sources.iter().map(|(_, _, ts)| *ts).max() else {
return Ok(());
};
if Some(input_timestamp) <= output_timestamp {
println!(
"Skipping unchanged mipmap tile {}",
output_path
.strip_prefix(&self.config.output_dir)
.expect("tile path must be in output directory")
.display(),
);
return Ok(());
}
println!(
"Rendering mipmap tile {}",
output_path
.strip_prefix(&self.config.output_dir)
.expect("tile path must be in output directory")
.display(),
);
let mut image: image::DynamicImage =
image::ImageBuffer::<P, Vec<P::Subpixel>>::new(N, N).into();
for ((dx, dz), source_path, _) in sources {
let source = match image::open(&source_path) {
Ok(source) => source,
Err(err) => {
eprintln!(
"Failed to read source image {}: {}",
source_path.display(),
err,
);
continue;
}
};
let resized = source.resize(N / 2, N / 2, image::imageops::FilterType::Triangle);
image::imageops::overlay(
&mut image,
&resized,
dx as i64 * (N / 2) as i64,
dz as i64 * (N / 2) as i64,
);
}
fs::create_with_timestamp(&output_path, FILE_META_VERSION, input_timestamp, |file| {
image
.write_to(file, image::ImageFormat::Png)
.context("Failed to save image")
})
}
/// Runs the mipmap generation
pub fn run(self) -> Result<Vec<TileCoordMap>> {
let mut tile_stack = {
let mut tile_map = TileCoordMap::default();
for &TileCoords { x, z } in self.regions {
tile_map.0.entry(z).or_default().insert(x);
}
vec![tile_map]
};
loop {
let level = tile_stack.len();
let prev = &tile_stack[level - 1];
if Self::done(prev) {
break;
}
fs::create_dir_all(&self.config.tile_dir(TileKind::Map, level))?;
fs::create_dir_all(&self.config.tile_dir(TileKind::Lightmap, level))?;
let next = Self::map_coords(prev);
next.0.par_iter().try_for_each(|(&z, xs)| {
xs.par_iter().try_for_each(|&x| {
let coords = TileCoords { x, z };
self.render_mipmap::<image::Rgba<u8>>(TileKind::Map, level, coords, prev)?;
self.render_mipmap::<image::LumaA<u8>>(
TileKind::Lightmap,
level,
coords,
prev,
)?;
anyhow::Ok(())
})
})?;
tile_stack.push(next);
}
Ok(tile_stack)
}
}

src/core/tile_renderer.rs (new file, 324 lines)

@@ -0,0 +1,324 @@
//! The [TileRenderer] and related types and functions
use std::{
num::NonZeroUsize,
path::PathBuf,
sync::{Arc, Mutex},
time::SystemTime,
};
use anyhow::{Context, Result};
use glam::Vec3;
use lru::LruCache;
use rayon::prelude::*;
use tokio::sync::OnceCell;
use super::{common::*, region_group::RegionGroup};
use crate::{
io::{fs, storage},
resource::{block_color, needs_biome},
types::*,
util::coord_offset,
};
/// Type for referencing loaded [ProcessedRegion] data
type RegionRef = Arc<ProcessedRegion>;
/// Returns the index of the biome at a block coordinate
///
/// The passed chunk and block coordinates relative to the center of the
/// region group is offset by *dx* and *dz*.
///
/// The returned tuple contains the relative region coordinates the offset coordinate
/// ends up in (in the range -1..1) and the index in that region's biome list.
fn biome_at(
region_group: &RegionGroup<RegionRef>,
chunk: ChunkCoords,
block: LayerBlockCoords,
dx: i32,
dz: i32,
) -> Option<(i8, i8, u16)> {
let (region_x, chunk_x, block_x) = coord_offset(chunk.x, block.x, dx);
let (region_z, chunk_z, block_z) = coord_offset(chunk.z, block.z, dz);
let chunk = ChunkCoords {
x: chunk_x,
z: chunk_z,
};
let block = LayerBlockCoords {
x: block_x,
z: block_z,
};
let region = region_group.get(region_x, region_z)?;
Some((
region_x,
region_z,
region.chunks[chunk].as_ref()?.biomes[block]?.get() - 1,
))
}
/// The TileRenderer generates map tiles from processed region data
pub struct TileRenderer<'a> {
/// Common MinedMap configuration from command line
config: &'a Config,
/// Runtime for asynchronous region loading
rt: &'a tokio::runtime::Runtime,
/// List of populated regions to render tiles for
regions: &'a [TileCoords],
/// Set of populated regions for fast existence checking
region_set: rustc_hash::FxHashSet<TileCoords>,
/// Cache of previously loaded regions
region_cache: Mutex<LruCache<PathBuf, Arc<OnceCell<RegionRef>>>>,
}
impl<'a> TileRenderer<'a> {
/// Constructs a new TileRenderer
pub fn new(
config: &'a Config,
rt: &'a tokio::runtime::Runtime,
regions: &'a [TileCoords],
) -> Self {
let region_cache = Mutex::new(LruCache::new(
NonZeroUsize::new(6 + 6 * config.num_threads).unwrap(),
));
let region_set = regions.iter().copied().collect();
TileRenderer {
config,
rt,
regions,
region_set,
region_cache,
}
}
/// Loads [ProcessedRegion] for a region or returns previously loaded data from the region cache
async fn load_region(&self, processed_path: PathBuf) -> Result<RegionRef> {
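// Cache design sketch: the LRU cache stores Arc<OnceCell<..>> handles rather than the
// regions themselves, so the mutex is only held while looking up or inserting a cell;
// the actual read below runs outside the lock, and concurrent renderers that need the
// same region wait on the same OnceCell instead of each loading it separately.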
let region_loader = {
let mut region_cache = self.region_cache.lock().unwrap();
if let Some(region_loader) = region_cache.get(&processed_path) {
Arc::clone(region_loader)
} else {
let region_loader = Default::default();
region_cache.put(processed_path.clone(), Arc::clone(&region_loader));
region_loader
}
};
region_loader
.get_or_try_init(|| async {
storage::read(&processed_path).context("Failed to load processed region data")
})
.await
.cloned()
}
/// Loads a 3x3 neighborhood of processed region data
async fn load_region_group(
&self,
processed_paths: RegionGroup<PathBuf>,
) -> Result<RegionGroup<RegionRef>> {
processed_paths
.async_try_map(move |path| self.load_region(path))
.await
}
/// Computes the color of a tile pixel
fn block_color_at(
region_group: &RegionGroup<RegionRef>,
chunk: &ProcessedChunk,
chunk_coords: ChunkCoords,
block_coords: LayerBlockCoords,
) -> Option<Vec3> {
/// Helper for keys in the weight table
///
/// Hashing the value as a single u32 is more efficient than hashing
/// the tuple elements separately.
fn biome_key((dx, dz, index): (i8, i8, u16)) -> u32 {
(dx as u8 as u32) | (dz as u8 as u32) << 8 | (index as u32) << 16
}
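// e.g. (dx, dz, index) = (-1, 1, 3) packs to 0x0003_01ff: dx in the low byte (0xff),
// dz in the next byte (0x01) and the biome index in the upper 16 bits.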
/// One quadrant of the kernel used to smooth biome edges
///
/// The kernel is mirrored in X and Z direction to build the full 5x5
/// smoothing kernel.
const SMOOTH: [[f32; 3]; 3] = [[41.0, 26.0, 7.0], [26.0, 16.0, 4.0], [7.0, 4.0, 1.0]];
/// Maximum X coordinate offset to take into account for biome smoothing
const X: isize = SMOOTH[0].len() as isize - 1;
/// Maximum Z coordinate offset to take into account for biome smoothing
const Z: isize = SMOOTH.len() as isize - 1;
let block = chunk.blocks[block_coords]?;
let depth = chunk.depths[block_coords]?;
if !needs_biome(block) {
return Some(block_color(block, None, depth.0 as f32));
}
let mut weights = rustc_hash::FxHashMap::<u32, ((i8, i8, u16), f32)>::default();
for dz in -Z..=Z {
for dx in -X..=X {
let w = SMOOTH[dz.unsigned_abs()][dx.unsigned_abs()];
if w == 0.0 {
continue;
}
let Some(biome) = biome_at(
region_group,
chunk_coords,
block_coords,
dx as i32,
dz as i32,
) else {
continue;
};
let value = weights.entry(biome_key(biome)).or_default();
value.0 = biome;
value.1 += w;
}
}
if weights.is_empty() {
return None;
}
let mut color = Vec3::ZERO;
let mut total = 0.0;
for ((region_x, region_z, index), w) in weights.into_values() {
let region = region_group.get(region_x, region_z)?;
let biome = region.biome_list.get_index(index.into())?;
total += w;
color += w * block_color(block, Some(biome), depth.0 as f32);
}
Some(color / total)
}
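// Kernel sketch: SMOOTH holds the dx, dz >= 0 quadrant; indexing by unsigned_abs()
// mirrors it into the full 5x5 kernel, e.g. weight 41 at the center, 26 for directly
// adjacent blocks and 1 at the (+-2, +-2) corners. The weights are normalized by
// `total` at the end, so the kernel itself needs no particular sum.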
/// Renders a chunk subtile into a region tile image
fn render_chunk(
image: &mut image::RgbaImage,
region_group: &RegionGroup<RegionRef>,
chunk: &ProcessedChunk,
chunk_coords: ChunkCoords,
) {
/// Width/height of a chunk subtile
const N: u32 = BLOCKS_PER_CHUNK as u32;
let chunk_image = image::RgbaImage::from_fn(N, N, |x, z| {
let block_coords = LayerBlockCoords {
x: BlockX::new(x),
z: BlockZ::new(z),
};
let color = Self::block_color_at(region_group, chunk, chunk_coords, block_coords);
image::Rgba(
color
.map(|c| [c[0] as u8, c[1] as u8, c[2] as u8, 255])
.unwrap_or_default(),
)
});
overlay_chunk(image, &chunk_image, chunk_coords);
}
/// Renders a region tile image
fn render_region(image: &mut image::RgbaImage, region_group: &RegionGroup<RegionRef>) {
for (coords, chunk) in region_group.center().chunks.iter() {
let Some(chunk) = chunk else {
continue;
};
Self::render_chunk(image, region_group, chunk, coords);
}
}
/// Returns the filename of the processed data for a region and the time of its last modification
fn processed_source(&self, coords: TileCoords) -> Result<(PathBuf, SystemTime)> {
let path = self.config.processed_path(coords);
let timestamp = fs::modified_timestamp(&path)?;
Ok((path, timestamp))
}
/// Returns the filenames of the processed data for a 3x3 neighborhood of a region
/// and the time of last modification for any of them
fn processed_sources(&self, coords: TileCoords) -> Result<(RegionGroup<PathBuf>, SystemTime)> {
let sources = RegionGroup::new(|x, z| {
Some(TileCoords {
x: coords.x + (x as i32),
z: coords.z + (z as i32),
})
.filter(|entry| self.region_set.contains(entry))
})
.try_map(|entry| self.processed_source(entry))
.with_context(|| format!("Region {:?} from previous step must exist", coords))?;
let max_timestamp = *sources
.iter()
.map(|(_, timestamp)| timestamp)
.max()
.expect("at least one timestamp must exist");
let paths = sources.map(|(path, _)| path);
Ok((paths, max_timestamp))
}
/// Renders and saves a region tile image
fn render_tile(&self, coords: TileCoords) -> Result<()> {
/// Width/height of a tile image
const N: u32 = (BLOCKS_PER_CHUNK * CHUNKS_PER_REGION) as u32;
let (processed_paths, processed_timestamp) = self.processed_sources(coords)?;
let output_path = self.config.tile_path(TileKind::Map, 0, coords);
let output_timestamp = fs::read_timestamp(&output_path, FILE_META_VERSION);
if Some(processed_timestamp) <= output_timestamp {
println!(
"Skipping unchanged tile {}",
output_path
.strip_prefix(&self.config.output_dir)
.expect("tile path must be in output directory")
.display(),
);
return Ok(());
}
println!(
"Rendering tile {}",
output_path
.strip_prefix(&self.config.output_dir)
.expect("tile path must be in output directory")
.display(),
);
let region_group = self
.rt
.block_on(self.load_region_group(processed_paths))
.with_context(|| format!("Region {:?} from previous step must be loadable", coords))?;
let mut image = image::RgbaImage::new(N, N);
Self::render_region(&mut image, &region_group);
fs::create_with_timestamp(
&output_path,
FILE_META_VERSION,
processed_timestamp,
|file| {
image
.write_to(file, image::ImageFormat::Png)
.context("Failed to save image")
},
)
}
/// Runs the tile generation
pub fn run(self) -> Result<()> {
fs::create_dir_all(&self.config.tile_dir(TileKind::Map, 0))?;
// Use par_bridge to process items in order (for better use of region cache)
self.regions.iter().par_bridge().try_for_each(|&coords| {
self.render_tile(coords)
.with_context(|| format!("Failed to render tile {:?}", coords))
})?;
Ok(())
}
}