Mirror of https://github.com/neocturne/MinedMap.git (synced 2025-04-18 18:45:09 +02:00)

Compare commits: 75 commits
Commits (SHA1):
118034dd27, ca880ab3b4, dd56e842b5, 69b62576ea, 58f2ff53b9, 42b302f493, 5f84ec8ed2, ba6e4bae7f,
442009eb08, 23b2f274be, 3008203080, d638c810f2, 6e5b958912, 974a0f37df, 54ea2b2f28, 6a54f57c50,
dca365f4e2, e600a9dabb, 0f52f6a2c1, d6cd0fc53b, c49ad6e608, 5ee826a11b, 775fcb2d1b, 7bba5bae55,
5c85687554, 708fb9645d, 53a0f24600, 404ad74235, deb232ddf3, fbdd5ed457, b1c0f316cb, 7686996fd3,
b8a5d1580b, f8c8ca78ba, 40bc6cd2a9, 5ee8e493d4, 6e2f2697fc, 24c266fc78, 0a08e8cc46, 90f2c5fdd0,
8cb1eee60b, 282f62fc30, 7bc15f97de, 901489dbc3, dba3dd551e, 850b1a668b, 3b5ce82873, cb0aa235db,
d02ca9aea2, c10e9e4902, 37126f69fc, 971afea727, f661f854a4, 1d9be9a41c, a25b3cdbd7, d96bb727f7,
0dd36a409a, a10151a4f3, d7fc95c950, 561a1e6577, 0bf1d46aad, a2f0ad401d, ec309dc15f, 6763e2b4ec,
c23b53a8c3, bb11b29e92, 28a191a23a, 52572a9e81, b9cd94b235, f9954d1ce4, a67bdb3b67, 0a3f6d7765,
ff6e28d381, 9375af8d54, 7f329ac8e7

41 changed files with 1479 additions and 425 deletions
.dockerignore (new file, 4 lines)

@@ -0,0 +1,4 @@
*
!/Cargo.*
!/src
!/crates

.github/workflows/MinedMap.yml (vendored, 120 lines changed)

@@ -1,5 +1,14 @@
name: 'MinedMap'
on: ['push', 'pull_request', 'workflow_dispatch']
on:
  push:
    branches:
      - 'main'
    tags:
      - 'v*'
  pull_request:
    branches:
      - 'main'
  workflow_dispatch: {}

env:
  RUSTFLAGS: -Dwarnings

@@ -7,7 +16,7 @@ env:

jobs:
  viewer:
    runs-on: 'ubuntu-20.04'
    runs-on: 'ubuntu-latest'

    steps:
      - name: 'Checkout'

@@ -25,9 +34,10 @@ jobs:
          pkgdir='build/pkg/MinedMap-${{ steps.tag.outputs.tag }}-viewer'
          mkdir -p "$pkgdir"
          cp -r viewer/* "$pkgdir"/
          rm "$pkgdir"/Dockerfile

      - name: 'Archive'
        uses: 'actions/upload-artifact@v3'
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'MinedMap-${{ steps.tag.outputs.tag }}-viewer'
          path: 'build/pkg'

@@ -38,7 +48,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
          toolchain: '1.86'
          components: rustfmt
      - run: cargo fmt --all -- --check

@@ -48,7 +58,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
          toolchain: '1.86'
          components: clippy
      - uses: swatinem/rust-cache@v2
      - uses: actions-rs/clippy-check@v1

@@ -62,7 +72,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
          toolchain: '1.86'
          components: rust-docs
      - uses: swatinem/rust-cache@v2
      - run: cargo doc --workspace --no-deps --document-private-items

@@ -73,12 +83,11 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
        rust: [stable]
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: ${{ matrix.rust }}
          toolchain: '1.86'
      - uses: swatinem/rust-cache@v2
      - run: cargo test --workspace
      - run: cargo test --workspace --no-default-features

@@ -101,7 +110,7 @@ jobs:
          - os: 'windows-2019'
            target: 'i686-pc-windows-msvc'
            ext: '.exe'
          - os: 'ubuntu-20.04'
          - os: 'ubuntu-22.04'
            target: 'x86_64-unknown-linux-gnu'

    steps:

@@ -118,10 +127,12 @@ jobs:

      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
          toolchain: '1.86'
          targets: '${{ matrix.target }}'

      - uses: swatinem/rust-cache@v2
        with:
          key: '${{ matrix.target }}'

      - name: 'Build'
        shell: 'bash'

@@ -134,7 +145,94 @@ jobs:
          cp target/${{ matrix.target }}/release/minedmap${{ matrix.ext }} "$pkgdir"/

      - name: 'Archive'
        uses: 'actions/upload-artifact@v3'
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'MinedMap-${{ steps.tag.outputs.tag }}-${{ matrix.target }}'
          path: 'target/pkg'

  build-container:
    runs-on: ubuntu-latest
    needs:
      - test
    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Get version'
        id: 'tag'
        run: |
          set -o pipefail
          git fetch --prune --unshallow --tags -f
          echo "tag=$(git describe --abbrev=7 --match='v*' | sed 's/^v//')" >> $GITHUB_OUTPUT

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/neocturne/minedmap/minedmap
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=branch,suffix=-{{sha}}
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}

      - name: Login to GHCR
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/setup-buildx-action@v3

      - name: Build
        uses: docker/build-push-action@v6
        with:
          build-args: |
            MINEDMAP_VERSION=${{ steps.tag.outputs.tag }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

  viewer-container:
    runs-on: ubuntu-latest
    needs:
      - test
    steps:
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/neocturne/minedmap/viewer
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=branch,suffix=-{{sha}}
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}

      - name: Login to GHCR
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/setup-buildx-action@v3

      - name: Build
        uses: docker/build-push-action@v6
        with:
          context: "{{defaultContext}}:viewer"
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

CHANGELOG.md (98 lines changed)

@@ -2,8 +2,101 @@

## [Unreleased] - ReleaseDate

### Added

- Added support for Minecraft 1.21.5

  Added new block types and handling for changed sign text storage format.

## [2.5.0] - 2025-03-16

### Added

- Added experimental watch mode

  Passing `--watch` will cause MinedMap to run continuously instead of exiting
  after map generation, regenerating tiles whenever they change.

  `--watch-delay` can be used to configure the delay between detecting a change
  and running the map generation, also limiting how often the regeneration
  happens. This defaults to `30s`; significantly smaller values probably don't
  make sense because Minecraft writes out changes in batches anyways.

  Finally, `--jobs-initial` can be used to configure the number of parallel
  generation threads for the initial cycle separately from the value used for
  subsequent cycles after a change is detected (`-j`/`--jobs`). Subsequent
  cycles usually need to regenerate only a small number of tiles, so setting
  `--jobs` to a smaller value than `--jobs-initial` may be advantageous.

- Added jemalloc support to fix performance on musl targets

  The global allocator can be switched to jemalloc by enabling the `jemalloc`
  cargo feature now. This is not the default because it is not always faster
  than the default system allocator; in particular, the glibc allocator has
  slightly better performance in multithreaded mode. In addition, jemalloc
  uses a bit more memory.

  In addition, the `jemalloc-auto` feature has been introduced, which is enabled
  by default and sets the global allocator to jemalloc on platforms where it is
  clearly advantageous. For now, this is only done on musl-based targets, as
  musl's default allocator is very slow in multithreaded operation (which was
  making higher thread counts like `-j8` basically useless due to 7-8x
  slowdowns). With the new default, performance on musl is basically identical
  to glibc.

  Note that some platforms like `msvc` are unsupported by jemalloc, and trying
  to enable the `jemalloc` feature on these platforms may break the MinedMap
  build or cause issues at runtime.
- Docker images can be downloaded from the GitHub Container registry

  Two images are provided, one for the tile renderer and one with the viewer
  and a web server. A `docker-compose.yml` example can be found in the
  repository as a starting point.

### Changed

- Unknown biome types (from not yet supported or modded versions of Minecraft)
  will now use plains biome colors as a fallback instead of water, grass and
  foliage blocks being rendered as transparent pixels
- Switched from zlib-ng to zlib-rs

  This should have no noticeable effect on the usage of MinedMap, but avoids
  an external build dependency on CMake.
- Small (1-block) seagrass is now visible on the map

  1-block seagrass in 1-block deep water would previously result in the ground
  being shown instead of water, as MinedMap currently doesn't handle the
  "waterlogged" block status. As 1-block seagrass is relatively big compared to
  other "small" plants, just considering it opaque seems like a good enough
  solution that avoids having to implement advanced block status flags.
- Use Bincode 2 for storage of intermediate data

  The update from Bincode 1 to 2 slightly reduces the size of the `processed`
  directory used for intermediate data. At least Rust 1.85 is now required to
  build MinedMap.

## [2.4.0] - 2025-01-11

### Added

- Added support for rendering tiles in WebP format using the `--image-format` option

## [2.3.1] - 2025-01-06

### Fixed

- Fix text colors for signs modified using dye
- Fix text colors specified using `#rrggbb` CSS syntax in JSON text

  Only named colors specified via JSON text were working as intended.

  The mapping of color names to values is now handled by the generator. Both the generator and the
  viewer must be updated for sign text colors to work.

## [2.3.0] - 2025-01-02

### Added

- Added support for Minecraft 1.21.4 block types
- Added support for Minecraft 1.21.4 Pale Garden biome
- viewer: added images for pale oak signs

@@ -101,7 +194,10 @@ intermediate data.
Full support for custom biomes datapacks might be added in a future release.

<!-- next-url -->
[Unreleased]: https://github.com/neocturne/MinedMap/compare/v2.3.0...HEAD
[Unreleased]: https://github.com/neocturne/MinedMap/compare/v2.5.0...HEAD
[2.5.0]: https://github.com/neocturne/MinedMap/compare/v2.4.0...v2.5.0
[2.4.0]: https://github.com/neocturne/MinedMap/compare/v2.3.1...v2.4.0
[2.3.1]: https://github.com/neocturne/MinedMap/compare/v2.3.0...v2.3.1
[2.3.0]: https://github.com/neocturne/MinedMap/compare/v2.2.0...v2.3.0
[2.2.0]: https://github.com/neocturne/MinedMap/compare/v2.1.1...v2.2.0
[2.1.1]: https://github.com/neocturne/MinedMap/compare/v2.1.0...v2.1.1

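The watch-mode entry above describes a debounced regenerate loop: wait for a change, sleep for `--watch-delay`, drop events that arrived during the delay, then regenerate. Below is a minimal, standalone sketch of that pattern using the same `notify` crate the generator depends on; it is not MinedMap's actual code (that appears in the `src/core/mod.rs` hunks further down), and the `world` path and fixed 30 s delay are placeholders.

```rust
use std::{path::Path, sync::mpsc, thread, time::Duration};

use notify::{RecursiveMode, Watcher};

fn main() -> anyhow::Result<()> {
    // Bounded to 1: while a regeneration is already pending, further events are dropped.
    let (tx, rx) = mpsc::sync_channel::<()>(1);
    let mut watcher =
        notify::recommended_watcher(move |res: Result<notify::Event, notify::Error>| {
            if res.is_ok() {
                // Ignore send errors: a trigger is already queued.
                let _ = tx.try_send(());
            }
        })?;
    watcher.watch(Path::new("world"), RecursiveMode::Recursive)?;

    loop {
        rx.recv()?; // block until something changed
        thread::sleep(Duration::from_secs(30)); // --watch-delay: let the write batch settle
        let _ = rx.try_recv(); // drop events that arrived during the delay
        println!("regenerate tiles here");
    }
}
```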
Cargo.lock (generated, 589 lines changed)

File diff suppressed because it is too large.

Cargo.toml (30 lines changed)

@@ -2,7 +2,7 @@
members = ["crates/*"]

[workspace.package]
edition = "2021"
edition = "2024"
license = "MIT"
readme = "README.md"
repository = "https://github.com/neocturne/MinedMap"

@@ -13,7 +13,7 @@ pre-release-commit-message = "{{crate_name}} {{version}}"

[package]
name = "minedmap"
version = "2.3.0"
version = "2.5.0"
description = "Generate browsable maps from Minecraft save data"
edition.workspace = true
license.workspace = true

@@ -38,24 +38,29 @@ pre-release-replacements = [

[dependencies]
anyhow = "1.0.68"
bincode = "1.3.3"
bincode = "2.0.1"
clap = { version = "4.1.4", features = ["derive", "wrap_help"] }
enum-map = "2.7.3"
fastnbt = "2.3.2"
flate2 = { version = "1.1.0", features = ["zlib-rs"] }
futures-util = "0.3.28"
git-version = "0.3.5"
image = { version = "0.25.1", default-features = false, features = ["png"] }
indexmap = { version = "2.0.0", features = ["serde"] }
lru = "0.12.0"
minedmap-nbt = { version = "0.1.1", path = "crates/nbt", default-features = false }
minedmap-resource = { version = "0.5.0", path = "crates/resource" }
minedmap-types = { version = "0.1.2", path = "crates/types" }
humantime = "2.1.0"
image = { version = "0.25.1", default-features = false, features = ["png", "webp"] }
indexmap = "2.0.0"
lru = "0.13.0"
minedmap-default-alloc = { version = "0.1.0", path = "crates/default-alloc", optional = true }
minedmap-nbt = { version = "0.2.0", path = "crates/nbt", default-features = false }
minedmap-resource = { version = "0.7.0", path = "crates/resource" }
minedmap-types = { version = "0.2.0", path = "crates/types" }
notify = "8.0.0"
num-integer = "0.1.45"
num_cpus = "1.16.0"
phf = { version = "0.11.2", features = ["macros"] }
rayon = "1.7.0"
regex = "1.10.2"
rustc-hash = "2.0.0"
serde = { version = "1.0.152", features = ["rc", "derive"] }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.99"
tokio = { version = "1.31.0", features = ["rt", "parking_lot", "sync"] }
tracing = "0.1.37"

@@ -63,5 +68,6 @@ tracing-subscriber = "0.3.17"
zstd = "0.13.0"

[features]
default = ["zlib-ng"]
zlib-ng = ["minedmap-nbt/zlib-ng"]
default = ["jemalloc-auto"]
jemalloc-auto = ["dep:minedmap-default-alloc"]
jemalloc = ["jemalloc-auto", "minedmap-default-alloc/jemalloc"]

Dockerfile (19 lines changed)

@@ -1,14 +1,17 @@
FROM docker.io/library/rust:alpine AS BUILDER
FROM docker.io/library/rust:1.85.1-alpine AS builder

WORKDIR /build
RUN apk update && apk add cmake build-base
RUN apk add --no-cache build-base tini-static

COPY src /build/src
COPY crates /build/crates
COPY Cargo.toml Cargo.lock /build
ARG MINEDMAP_VERSION

COPY . .
RUN cargo build -r
RUN strip target/release/minedmap

FROM scratch AS RUNNER
FROM scratch

COPY --from=BUILDER /build/target/release/minedmap /minedmap
ENTRYPOINT [ "/minedmap" ]
COPY --from=builder /sbin/tini-static /build/target/release/minedmap /bin/
ENTRYPOINT [ "/bin/tini-static", "--", "/bin/minedmap" ]

USER 1000:1000

@@ -1,3 +0,0 @@
FROM docker.io/library/nginx:alpine
COPY viewer /usr/share/nginx/html
# datadir should be mounted to: /usr/share/nginx/html/data

README.md (16 lines changed)

@@ -55,9 +55,18 @@ a proper webserver like [nginx](https://nginx.org/) or upload the viewer togethe
the generated map files to public webspace to make the map available to others.

If you are uploading the directory to a remote webserver, you do not need to upload the
`<viewer>/data/processed` directory, as that is only used locally to allow processing
`<viewer>/data/processed` directory, as it is only used locally to allow processing
updates more quickly.

### Image formats

MinedMap renders map tiles as PNG by default. Pass `--image-format webp` to select
WebP instead. For typical Minecraft worlds, using WebP reduces file sizes by 20-25%
without increasing processing time.

MinedMap always uses lossless compression for tile images, regardless of the
image format.

### Signs

![Signs](docs/images/signs.png)
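The image-format choice only switches the file extension and the encoder; both formats stay lossless. A small sketch of that mapping is shown below. It is not MinedMap's code (the generator's own `tile_extension()`/`tile_image_format()` helpers appear in the `common.rs` hunks later in this listing); the tile size and file name here are illustrative only.

```rust
use image::{ImageFormat, RgbaImage};

/// Map the CLI choice to the extension and encoder used for a tile.
fn tile_format(webp: bool) -> (&'static str, ImageFormat) {
    if webp {
        ("webp", ImageFormat::WebP)
    } else {
        ("png", ImageFormat::Png)
    }
}

fn main() -> Result<(), image::ImageError> {
    let (ext, fmt) = tile_format(true);
    let tile = RgbaImage::new(512, 512);
    // Both PNG and WebP are written losslessly by the image crate here.
    tile.save_with_format(format!("r.0.0.{ext}"), fmt)?;
    Ok(())
}
```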
@@ -108,11 +117,6 @@ or newer). The following command can be used to build the current development ve
cargo install --git 'https://github.com/neocturne/MinedMap.git'
```

In addition, CMake is needed to build the zlib-ng library. If you do not have
CMake installed, you can disable the zlib-ng feature by passing `--no-default-features`
to cargo. A pure-Rust zlib implementation will be used, which is more portable,
but slower than zlib-ng.

If you are looking for the older C++ implementation of the MinedMap tile renderer,
see the [v1.19.1](https://github.com/neocturne/MinedMap/tree/v1.19.1) tag.

crates/default-alloc/Cargo.toml (new file, 17 lines)

@@ -0,0 +1,17 @@
[package]
name = "minedmap-default-alloc"
version = "0.1.0"
description = "Helper crate for target-specific selection of global allocator default"
edition.workspace = true
license.workspace = true
readme.workspace = true
repository.workspace = true

[dependencies]
tikv-jemallocator = { version = "0.6.0", optional = true }

[target.'cfg(target_env = "musl")'.dependencies]
tikv-jemallocator = "0.6.0"

[features]
jemalloc = ["dep:tikv-jemallocator"]

crates/default-alloc/src/lib.rs (new file, 3 lines)

@@ -0,0 +1,3 @@
#[cfg(any(target_env = "musl", feature = "jemalloc"))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

@@ -1,6 +1,6 @@
[package]
name = "minedmap-nbt"
version = "0.1.1"
version = "0.2.0"
description = "MinedMap's handling of Minecraft NBT data and region files"
edition.workspace = true
license.workspace = true

@@ -11,12 +11,10 @@ repository.workspace = true
anyhow = "1.0.75"
bytemuck = "1.13.1"
fastnbt = "2.4.4"
flate2 = "1.0.27"
minedmap-types = { version = "0.1.2", path = "../types" }
flate2 = "1.1.0"
minedmap-types = { version = "0.2.0", path = "../types" }
serde = "1.0.183"

[features]
zlib-ng = ["flate2/zlib-ng"]

[dev-dependencies]
clap = { version = "4.3.23", features = ["derive"] }
flate2 = { version = "1.1.0", features = ["zlib-rs"] }

@@ -2,11 +2,11 @@

use std::{
    fs::File,
    io::{prelude::*, SeekFrom},
    io::{SeekFrom, prelude::*},
    path::Path,
};

use anyhow::{bail, Context, Result};
use anyhow::{Context, Result, bail};
use flate2::read::ZlibDecoder;
use serde::de::DeserializeOwned;

@@ -1,6 +1,6 @@
[package]
name = "minedmap-resource"
version = "0.5.0"
version = "0.7.0"
description = "Data describing Minecraft biomes and block types"
edition.workspace = true
license.workspace = true

@@ -8,6 +8,6 @@ readme.workspace = true
repository.workspace = true

[dependencies]
enumflags2 = { version = "0.7.7", features = ["serde"] }
glam = "0.29.2"
serde = { version = "1.0.183", features = ["derive"] }
bincode = "2.0.1"
enumflags2 = "0.7.7"
glam = "0.30.0"

@@ -1488,6 +1498,16 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "bush",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{Opaque|Grass}),
                color: Color([119, 120, 119]),
            },
            sign_material: None,
        },
    ),
    (
        "cactus",
        ConstBlockType {

@@ -1498,6 +1508,16 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "cactus_flower",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{Opaque}),
                color: Color([209, 120, 135]),
            },
            sign_material: None,
        },
    ),
    (
        "cake",
        ConstBlockType {

@@ -3808,6 +3828,16 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "firefly_bush",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{Opaque}),
                color: Color([87, 83, 43]),
            },
            sign_material: None,
        },
    ),
    (
        "fletching_table",
        ConstBlockType {

@@ -4878,6 +4908,16 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "leaf_litter",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{}),
                color: Color([0, 0, 0]),
            },
            sign_material: None,
        },
    ),
    (
        "lectern",
        ConstBlockType {

@@ -8692,8 +8732,18 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
        "seagrass",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{}),
                color: Color([0, 0, 0]),
                flags: make_bitflags!(BlockFlag::{Opaque}),
                color: Color([50, 126, 8]),
            },
            sign_material: None,
        },
    ),
    (
        "short_dry_grass",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{Opaque}),
                color: Color([187, 158, 108]),
            },
            sign_material: None,
        },

@@ -9638,6 +9688,16 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "tall_dry_grass",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{Opaque}),
                color: Color([196, 171, 122]),
            },
            sign_material: None,
        },
    ),
    (
        "tall_grass",
        ConstBlockType {

@@ -9678,6 +9738,26 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "test_block",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{}),
                color: Color([0, 0, 0]),
            },
            sign_material: None,
        },
    ),
    (
        "test_instance_block",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{}),
                color: Color([0, 0, 0]),
            },
            sign_material: None,
        },
    ),
    (
        "tinted_glass",
        ConstBlockType {

@@ -10828,6 +10908,16 @@ pub const BLOCK_TYPES: &[(&str, ConstBlockType)] = &[
            sign_material: None,
        },
    ),
    (
        "wildflowers",
        ConstBlockType {
            block_color: BlockColor {
                flags: make_bitflags!(BlockFlag::{}),
                color: Color([0, 0, 0]),
            },
            sign_material: None,
        },
    ),
    (
        "wither_rose",
        ConstBlockType {

@@ -13,6 +13,7 @@ const DEF: &str = "air";
const EMPTY: [&str; 16] = simple(DEF);

/// Mapping from each numeric block type and damage/subtype ID to new string ID
#[allow(clippy::large_const_arrays)]
pub const LEGACY_BLOCK_TYPES: [[&str; 16]; 256] = [
    /* 0 */
    simple("air"),

@@ -10,13 +10,13 @@ mod legacy_block_types;

use std::collections::HashMap;

use enumflags2::{bitflags, BitFlags};
use serde::{Deserialize, Serialize};
use bincode::{BorrowDecode, Decode, Encode};
use enumflags2::{BitFlags, bitflags};

/// Flags describing special properties of [BlockType]s
#[bitflags]
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum BlockFlag {
    /// The block type is opaque
    Opaque,

@@ -38,14 +38,14 @@ pub enum BlockFlag {
}

/// An RGB color with u8 components
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Encode, Decode)]
pub struct Color(pub [u8; 3]);

/// An RGB color with f32 components
pub type Colorf = glam::Vec3;

/// A block type specification
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy)]
pub struct BlockColor {
    /// Bit set of [BlockFlag]s describing special properties of the block type
    pub flags: BitFlags<BlockFlag>,

@@ -61,6 +61,43 @@ impl BlockColor {
    }
}

impl Encode for BlockColor {
    fn encode<E: bincode::enc::Encoder>(
        &self,
        encoder: &mut E,
    ) -> Result<(), bincode::error::EncodeError> {
        bincode::Encode::encode(&self.flags.bits(), encoder)?;
        bincode::Encode::encode(&self.color, encoder)?;
        Ok(())
    }
}

impl<Context> Decode<Context> for BlockColor {
    fn decode<D: bincode::de::Decoder<Context = Context>>(
        decoder: &mut D,
    ) -> Result<Self, bincode::error::DecodeError> {
        Ok(BlockColor {
            flags: BitFlags::from_bits(bincode::Decode::decode(decoder)?).or(Err(
                bincode::error::DecodeError::Other("invalid block flags"),
            ))?,
            color: bincode::Decode::decode(decoder)?,
        })
    }
}

impl<'de, Context> BorrowDecode<'de, Context> for BlockColor {
    fn borrow_decode<D: bincode::de::BorrowDecoder<'de, Context = Context>>(
        decoder: &mut D,
    ) -> Result<Self, bincode::error::DecodeError> {
        Ok(BlockColor {
            flags: BitFlags::from_bits(bincode::BorrowDecode::borrow_decode(decoder)?).or(Err(
                bincode::error::DecodeError::Other("invalid block flags"),
            ))?,
            color: bincode::BorrowDecode::borrow_decode(decoder)?,
        })
    }
}

/// A block type specification (for use in constants)
#[derive(Debug, Clone)]
struct ConstBlockType {
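The manual impls above store the `BitFlags<BlockFlag>` as its raw `u8` plus the three color bytes, and reject unknown flag bits on decode. A hedged round-trip sketch with bincode 2's standard configuration is shown below; it assumes `minedmap-resource` is used as a dependency, and the field values are arbitrary examples.

```rust
use bincode::config;
use enumflags2::make_bitflags;
use minedmap_resource::{BlockColor, BlockFlag, Color};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let color = BlockColor {
        flags: make_bitflags!(BlockFlag::{Opaque | Grass}),
        color: Color([50, 126, 8]),
    };

    // Encode writes the flag bits as a u8 followed by the color bytes.
    let bytes = bincode::encode_to_vec(color, config::standard())?;

    // Decode validates the bit pattern via BitFlags::from_bits.
    let (decoded, _len): (BlockColor, usize) =
        bincode::decode_from_slice(&bytes, config::standard())?;
    assert_eq!(decoded.flags, color.flags);
    assert_eq!(decoded.color, color.color);
    Ok(())
}
```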
@@ -137,7 +174,7 @@ impl BlockTypes {
pub use block_color::{block_color, needs_biome};

/// Grass color modifier used by a biome
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Encode, Decode)]
pub enum BiomeGrassColorModifier {
    /// Grass color modifier used by the dark forest biome
    DarkForest,

@@ -149,7 +186,7 @@ pub enum BiomeGrassColorModifier {
///
/// A Biome contains all information about a biome necessary to compute a block
/// color given a block type and depth
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Encode, Decode)]
pub struct Biome {
    /// Temperature value
    ///

@@ -247,6 +284,8 @@ pub struct BiomeTypes {
    biome_map: HashMap<String, &'static Biome>,
    /// Array used to look up old numeric biome IDs
    legacy_biomes: Box<[&'static Biome; 256]>,
    /// Fallback for unknown (new/modded) biomes
    fallback_biome: &'static Biome,
}

impl Default for BiomeTypes {

@@ -273,9 +312,12 @@ impl Default for BiomeTypes {
            .try_into()
            .unwrap();

        let fallback_biome = *biome_map.get("plains").expect("Plains biome undefined");

        Self {
            biome_map,
            legacy_biomes,
            fallback_biome,
        }
    }
}

@@ -293,4 +335,10 @@ impl BiomeTypes {
    pub fn get_legacy(&self, id: u8) -> Option<&Biome> {
        Some(self.legacy_biomes[id as usize])
    }

    /// Returns the fallback for unknown (new/modded) biomes
    #[inline]
    pub fn get_fallback(&self) -> &Biome {
        self.fallback_biome
    }
}

@@ -1,6 +1,6 @@
[package]
name = "minedmap-types"
version = "0.1.3"
version = "0.2.0"
description = "Common types used by several MinedMap crates"
edition.workspace = true
license.workspace = true

@@ -8,5 +8,5 @@ readme.workspace = true
repository.workspace = true

[dependencies]
itertools = "0.13.0"
serde = { version = "1.0.183", features = ["derive"] }
bincode = "2.0.1"
itertools = "0.14.0"

@@ -8,8 +8,8 @@ use std::{
    ops::{Index, IndexMut},
};

use bincode::{Decode, Encode};
use itertools::iproduct;
use serde::{Deserialize, Serialize};

/// Const generic AXIS arguments for coordinate types
pub mod axis {

@@ -48,10 +48,10 @@ macro_rules! coord_type {
        /// Returns an iterator over all possible values of the type
        #[inline]
        pub fn iter() -> impl DoubleEndedIterator<Item = $t<AXIS>>
        + ExactSizeIterator
        + FusedIterator
        + Clone
        + Debug {
        + ExactSizeIterator
        + FusedIterator
        + Clone
        + Debug {
            (0..Self::MAX as u8).map($t)
        }
    }

@@ -110,7 +110,7 @@ impl LayerBlockCoords {
/// Generic array for data stored per block of a chunk layer
///
/// Includes various convenient iteration functions.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, Default, Encode, Decode)]
pub struct LayerBlockArray<T>(pub [[T; BLOCKS_PER_CHUNK]; BLOCKS_PER_CHUNK]);

impl<T> Index<LayerBlockCoords> for LayerBlockArray<T> {

@@ -196,7 +196,7 @@ impl Debug for ChunkCoords {
/// Generic array for data stored per chunk of a region
///
/// Includes various convenient iteration functions.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, Default, Encode, Decode)]
pub struct ChunkArray<T>(pub [[T; CHUNKS_PER_REGION]; CHUNKS_PER_REGION]);

impl<T> ChunkArray<T> {

docker-compose.yml (new file, 51 lines)

@@ -0,0 +1,51 @@
# This is an example docker-compose configuration providing a Minecraft server,
# map generator and webserver. Visit http://localhost:8080 to view the map.
#
# See https://docker-minecraft-server.readthedocs.io/ for more information on
# the itzg/minecraft-server image and its configuration.

services:
  mc:
    image: docker.io/itzg/minecraft-server
    environment:
      EULA: 'true'
    ports:
      - '25565:25565'
    volumes:
      - data:/data
    stdin_open: true
    tty: true
    restart: unless-stopped

  minedmap:
    image: ghcr.io/neocturne/minedmap/minedmap
    command:
      - '--jobs-initial=2'
      - '--image-format=webp'
      - '--sign-filter=\[Map\]'
      - '--sign-transform=s/\[Map\]//'
      - '--watch'
      - '/input/world'
      - '/output'
    volumes:
      - data:/input:ro
      - output:/output
      - processed:/output/processed
    network_mode: 'none'
    depends_on:
      mc:
        condition: service_healthy
    restart: unless-stopped

  viewer:
    image: ghcr.io/neocturne/minedmap/viewer
    ports:
      - '8080:80'
    volumes:
      - output:/usr/share/nginx/html/data:ro
    restart: unless-stopped

volumes:
  data: {}
  processed: {}
  output: {}

@@ -316,9 +316,13 @@
    "bubble_coral_fan": null,
    "bubble_coral_wall_fan": null,
    "budding_amethyst": {},
    "bush": {
        "grass": true
    },
    "cactus": {
        "texture": "cactus_top"
    },
    "cactus_flower": {},
    "cake": {
        "texture": "cake_top"
    },

@@ -778,6 +782,7 @@
    "fire_coral_block": {},
    "fire_coral_fan": null,
    "fire_coral_wall_fan": null,
    "firefly_bush": {},
    "fletching_table": {
        "texture": "fletching_table_top"
    },

@@ -987,6 +992,7 @@
    "lava_cauldron": {
        "texture": "cauldron_top"
    },
    "leaf_litter": null,
    "lectern": {
        "texture": "lectern_top"
    },

@@ -1781,7 +1787,8 @@
    "sculk_vein": {},
    "sea_lantern": {},
    "sea_pickle": {},
    "seagrass": null,
    "seagrass": {},
    "short_dry_grass": {},
    "short_grass": null,
    "shroomlight": {},
    "shulker_box": {},

@@ -2013,6 +2020,7 @@
    "sweet_berry_bush": {
        "texture": "sweet_berry_bush_stage3"
    },
    "tall_dry_grass": {},
    "tall_grass": {
        "grass": true,
        "texture": "tall_grass_top"

@@ -2024,6 +2032,8 @@
        "texture": "target_top"
    },
    "terracotta": {},
    "test_block": null,
    "test_instance_block": null,
    "tinted_glass": {},
    "tnt": {
        "texture": "tnt_top"

@@ -2288,6 +2298,7 @@
    "white_tulip": null,
    "white_wall_banner": null,
    "white_wool": {},
    "wildflowers": null,
    "wither_rose": null,
    "wither_skeleton_skull": null,
    "wither_skeleton_wall_skull": null,

@@ -3,13 +3,15 @@
use std::{
    collections::{BTreeMap, BTreeSet},
    fmt::Debug,
    hash::Hash,
    path::{Path, PathBuf},
};

use anyhow::{Context, Result};
use indexmap::IndexSet;
use bincode::{Decode, Encode};
use clap::ValueEnum;
use regex::{Regex, RegexSet};
use serde::{Deserialize, Serialize};
use serde::Serialize;

use crate::{
    io::fs::FileMetaVersion,

@@ -24,7 +26,7 @@ use crate::{
///
/// Increase when the generation of processed regions from region data changes
/// (usually because of updated resource data)
pub const REGION_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(3);
pub const REGION_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(7);

/// MinedMap map tile data version number
///

@@ -36,7 +38,7 @@ pub const MAP_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(0);
///
/// Increase when the generation of lightmap tiles from region data changes
/// (usually because of updated resource data)
pub const LIGHTMAP_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(3);
pub const LIGHTMAP_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(5);

/// MinedMap mipmap data version number
///

@@ -46,7 +48,7 @@ pub const MIPMAP_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(0);
/// MinedMap processed entity data version number
///
/// Increase when entity collection changes because of code changes.
pub const ENTITIES_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(0);
pub const ENTITIES_FILE_META_VERSION: FileMetaVersion = FileMetaVersion(3);

/// Coordinate pair of a generated tile
///

@@ -85,7 +87,7 @@ impl TileCoordMap {
}

/// Data structure for storing chunk data between processing and rendering steps
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Encode, Decode)]
pub struct ProcessedChunk {
    /// Block type data
    pub blocks: Box<layer::BlockArray>,

@@ -96,18 +98,18 @@ pub struct ProcessedChunk {
}

/// Data structure for storing region data between processing and rendering steps
#[derive(Debug, Default, Serialize, Deserialize)]
#[derive(Debug, Default, Encode, Decode)]
pub struct ProcessedRegion {
    /// List of biomes used in the region
    ///
    /// Indexed by [ProcessedChunk] biome data
    pub biome_list: IndexSet<Biome>,
    pub biome_list: Vec<Biome>,
    /// Processed chunk data
    pub chunks: ChunkArray<Option<Box<ProcessedChunk>>>,
}

/// Data structure for storing entity data between processing and collection steps
#[derive(Debug, Default, Serialize, Deserialize)]
#[derive(Debug, Default, Encode, Decode)]
pub struct ProcessedEntities {
    /// List of block entities
    pub block_entities: Vec<BlockEntity>,

@@ -134,10 +136,14 @@ pub enum TileKind {
pub struct Config {
    /// Number of threads for parallel processing
    pub num_threads: usize,
    /// Number of threads for initial parallel processing
    pub num_threads_initial: usize,
    /// Path of input region directory
    pub region_dir: PathBuf,
    /// Path of input `level.dat` file
    pub level_dat_path: PathBuf,
    /// Path of input `level.dat_old` file
    pub level_dat_old_path: PathBuf,
    /// Base path for storage of rendered tile data
    pub output_dir: PathBuf,
    /// Path for storage of intermediate processed data files

@@ -150,6 +156,8 @@ pub struct Config {
    pub viewer_info_path: PathBuf,
    /// Path of viewer entities file
    pub viewer_entities_path: PathBuf,
    /// Format of generated map tiles
    pub image_format: ImageFormat,
    /// Sign text filter patterns
    pub sign_patterns: RegexSet,
    /// Sign text transformation pattern

@@ -164,9 +172,13 @@ impl Config {
            Some(threads) => threads,
            None => 1,
        };
        let num_threads_initial = args.jobs_initial.unwrap_or(num_threads);

        let region_dir = [&args.input_dir, Path::new("region")].iter().collect();
        let level_dat_path = [&args.input_dir, Path::new("level.dat")].iter().collect();
        let level_dat_old_path = [&args.input_dir, Path::new("level.dat_old")]
            .iter()
            .collect();
        let processed_dir: PathBuf = [&args.output_dir, Path::new("processed")].iter().collect();
        let entities_dir: PathBuf = [&processed_dir, Path::new("entities")].iter().collect();
        let entities_path_final = [&entities_dir, Path::new("entities.bin")].iter().collect();

@@ -181,14 +193,17 @@ impl Config {

        Ok(Config {
            num_threads,
            num_threads_initial,
            region_dir,
            level_dat_path,
            level_dat_old_path,
            output_dir: args.output_dir.clone(),
            processed_dir,
            entities_dir,
            entities_path_final,
            viewer_info_path,
            viewer_entities_path,
            image_format: args.image_format,
            sign_patterns,
            sign_transforms,
        })

@@ -264,14 +279,39 @@ impl Config {
        [&self.output_dir, Path::new(&dir)].iter().collect()
    }

    /// Returns the file extension for the configured image format
    pub fn tile_extension(&self) -> &'static str {
        match self.image_format {
            ImageFormat::Png => "png",
            ImageFormat::Webp => "webp",
        }
    }
    /// Returns the configured image format for the image library
    pub fn tile_image_format(&self) -> image::ImageFormat {
        match self.image_format {
            ImageFormat::Png => image::ImageFormat::Png,
            ImageFormat::Webp => image::ImageFormat::WebP,
        }
    }

    /// Constructs the path of an output tile image
    pub fn tile_path(&self, kind: TileKind, level: usize, coords: TileCoords) -> PathBuf {
        let filename = coord_filename(coords, "png");
        let filename = coord_filename(coords, self.tile_extension());
        let dir = self.tile_dir(kind, level);
        [Path::new(&dir), Path::new(&filename)].iter().collect()
    }
}

/// Format of generated map tiles
#[derive(Debug, Clone, Copy, Default, ValueEnum)]
pub enum ImageFormat {
    /// Generate PNG images
    #[default]
    Png,
    /// Generate WebP images
    Webp,
}

/// Copies a chunk image into a region tile
pub fn overlay_chunk<I, J>(image: &mut I, chunk: &J, coords: ChunkCoords)
where

@@ -78,23 +78,22 @@ impl<'a> EntityCollector<'a> {
        let mut output = ProcessedEntities::default();

        for source_path in sources {
            let mut source: ProcessedEntities =
                match storage::read_file(source_path.as_ref(), storage::Format::Json) {
                    Ok(source) => source,
                    Err(err) => {
                        warn!(
                            "Failed to read entity data file {}: {:?}",
                            source_path.as_ref().display(),
                            err,
                        );
                        continue;
                    }
                };
            let mut source: ProcessedEntities = match storage::read_file(source_path.as_ref()) {
                Ok(source) => source,
                Err(err) => {
                    warn!(
                        "Failed to read entity data file {}: {:?}",
                        source_path.as_ref().display(),
                        err,
                    );
                    continue;
                }
            };

            output.block_entities.append(&mut source.block_entities);
        }

        storage::write(file, &output, storage::Format::Json).context("Failed to write entity data")
        storage::write(file, &output).context("Failed to write entity data")
    }

    /// Runs the mipmap generation

@@ -61,6 +61,8 @@ struct Metadata<'t> {
    spawn: Spawn,
    /// Enabled MinedMap features
    features: Features,
    /// Format of generated map tiles
    tile_extension: &'static str,
}

/// Viewer entity JSON data structure

@@ -122,7 +124,14 @@ impl<'a> MetadataWriter<'a> {

    /// Reads and deserializes the `level.dat` of the Minecraft save data
    fn read_level_dat(&self) -> Result<de::LevelDat> {
        crate::nbt::data::from_file(&self.config.level_dat_path).context("Failed to read level.dat")
        let res = crate::nbt::data::from_file(&self.config.level_dat_path);
        if res.is_err() {
            if let Ok(level_dat_old) = crate::nbt::data::from_file(&self.config.level_dat_old_path)
            {
                return Ok(level_dat_old);
            }
        }
        res.context("Failed to read level.dat")
    }

    /// Generates [Spawn] data from a [de::LevelDat]

@@ -170,9 +179,8 @@ impl<'a> MetadataWriter<'a> {

    /// Generates [Entities] data from collected entity lists
    fn entities(&self) -> Result<Entities> {
        let data: ProcessedEntities =
            storage::read_file(&self.config.entities_path_final, storage::Format::Json)
                .context("Failed to read entity data file")?;
        let data: ProcessedEntities = storage::read_file(&self.config.entities_path_final)
            .context("Failed to read entity data file")?;

        let ret = Entities {
            signs: data

@@ -205,6 +213,7 @@ impl<'a> MetadataWriter<'a> {
            mipmaps: Vec::new(),
            spawn: Self::spawn(&level_dat),
            features,
            tile_extension: self.config.tile_extension(),
        };

        for tile_map in self.tiles.iter() {

src/core/mod.rs (141 lines changed)

@@ -10,31 +10,46 @@ mod tile_merger;
mod tile_mipmapper;
mod tile_renderer;

use std::path::PathBuf;
use std::{
    path::PathBuf,
    sync::mpsc::{self, Receiver},
    thread,
    time::Duration,
};

use anyhow::{Context, Result};
use clap::Parser;
use git_version::git_version;

use common::Config;
use common::{Config, ImageFormat};
use metadata_writer::MetadataWriter;
use notify::{RecommendedWatcher, RecursiveMode, Watcher as _};
use rayon::ThreadPool;
use region_processor::RegionProcessor;
use tile_mipmapper::TileMipmapper;
use tile_renderer::TileRenderer;
use tokio::runtime::Runtime;
use tracing::{info, warn};

use self::entity_collector::EntityCollector;

/// MinedMap version number
const VERSION: &str = git_version!(
    args = ["--abbrev=7", "--match=v*", "--dirty=-modified"],
    cargo_prefix = "v",
);
/// Returns the MinedMap version number
fn version() -> &'static str {
    option_env!("MINEDMAP_VERSION").unwrap_or(
        git_version!(
            args = ["--abbrev=7", "--match=v*", "--dirty=-modified"],
            cargo_prefix = "v",
        )
        .strip_prefix("v")
        .unwrap(),
    )
}

/// Command line arguments for minedmap CLI
#[derive(Debug, Parser)]
#[command(
    about,
    version = VERSION.strip_prefix("v").unwrap(),
    version = version(),
    max_term_width = 100,
)]
pub struct Args {

@@ -44,9 +59,29 @@ pub struct Args {
    /// use one thread per logical CPU core.
    #[arg(short, long)]
    pub jobs: Option<usize>,
    /// Number of parallel threads to use for initial processing
    ///
    /// Passing this option only makes sense with --watch. The first run after
    /// starting MinedMap will use as many parallel jobs as configured using
    /// --jobs-initial, while subsequent regenerations of tiles will use the
    /// number configured using --jobs.
    ///
    /// If not given, the value from the --jobs option is used.
    #[arg(long)]
    pub jobs_initial: Option<usize>,
    /// Enable verbose messages
    #[arg(short, long)]
    pub verbose: bool,
    /// Watch for file changes and regenerate tiles automatically instead of
    /// exiting after generation
    #[arg(long)]
    pub watch: bool,
    /// Minimum delay between map generation cycles in watch mode
    #[arg(long, value_parser = humantime::parse_duration, default_value = "30s")]
    pub watch_delay: Duration,
    /// Format of generated map tiles
    #[arg(long, value_enum, default_value_t)]
    pub image_format: ImageFormat,
    /// Prefix for text of signs to show on the map
    #[arg(long)]
    pub sign_prefix: Vec<String>,

@@ -71,14 +106,73 @@ pub struct Args {
    pub output_dir: PathBuf,
}

/// Configures the Rayon thread pool for parallel processing
fn setup_threads(num_threads: usize) -> Result<()> {
/// Configures a Rayon thread pool for parallel processing
fn setup_threads(num_threads: usize) -> Result<ThreadPool> {
    rayon::ThreadPoolBuilder::new()
        .num_threads(num_threads)
        .build_global()
        .build()
        .context("Failed to configure thread pool")
}

/// Runs all MinedMap generation steps, updating all tiles as needed
fn generate(config: &Config, rt: &Runtime) -> Result<()> {
    let regions = RegionProcessor::new(config).run()?;
    TileRenderer::new(config, rt, &regions).run()?;
    let tiles = TileMipmapper::new(config, &regions).run()?;
    EntityCollector::new(config, &regions).run()?;
    MetadataWriter::new(config, &tiles).run()
}

/// Creates a file watcher for the input directory
fn create_watcher(args: &Args) -> Result<(RecommendedWatcher, Receiver<()>)> {
    let (tx, rx) = mpsc::sync_channel::<()>(1);
    let mut watcher = notify::recommended_watcher(move |res| {
        // Ignore errors - we already have a watch trigger queued if try_send() fails
        let event: notify::Event = match res {
            Ok(event) => event,
            Err(err) => {
                warn!("Watch error: {err}");
                return;
            }
        };
        let notify::EventKind::Modify(modify_kind) = event.kind else {
            return;
        };
        if !matches!(
            modify_kind,
            notify::event::ModifyKind::Data(_)
                | notify::event::ModifyKind::Name(notify::event::RenameMode::To)
        ) {
            return;
        }
        if !event
            .paths
            .iter()
            .any(|path| path.ends_with("level.dat") || path.extension() == Some("mcu".as_ref()))
        {
            return;
        }
        let _ = tx.try_send(());
    })?;
    watcher.watch(&args.input_dir, RecursiveMode::Recursive)?;
    Ok((watcher, rx))
}

/// Watches the data directory for changes, returning when a change has happened
fn wait_watcher(args: &Args, watch_channel: &Receiver<()>) -> Result<()> {
    info!("Watching for changes...");
    let () = watch_channel
        .recv()
        .context("Failed to read watch event channel")?;
    info!("Change detected.");

    thread::sleep(args.watch_delay);

    let _ = watch_channel.try_recv();

    Ok(())
}

/// MinedMap CLI main function
pub fn cli() -> Result<()> {
    let args = Args::parse();

@@ -93,17 +187,28 @@ pub fn cli() -> Result<()> {
        .with_target(false)
        .init();

    setup_threads(config.num_threads)?;
    let mut pool = setup_threads(config.num_threads_initial)?;

    let rt = tokio::runtime::Builder::new_current_thread()
        .build()
        .unwrap();

    let regions = RegionProcessor::new(&config).run()?;
    TileRenderer::new(&config, &rt, &regions).run()?;
    let tiles = TileMipmapper::new(&config, &regions).run()?;
    EntityCollector::new(&config, &regions).run()?;
    MetadataWriter::new(&config, &tiles).run()?;
    let watch = args.watch.then(|| create_watcher(&args)).transpose()?;

    Ok(())
    pool.install(|| generate(&config, &rt))?;

    let Some((_watcher, watch_channel)) = watch else {
        // watch mode disabled
        return Ok(());
    };

    if config.num_threads != config.num_threads_initial {
        pool = setup_threads(config.num_threads)?;
    }
    pool.install(move || {
        loop {
            wait_watcher(&args, &watch_channel)?;
            generate(&config, &rt)?;
        }
    })
}
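The change above replaces Rayon's global pool (`build_global()`) with explicitly built `ThreadPool`s, so the initial full generation (`--jobs-initial`) and later watch-mode cycles (`--jobs`) can run with different thread counts. A minimal sketch of that pattern follows; the thread counts and the toy parallel workload are placeholders standing in for the generation steps.

```rust
use anyhow::{Context, Result};
use rayon::prelude::*;

fn main() -> Result<()> {
    // First pass: a wide pool for the initial full run.
    let initial = rayon::ThreadPoolBuilder::new()
        .num_threads(8)
        .build()
        .context("Failed to configure thread pool")?;
    // install() runs the closure inside that pool, so any rayon parallelism
    // within it uses the pool's thread count.
    let sum: u64 = initial.install(|| (0..1_000u64).into_par_iter().sum());
    println!("initial pass done: {sum}");

    // Later passes: a smaller pool; the same parallel code picks up the new
    // thread count without any other changes.
    let steady = rayon::ThreadPoolBuilder::new()
        .num_threads(2)
        .build()
        .context("Failed to configure thread pool")?;
    steady.install(|| (0..1_000u64).into_par_iter().for_each(|_| {}));
    Ok(())
}
```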
@ -4,6 +4,8 @@ use std::{ffi::OsStr, path::PathBuf, sync::mpsc, time::SystemTime};
|
|||
|
||||
use anyhow::{Context, Result};
|
||||
use enum_map::{Enum, EnumMap};
|
||||
use indexmap::IndexSet;
|
||||
use minedmap_resource::Biome;
|
||||
use rayon::prelude::*;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
|
@ -43,6 +45,37 @@ enum RegionProcessorStatus {
|
|||
ErrorMissing,
|
||||
}
|
||||
|
||||
/// Data of a region being processed by a [SingleRegionProcessor]
|
||||
#[derive(Debug)]
|
||||
struct SingleRegionData {
|
||||
/// [IndexSet] of biomes used by the processed region
|
||||
biome_list: IndexSet<Biome>,
|
||||
/// Processed region chunk intermediate data
|
||||
chunks: ChunkArray<Option<Box<ProcessedChunk>>>,
|
||||
/// Lightmap intermediate data
|
||||
lightmap: image::GrayAlphaImage,
|
||||
/// Processed entity intermediate data
|
||||
entities: ProcessedEntities,
|
||||
/// True if any unknown block or biome types were encountered during processing
|
||||
has_unknown: bool,
|
||||
}
|
||||
|
||||
impl Default for SingleRegionData {
|
||||
fn default() -> Self {
|
||||
/// Width/height of the region data
|
||||
const N: u32 = (BLOCKS_PER_CHUNK * CHUNKS_PER_REGION) as u32;
|
||||
|
||||
let lightmap = image::GrayAlphaImage::new(N, N);
|
||||
Self {
|
||||
biome_list: Default::default(),
|
||||
chunks: Default::default(),
|
||||
lightmap,
|
||||
entities: Default::default(),
|
||||
has_unknown: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles processing for a single region
|
||||
struct SingleRegionProcessor<'a> {
|
||||
/// Registry of known block types
|
||||
|
@ -73,22 +106,13 @@ struct SingleRegionProcessor<'a> {
|
|||
lightmap_needed: bool,
|
||||
/// True if entity output file needs to be updated
|
||||
entities_needed: bool,
|
||||
/// Processed region intermediate data
|
||||
processed_region: ProcessedRegion,
|
||||
/// Lightmap intermediate data
|
||||
lightmap: image::GrayAlphaImage,
|
||||
/// Processed entity intermediate data
|
||||
entities: ProcessedEntities,
|
||||
/// True if any unknown block or biome types were encountered during processing
|
||||
has_unknown: bool,
|
||||
/// Format of generated map tiles
|
||||
image_format: image::ImageFormat,
|
||||
}
|
||||
|
||||
impl<'a> SingleRegionProcessor<'a> {
|
||||
/// Initializes a [SingleRegionProcessor]
|
||||
fn new(processor: &'a RegionProcessor<'a>, coords: TileCoords) -> Result<Self> {
|
||||
/// Width/height of the region data
|
||||
const N: u32 = (BLOCKS_PER_CHUNK * CHUNKS_PER_REGION) as u32;
|
||||
|
||||
let input_path = processor.config.region_path(coords);
|
||||
let input_timestamp = fs::modified_timestamp(&input_path)?;
|
||||
|
||||
|
@ -105,10 +129,6 @@ impl<'a> SingleRegionProcessor<'a> {
|
|||
let lightmap_needed = Some(input_timestamp) > lightmap_timestamp;
|
||||
let entities_needed = Some(input_timestamp) > entities_timestamp;
|
||||
|
||||
let processed_region = ProcessedRegion::default();
|
||||
let lightmap = image::GrayAlphaImage::new(N, N);
|
||||
let entities = ProcessedEntities::default();
|
||||
|
||||
Ok(SingleRegionProcessor {
|
||||
block_types: &processor.block_types,
|
||||
biome_types: &processor.biome_types,
|
||||
|
@ -124,10 +144,7 @@ impl<'a> SingleRegionProcessor<'a> {
|
|||
output_needed,
|
||||
lightmap_needed,
|
||||
entities_needed,
|
||||
processed_region,
|
||||
lightmap,
|
||||
entities,
|
||||
has_unknown: false,
|
||||
image_format: processor.config.tile_image_format(),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -151,15 +168,14 @@ impl<'a> SingleRegionProcessor<'a> {
	/// Saves processed region data
	///
	/// The timestamp is the time of the last modification of the input region data.
	fn save_region(&self) -> Result<()> {
	fn save_region(&self, processed_region: &ProcessedRegion) -> Result<()> {
		if !self.output_needed {
			return Ok(());
		}

		storage::write_file(
			&self.output_path,
			&self.processed_region,
			storage::Format::Bincode,
			processed_region,
			REGION_FILE_META_VERSION,
			self.input_timestamp,
		)
@ -168,7 +184,7 @@ impl<'a> SingleRegionProcessor<'a> {
	/// Saves a lightmap tile
	///
	/// The timestamp is the time of the last modification of the input region data.
	fn save_lightmap(&self) -> Result<()> {
	fn save_lightmap(&self, lightmap: &image::GrayAlphaImage) -> Result<()> {
		if !self.lightmap_needed {
			return Ok(());
		}
@ -178,8 +194,8 @@ impl<'a> SingleRegionProcessor<'a> {
			LIGHTMAP_FILE_META_VERSION,
			self.input_timestamp,
			|file| {
				self.lightmap
					.write_to(file, image::ImageFormat::Png)
				lightmap
					.write_to(file, self.image_format)
					.context("Failed to save image")
			},
		)
@ -188,28 +204,32 @@ impl<'a> SingleRegionProcessor<'a> {
	/// Saves processed entity data
	///
	/// The timestamp is the time of the last modification of the input region data.
	fn save_entities(&mut self) -> Result<()> {
	fn save_entities(&self, entities: &mut ProcessedEntities) -> Result<()> {
		if !self.entities_needed {
			return Ok(());
		}

		self.entities.block_entities.sort_unstable();
		entities.block_entities.sort_unstable();

		storage::write_file(
			&self.entities_path,
			&self.entities,
			storage::Format::Json,
			entities,
			ENTITIES_FILE_META_VERSION,
			self.input_timestamp,
		)
	}

	/// Processes a single chunk
	fn process_chunk(&mut self, chunk_coords: ChunkCoords, data: world::de::Chunk) -> Result<()> {
	fn process_chunk(
		&self,
		data: &mut SingleRegionData,
		chunk_coords: ChunkCoords,
		chunk_data: world::de::Chunk,
	) -> Result<()> {
		let (chunk, has_unknown) =
			world::chunk::Chunk::new(&data, self.block_types, self.biome_types)
			world::chunk::Chunk::new(&chunk_data, self.block_types, self.biome_types)
				.with_context(|| format!("Failed to decode chunk {:?}", chunk_coords))?;
		self.has_unknown |= has_unknown;
		data.has_unknown |= has_unknown;

		if self.output_needed || self.lightmap_needed {
			if let Some(layer::LayerData {
@ -217,11 +237,11 @@ impl<'a> SingleRegionProcessor<'a> {
				biomes,
				block_light,
				depths,
			}) = world::layer::top_layer(&mut self.processed_region.biome_list, &chunk)
			}) = world::layer::top_layer(&mut data.biome_list, &chunk)
			.with_context(|| format!("Failed to process chunk {:?}", chunk_coords))?
			{
				if self.output_needed {
					self.processed_region.chunks[chunk_coords] = Some(Box::new(ProcessedChunk {
					data.chunks[chunk_coords] = Some(Box::new(ProcessedChunk {
						blocks,
						biomes,
						depths,
@ -230,7 +250,7 @@ impl<'a> SingleRegionProcessor<'a> {

				if self.lightmap_needed {
					let chunk_lightmap = Self::render_chunk_lightmap(block_light);
					overlay_chunk(&mut self.lightmap, &chunk_lightmap, chunk_coords);
					overlay_chunk(&mut data.lightmap, &chunk_lightmap, chunk_coords);
				}
			}
		}
@ -242,20 +262,21 @@ impl<'a> SingleRegionProcessor<'a> {
					chunk_coords,
				)
			})?;
			self.entities.block_entities.append(&mut block_entities);
			data.entities.block_entities.append(&mut block_entities);
		}

		Ok(())
	}

	/// Processes the chunks of the region
	fn process_chunks(&mut self) -> Result<()> {
		crate::nbt::region::from_file(&self.input_path)?
			.foreach_chunk(|chunk_coords, data| self.process_chunk(chunk_coords, data))
	fn process_chunks(&self, data: &mut SingleRegionData) -> Result<()> {
		crate::nbt::region::from_file(&self.input_path)?.foreach_chunk(
			|chunk_coords, chunk_data| self.process_chunk(data, chunk_coords, chunk_data),
		)
	}

	/// Processes the region
	fn run(mut self) -> Result<RegionProcessorStatus> {
	fn run(&self) -> Result<RegionProcessorStatus> {
		if !self.output_needed && !self.lightmap_needed && !self.entities_needed {
			debug!(
				"Skipping unchanged region r.{}.{}.mca",
@ -269,7 +290,9 @@ impl<'a> SingleRegionProcessor<'a> {
			self.coords.x, self.coords.z
		);

		if let Err(err) = self.process_chunks() {
		let mut data = SingleRegionData::default();

		if let Err(err) = self.process_chunks(&mut data) {
			if self.output_timestamp.is_some()
				&& self.lightmap_timestamp.is_some()
				&& self.entities_timestamp.is_some()
@ -288,11 +311,16 @@ impl<'a> SingleRegionProcessor<'a> {
			}
		}

		self.save_region()?;
		self.save_lightmap()?;
		self.save_entities()?;
		let processed_region = ProcessedRegion {
			biome_list: data.biome_list.into_iter().collect(),
			chunks: data.chunks,
		};

		Ok(if self.has_unknown {
		self.save_region(&processed_region)?;
		self.save_lightmap(&data.lightmap)?;
		self.save_entities(&mut data.entities)?;

		Ok(if data.has_unknown {
			RegionProcessorStatus::OkWithUnknown
		} else {
			RegionProcessorStatus::Ok
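Note: the *_needed flags computed in new() above compare Option<SystemTime> values directly. The output timestamps are None when the corresponding output file does not exist yet, and Option's derived ordering treats None as smaller than any Some, so a missing output is always considered outdated. A minimal standalone illustration of that comparison (the timestamps are made up):

use std::time::{Duration, SystemTime, UNIX_EPOCH};

fn main() {
	let input: SystemTime = UNIX_EPOCH + Duration::from_secs(1_700_000_100);
	let missing_output: Option<SystemTime> = None;
	let stale_output = Some(UNIX_EPOCH + Duration::from_secs(1_700_000_000));
	let fresh_output = Some(UNIX_EPOCH + Duration::from_secs(1_700_000_200));

	// `Some(_) > None` is always true, so a missing output is always regenerated.
	assert!(Some(input) > missing_output);
	// An output older than the input is regenerated ...
	assert!(Some(input) > stale_output);
	// ... while an up-to-date output is skipped.
	assert!(!(Some(input) > fresh_output));
}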
@ -144,7 +144,7 @@ where
		}

		image
			.write_to(file, image::ImageFormat::Png)
			.write_to(file, self.config.tile_image_format())
			.context("Failed to save image")
	}
}
@ -16,7 +16,7 @@ use tracing::{debug, info};
use super::{common::*, region_group::RegionGroup};
use crate::{
	io::{fs, storage},
	resource::{block_color, needs_biome, Colorf},
	resource::{Colorf, block_color, needs_biome},
	types::*,
	util::coord_offset,
};
@ -105,8 +105,7 @@ impl<'a> TileRenderer<'a> {

		region_loader
			.get_or_try_init(|| async {
				storage::read_file(&processed_path, storage::Format::Bincode)
					.context("Failed to load processed region data")
				storage::read_file(&processed_path).context("Failed to load processed region data")
			})
			.await
			.cloned()
@ -134,7 +133,7 @@ impl<'a> TileRenderer<'a> {
	/// Hashing the value as a single u32 is more efficient than hashing
	/// the tuple elements separately.
	fn biome_key((dx, dz, index): (i8, i8, u16)) -> u32 {
		(dx as u8 as u32) | (dz as u8 as u32) << 8 | (index as u32) << 16
		(dx as u8 as u32) | ((dz as u8 as u32) << 8) | ((index as u32) << 16)
	}

	/// One quadrant of the kernel used to smooth biome edges
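The biome_key change above only adds explicit parentheses; shifts already bind tighter than bitwise OR in Rust, so the computed value is unchanged. As a standalone illustration of the packing, a small sketch with made-up inputs showing how the (dx, dz, index) triple ends up in one u32:

fn biome_key((dx, dz, index): (i8, i8, u16)) -> u32 {
	// Reinterpret the signed offsets as bytes, then place dx in bits 0..8,
	// dz in bits 8..16 and the biome index in bits 16..32.
	(dx as u8 as u32) | ((dz as u8 as u32) << 8) | ((index as u32) << 16)
}

fn main() {
	// dx = -1 becomes 0xff, dz = 2 stays 0x02, index 0x0123 fills the high bits.
	assert_eq!(biome_key((-1, 2, 0x0123)), 0x0123_02ff);
}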
@ -187,7 +186,7 @@ impl<'a> TileRenderer<'a> {

		for ((region_x, region_z, index), w) in weights.into_values() {
			let region = region_group.get(region_x, region_z)?;
			let biome = region.biome_list.get_index(index.into())?;
			let biome = region.biome_list.get(usize::from(index))?;

			total += w;
			color += w * block_color(block, Some(biome), depth.0 as f32);
@ -304,7 +303,7 @@ impl<'a> TileRenderer<'a> {
			processed_timestamp,
			|file| {
				image
					.write_to(file, image::ImageFormat::Png)
					.write_to(file, self.config.tile_image_format())
					.context("Failed to save image")
			},
		)?;
@ -10,28 +10,16 @@ use std::{
};

use anyhow::{Context, Result};
use serde::{de::DeserializeOwned, Serialize};
use bincode::{Decode, Encode};

use super::fs;

/// Storage format
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Format {
	/// Encode as Bincode
	///
	/// Bincode is more efficient than JSON, but cannot handle many of
	/// serde's features like flatten, conditional skipping, ...
	Bincode,
	/// Encode as JSON
	Json,
}
/// Bincode configuration
const BINCODE_CONFIG: bincode::config::Configuration = bincode::config::standard();

/// Serializes data and writes it to a writer
pub fn write<W: Write, T: Serialize>(writer: &mut W, value: &T, format: Format) -> Result<()> {
	let data = match format {
		Format::Bincode => bincode::serialize(value)?,
		Format::Json => serde_json::to_vec(value)?,
	};
pub fn write<W: Write, T: Encode>(writer: &mut W, value: &T) -> Result<()> {
	let data = bincode::encode_to_vec(value, BINCODE_CONFIG)?;
	let len = u32::try_from(data.len())?;
	let compressed = zstd::bulk::compress(&data, 1)?;
	drop(data);
@ -45,18 +33,21 @@ pub fn write<W: Write, T: Serialize>(writer: &mut W, value: &T, format: Format)
/// Serializes data and stores it in a file
///
/// A timestamp is stored in an assiciated metadata file.
pub fn write_file<T: Serialize>(
pub fn write_file<T: Encode>(
	path: &Path,
	value: &T,
	format: Format,
	version: fs::FileMetaVersion,
	timestamp: SystemTime,
) -> Result<()> {
	fs::create_with_timestamp(path, version, timestamp, |file| write(file, value, format))
	fs::create_with_timestamp(path, version, timestamp, |file| write(file, value))
}

/// Reads data from a reader and deserializes it
pub fn read<R: Read, T: DeserializeOwned>(reader: &mut R, format: Format) -> Result<T> {
pub fn read<R, T>(reader: &mut R) -> Result<T>
where
	R: Read,
	T: Decode<()>,
{
	let mut len_buf = [0u8; 4];
	reader.read_exact(&mut len_buf)?;
	let len = usize::try_from(u32::from_be_bytes(len_buf))?;
@ -66,18 +57,17 @@ pub fn read<R: Read, T: DeserializeOwned>(reader: &mut R, format: Format) -> Result<T> {
	let data = zstd::bulk::decompress(&compressed, len)?;
	drop(compressed);

	let value = match format {
		Format::Bincode => bincode::deserialize(&data)?,
		Format::Json => serde_json::from_slice(&data)?,
	};
	Ok(value)
	Ok(bincode::decode_from_slice(&data, BINCODE_CONFIG)?.0)
}

/// Reads data from a file and deserializes it
pub fn read_file<T: DeserializeOwned>(path: &Path, format: Format) -> Result<T> {
pub fn read_file<T>(path: &Path) -> Result<T>
where
	T: Decode<()>,
{
	(|| -> Result<T> {
		let mut file = File::open(path)?;
		read(&mut file, format)
		read(&mut file)
	})()
	.with_context(|| format!("Failed to read file {}", path.display()))
}
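The new storage format drops the JSON/Bincode switch: every value is bincode-encoded with the standard configuration, the length of the uncompressed encoding is recorded so the reader can size its decompression buffer, and the payload is compressed with zstd. A minimal, self-contained sketch of the same round trip (assuming the bincode 2.x derive feature and the zstd crate; the write side here prefixes the big-endian length itself, matching what the read function above expects):

use bincode::{Decode, Encode};

#[derive(Debug, PartialEq, Encode, Decode)]
struct Example {
	name: String,
	values: Vec<u32>,
}

fn main() -> anyhow::Result<()> {
	let config = bincode::config::standard();
	let value = Example {
		name: "r.0.0".to_string(),
		values: vec![1, 2, 3],
	};

	// Write side: encode, remember the uncompressed length, compress.
	let data = bincode::encode_to_vec(&value, config)?;
	let len = u32::try_from(data.len())?;
	let compressed = zstd::bulk::compress(&data, 1)?;
	let mut stored = len.to_be_bytes().to_vec();
	stored.extend_from_slice(&compressed);

	// Read side: recover the length, decompress, decode.
	let len = usize::try_from(u32::from_be_bytes(stored[..4].try_into()?))?;
	let decompressed = zstd::bulk::decompress(&stored[4..], len)?;
	let (decoded, _) = bincode::decode_from_slice::<Example, _>(&decompressed, config)?;
	assert_eq!(decoded, value);
	Ok(())
}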
@ -2,6 +2,9 @@
#![warn(missing_docs)]
#![warn(clippy::missing_docs_in_private_items)]

#[cfg(feature = "jemalloc-auto")]
extern crate minedmap_default_alloc;

mod core;
mod io;
mod util;
@ -39,7 +39,9 @@ pub fn to_flat_coord<const AXIS: u8>(
	chunk: ChunkCoord<AXIS>,
	block: BlockCoord<AXIS>,
) -> i32 {
	(region as i32) << (BLOCK_BITS + CHUNK_BITS) | ((chunk.0 as i32) << BLOCK_BITS | block.0 as i32)
	((region as i32) << (BLOCK_BITS + CHUNK_BITS))
		| ((chunk.0 as i32) << BLOCK_BITS)
		| (block.0 as i32)
}

/// Splits a flat (linear) coordinate into region, chunk and block numbers
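Here, too, the rewrite only adds parentheses and line breaks; the computed value is the same. Assuming the usual Minecraft constants (BLOCK_BITS = 4, i.e. 16 blocks per chunk, and CHUNK_BITS = 5, i.e. 32 chunks per region; the real constants and coordinate types live elsewhere in the crate), a quick standalone check of the packing with a simplified signature:

/// Assumed values for illustration only.
const BLOCK_BITS: u32 = 4; // 16 blocks per chunk axis
const CHUNK_BITS: u32 = 5; // 32 chunks per region axis

fn to_flat_coord(region: i8, chunk: i32, block: i32) -> i32 {
	((region as i32) << (BLOCK_BITS + CHUNK_BITS)) | (chunk << BLOCK_BITS) | block
}

fn main() {
	// Region 1, chunk 2, block 3: 1*512 + 2*16 + 3 = 547
	assert_eq!(to_flat_coord(1, 2, 3), 547);
	// Negative regions extend into negative flat coordinates.
	assert_eq!(to_flat_coord(-1, 31, 15), -1);
}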
@ -1,7 +1,8 @@
//! Processing of block entity data

use bincode::{Decode, Encode};
use minedmap_resource::{BlockFlag, BlockType};
use serde::{Deserialize, Serialize};
use serde::Serialize;

use super::{
	de,
@ -9,7 +10,7 @@ use super::{
};

/// Kind of sign block
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum SignKind {
	/// Standing sign
@ -23,7 +24,7 @@ pub enum SignKind {
}

/// Processed sign data
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Serialize)]
pub struct Sign {
	/// The kind of the sign
	pub kind: SignKind,
@ -40,10 +41,15 @@ pub struct Sign {

impl Sign {
	/// Processes a [de::BlockEntitySign] into a [Sign]
	fn new(sign: &de::BlockEntitySign, kind: SignKind, material: Option<String>) -> Sign {
	fn new(
		sign: &de::BlockEntitySign,
		kind: SignKind,
		material: Option<String>,
		data_version: u32,
	) -> Sign {
		let (front_text, back_text) = sign.text();
		let front_text = front_text.decode();
		let back_text = back_text.decode();
		let front_text = front_text.decode(data_version);
		let back_text = back_text.decode(data_version);
		Sign {
			kind,
			material,
@ -54,7 +60,7 @@ impl Sign {
}

/// Data for different kinds of [BlockEntity]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BlockEntityData {
	/// A sign block
@ -62,7 +68,7 @@ pub enum BlockEntityData {
}

/// A processed block entity
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Serialize)]
pub struct BlockEntity {
	/// Global X coordinate
	pub x: i32,
@ -77,7 +83,11 @@ pub struct BlockEntity {

impl BlockEntity {
	/// Processes a [de::BlockEntity] into a [BlockEntity]
	pub fn new(entity: &de::BlockEntity, block_type: Option<&BlockType>) -> Option<Self> {
	pub fn new(
		entity: &de::BlockEntity,
		block_type: Option<&BlockType>,
		data_version: u32,
	) -> Option<Self> {
		let wall_sign = block_type
			.map(|block_type| block_type.block_color.is(BlockFlag::WallSign))
			.unwrap_or_default();
@ -91,7 +101,7 @@ impl BlockEntity {
		let material = block_type
			.as_ref()
			.and_then(|block_type| block_type.sign_material.as_ref());
		let data = BlockEntityData::Sign(Sign::new(sign, kind, material.cloned()));
		let data = BlockEntityData::Sign(Sign::new(sign, kind, material.cloned(), data_version));

		Some(BlockEntity {
			x: entity.x,
@ -4,11 +4,11 @@
//! over different data versions as much as possible.

use std::{
	collections::{btree_map, BTreeMap},
	collections::{BTreeMap, btree_map},
	iter::{self, FusedIterator},
};

use anyhow::{bail, Context, Result};
use anyhow::{Context, Result, bail};

use super::{block_entity::BlockEntity, de, section::*};
use crate::{
@ -58,6 +58,8 @@ pub struct Chunk<'a> {
	inner: ChunkInner<'a>,
	/// Unprocessed block entities
	block_entities: &'a Vec<de::BlockEntity>,
	/// Chunk data version
	data_version: u32,
}

impl<'a> Chunk<'a> {
@ -87,6 +89,7 @@ impl<'a> Chunk<'a> {
			Chunk {
				inner,
				block_entities,
				data_version,
			},
			has_unknown,
		))
@ -292,7 +295,11 @@ impl<'a> Chunk<'a> {
			.iter()
			.map(|block_entity| {
				let block_type = self.block_type_at_block_entity(block_entity)?;
				Ok(BlockEntity::new(block_entity, block_type))
				Ok(BlockEntity::new(
					block_entity,
					block_type,
					self.data_version,
				))
			})
			.collect::<Result<_>>()?;
		Ok(entities.into_iter().flatten().collect())
@ -412,7 +419,7 @@ impl<'a> Iterator for SectionIter<'a> {
	}

	fn last(mut self) -> Option<Self::Item> {
		self.with_iter(|iter| iter.last())
		self.next_back()
	}
}
@ -2,7 +2,7 @@

use serde::Deserialize;

use super::json_text::JSONText;
use super::text_value::TextValue;

/// Element of the `palette` list of 1.18+ [block states](BlockStatesV1_18)
#[derive(Debug, Deserialize)]
@ -110,7 +110,7 @@ pub enum BiomesV0 {
#[derive(Debug, Deserialize)]
pub struct BlockEntitySignV1_20Text {
	/// Lines of sign text
	pub messages: Vec<JSONText>,
	pub messages: Vec<TextValue>,
	/// Default text color
	pub color: Option<String>,
}
@ -125,13 +125,13 @@ pub enum BlockEntitySign {
	#[serde(rename_all = "PascalCase")]
	V0 {
		/// Line 1 of the sign text
		text1: JSONText,
		text1: TextValue,
		/// Line 2 of the sign text
		text2: JSONText,
		text2: TextValue,
		/// Line 3 of the sign text
		text3: JSONText,
		text3: TextValue,
		/// Line 4 of the sign text
		text4: JSONText,
		text4: TextValue,
		/// Default text color
		color: Option<String>,
	},
@ -3,8 +3,8 @@
use std::num::NonZeroU16;

use anyhow::{Context, Result};
use bincode::{Decode, Encode};
use indexmap::IndexSet;
use serde::{Deserialize, Serialize};

use super::chunk::{Chunk, SectionIterItem};
use crate::{
@ -13,7 +13,7 @@ use crate::{
};

/// Height (Y coordinate) of a block
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Encode, Decode)]
pub struct BlockHeight(pub i32);

impl BlockHeight {
@ -97,14 +97,14 @@ impl LayerEntry<'_> {

		if self.is_empty() {
			*self.block = Some(block_type.block_color);
			if let Some(biome) = section.biomes.biome_at(section.y, coords)? {
				let (biome_index, _) = biome_list.insert_full(*biome);
				*self.biome = NonZeroU16::new(
					(biome_index + 1)
						.try_into()
						.expect("biome index not in range"),
				);
			}

			let biome = section.biomes.biome_at(section.y, coords)?;
			let (biome_index, _) = biome_list.insert_full(*biome);
			*self.biome = NonZeroU16::new(
				(biome_index + 1)
					.try_into()
					.expect("biome index not in range"),
			);
		}

		if block_type.block_color.is(BlockFlag::Water) {
@ -3,7 +3,7 @@
pub mod block_entity;
pub mod chunk;
pub mod de;
pub mod json_text;
pub mod layer;
pub mod section;
pub mod sign;
pub mod text_value;
@ -5,7 +5,7 @@

use std::fmt::Debug;

use anyhow::{bail, Context, Result};
use anyhow::{Context, Result, bail};
use num_integer::div_rem;
use tracing::debug;

@ -208,7 +208,7 @@ impl Section for SectionV0<'_> {
/// Trait for common functions of [BiomesV1_18] and [BiomesV0]
pub trait Biomes: Debug {
	/// Returns the [Biome] at a coordinate tuple inside the chunk
	fn biome_at(&self, section: SectionY, coords: SectionBlockCoords) -> Result<Option<&Biome>>;
	fn biome_at(&self, section: SectionY, coords: SectionBlockCoords) -> Result<&Biome>;
}

/// Minecraft v1.18+ section biome data
@ -226,7 +226,7 @@ pub struct BiomesV1_18<'a> {
	/// to whole i64 values.
	biomes: Option<&'a [i64]>,
	/// Biome palette indexed by entries encoded in *biomes*
	palette: Vec<Option<&'a Biome>>,
	palette: Vec<&'a Biome>,
	/// Number of bits used for each entry in *biomes*
	bits: u8,
}
@ -253,12 +253,11 @@ impl<'a> BiomesV1_18<'a> {
		let palette_types = palette
			.iter()
			.map(|entry| {
				let biome_type = biome_types.get(entry);
				if biome_type.is_none() {
				biome_types.get(entry).unwrap_or_else(|| {
					debug!("Unknown biome type: {}", entry);
					has_unknown = true;
				}
				biome_type
					biome_types.get_fallback()
				})
			})
			.collect();

@ -295,7 +294,7 @@ impl<'a> BiomesV1_18<'a> {
}

impl Biomes for BiomesV1_18<'_> {
	fn biome_at(&self, _section: SectionY, coords: SectionBlockCoords) -> Result<Option<&Biome>> {
	fn biome_at(&self, _section: SectionY, coords: SectionBlockCoords) -> Result<&Biome> {
		let index = self.palette_index_at(coords);
		Ok(*self
			.palette
@ -350,7 +349,7 @@ impl<'a> BiomesV0<'a> {
}

impl Biomes for BiomesV0<'_> {
	fn biome_at(&self, section: SectionY, coords: SectionBlockCoords) -> Result<Option<&Biome>> {
	fn biome_at(&self, section: SectionY, coords: SectionBlockCoords) -> Result<&Biome> {
		let id = match self.data {
			BiomesV0Data::IntArrayV15(data) => {
				let LayerBlockCoords { x, z } = coords.xz;
@ -370,7 +369,10 @@ impl Biomes for BiomesV0<'_> {
			}
			BiomesV0Data::ByteArray(data) => data[coords.xz.offset()] as u8,
		};
		Ok(self.biome_types.get_legacy(id))
		Ok(self
			.biome_types
			.get_legacy(id)
			.unwrap_or(self.biome_types.get_fallback()))
	}
}

@ -398,10 +400,6 @@ impl<'a> BlockLight<'a> {
		let (offset, nibble) = div_rem(coords.offset(), 2);
		let byte = block_light[offset] as u8;

		if nibble == 1 {
			byte >> 4
		} else {
			byte & 0xf
		}
		if nibble == 1 { byte >> 4 } else { byte & 0xf }
	}
}
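Minecraft stores block light as one nibble (4 bits) per block, two blocks per byte; collapsing the if/else onto one line above is a formatting-only change. A tiny standalone sketch of the same unpacking (the sample bytes are made up):

/// Extracts the 4-bit light value at `offset` from nibble-packed data.
fn block_light_at(block_light: &[u8], offset: usize) -> u8 {
	let (index, nibble) = (offset / 2, offset % 2);
	let byte = block_light[index];
	if nibble == 1 { byte >> 4 } else { byte & 0xf }
}

fn main() {
	// 0xf3: the low nibble 0x3 belongs to the even offset, the high nibble 0xf to the odd one.
	let data = [0xf3u8, 0x08];
	assert_eq!(block_light_at(&data, 0), 0x3);
	assert_eq!(block_light_at(&data, 1), 0xf);
	assert_eq!(block_light_at(&data, 2), 0x8);
	assert_eq!(block_light_at(&data, 3), 0x0);
}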
@ -1,12 +1,14 @@
//! Processing of sign text

use std::{fmt::Display, sync::Arc};
use std::fmt::Display;

use serde::{Deserialize, Serialize};
use bincode::{Decode, Encode};
use minedmap_resource::Color;
use serde::Serialize;

use super::{
	de,
	json_text::{FormattedText, FormattedTextList, JSONText},
	text_value::{FormattedText, FormattedTextList, TextValue},
};

/// Version-independent reference to (front or back) sign text
@ -16,17 +18,41 @@ pub struct RawSignText<'a> {
	///
	/// A regular sign always has 4 lines of text. The back of pre-1.20
	/// signs is represented as a [SignText] without any `messages`.
	pub messages: Vec<&'a JSONText>,
	pub messages: Vec<&'a TextValue>,
	/// Sign color
	///
	/// Defaults to "black".
	pub color: Option<&'a str>,
}

/// The color to use for signs without a color attribute ("black")
const DEFAULT_COLOR: Color = Color([0, 0, 0]);

/// Map of text colors associated with dyes (except for black)
static DYE_COLORS: phf::Map<&'static str, Color> = phf::phf_map! {
	"white" => Color([255, 255, 255]),
	"orange" => Color([255, 104, 31]),
	"magenta" => Color([255, 0, 255]),
	"light_blue" => Color([154, 192, 205]),
	"yellow" => Color([255, 255, 0]),
	"lime" => Color([191, 255, 0]),
	"pink" => Color([255, 105, 180]),
	"gray" => Color([128, 128, 128]),
	"light_gray" => Color([211, 211, 211]),
	"cyan" => Color([0, 255, 255]),
	"purple" => Color([160, 32, 240]),
	"blue" => Color([0, 0, 255]),
	"brown" => Color([139, 69, 19]),
	"green" => Color([0, 255, 0]),
	"red" => Color([255, 0, 0]),
};

impl RawSignText<'_> {
	/// Decodes the [RawSignText] into a [SignText]
	pub fn decode(&self) -> SignText {
		let color = self.color.map(|c| Arc::new(c.to_owned()));
	pub fn decode(&self, data_version: u32) -> SignText {
		let color = self
			.color
			.map(|c| DYE_COLORS.get(c).copied().unwrap_or(DEFAULT_COLOR));
		let parent = FormattedText {
			color,
			..Default::default()
@ -34,7 +60,7 @@ impl RawSignText<'_> {
		SignText(
			self.messages
				.iter()
				.map(|message| message.deserialize().linearize(&parent))
				.map(|message| message.deserialize(data_version).linearize(&parent))
				.collect(),
		)
	}
@ -79,7 +105,7 @@ impl BlockEntitySignExt for de::BlockEntitySign {
	}
}

#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Default, Serialize, PartialEq, Eq, PartialOrd, Ord, Encode, Decode)]
/// Deserialized and linearized sign text
pub struct SignText(pub Vec<FormattedTextList>);
@ -1,24 +1,28 @@
//! Newtype and helper methods for handling Minecraft Raw JSON Text
//! Newtype and helper methods for handling Minecraft text values

use std::{collections::VecDeque, fmt::Display, sync::Arc};
use std::{collections::VecDeque, fmt::Display};

use bincode::{Decode, Encode};
use minedmap_resource::Color;
use serde::{Deserialize, Serialize};

/// A span of formatted text
///
/// A [JSONText] consists of a tree of [FormattedText] nodes (canonically
/// A [TextValue] consists of a tree of [FormattedText] nodes (canonically
/// represented as a [FormattedTextTree], but other kinds are possible with
/// is handled by [DeserializedText].
///
/// Formatting that is not set in a node is inherited from the parent.
#[derive(Debug, Serialize, Deserialize, Default, PartialEq, Eq, PartialOrd, Ord)]
#[derive(
	Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Encode, Decode,
)]
pub struct FormattedText {
	#[serde(default)]
	/// Text content
	pub text: String,
	/// Text color
	#[serde(skip_serializing_if = "Option::is_none")]
	pub color: Option<Arc<String>>,
	#[serde(skip_serializing_if = "Option::is_none", with = "text_color")]
	pub color: Option<Color>,
	/// Bold formatting
	#[serde(skip_serializing_if = "Option::is_none")]
	pub bold: Option<bool>,
@ -41,7 +45,7 @@ impl FormattedText {
	pub fn inherit(self, parent: &Self) -> Self {
		FormattedText {
			text: self.text,
			color: self.color.or_else(|| parent.color.clone()),
			color: self.color.or(parent.color),
			bold: self.bold.or(parent.bold),
			italic: self.italic.or(parent.italic),
			underlined: self.underlined.or(parent.underlined),
@ -83,7 +87,7 @@ impl From<String> for FormattedTextTree {
}

/// List of [FormattedText]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Encode, Decode)]
pub struct FormattedTextList(pub Vec<FormattedText>);

impl FormattedTextList {
@ -103,9 +107,9 @@ impl Display for FormattedTextList {
	}
}

/// Raw deserialized [JSONText]
/// Raw deserialized [TextValue]
///
/// A [JSONText] can contain various different JSON types.
/// A [TextValue] can contain various different types serialized as JSON or NBT.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum DeserializedText {
@ -165,13 +169,106 @@ impl Default for DeserializedText {
	}
}

/// Minecraft Raw JSON Text
/// Minecraft raw text value
#[derive(Debug, Deserialize)]
pub struct JSONText(pub String);
pub struct TextValue(pub fastnbt::Value);

impl JSONText {
	/// Deserializes a [JSONText] into a [DeserializedText]
	pub fn deserialize(&self) -> DeserializedText {
		serde_json::from_str(&self.0).unwrap_or_default()
impl TextValue {
	/// Deserializes a [TextValue] into a [DeserializedText]
	pub fn deserialize(&self, data_version: u32) -> DeserializedText {
		// TODO: Improve error handling
		//
		// Unfortunately, there are a number of weird ways an empty sign coould
		// be encoded (for example a compound with an "" key), so for now we
		// simply interpret undecodable data as empty.
		if data_version < 4290 {
			let fastnbt::Value::String(json) = &self.0 else {
				return DeserializedText::default();
			};

			serde_json::from_str(json).unwrap_or_default()
		} else {
			fastnbt::from_value(&self.0).unwrap_or_default()
		}
	}
}

mod text_color {
	//! Helpers for serializing and deserializing [FormattedText](super::FormattedText) colors

	use minedmap_resource::Color;
	use serde::{
		Deserializer, Serializer,
		de::{self, Visitor},
		ser::Error as _,
	};

	/// Named text colors
	static COLORS: phf::Map<&'static str, Color> = phf::phf_map! {
		"black" => Color([0x00, 0x00, 0x00]),
		"dark_blue" => Color([0x00, 0x00, 0xAA]),
		"dark_green" => Color([0x00, 0xAA, 0x00]),
		"dark_aqua" => Color([0x00, 0xAA, 0xAA]),
		"dark_red" => Color([0xAA, 0x00, 0x00]),
		"dark_purple" => Color([0xAA, 0x00, 0xAA]),
		"gold" => Color([0xFF, 0xAA, 0x00]),
		"gray" => Color([0xAA, 0xAA, 0xAA]),
		"dark_gray" => Color([0x55, 0x55, 0x55]),
		"blue" => Color([0x55, 0x55, 0xFF]),
		"green" => Color([0x55, 0xFF, 0x55]),
		"aqua" => Color([0x55, 0xFF, 0xFF]),
		"red" => Color([0xFF, 0x55, 0x55]),
		"light_purple" => Color([0xFF, 0x55, 0xFF]),
		"yellow" => Color([0xFF, 0xFF, 0x55]),
		"white" => Color([0xFF, 0xFF, 0xFF]),
	};

	/// serde serialize function for [FormattedText::color](super::FormattedText::color)
	pub fn serialize<S>(color: &Option<Color>, serializer: S) -> Result<S::Ok, S::Error>
	where
		S: Serializer,
	{
		let &Some(color) = color else {
			return Err(S::Error::custom("serialize called for None sign color"));
		};

		let text = format!("#{:02x}{:02x}{:02x}", color.0[0], color.0[1], color.0[2]);
		serializer.serialize_str(&text)
	}

	/// serde [Visitor] for use by [deserialize]
	struct ColorVisitor;

	impl Visitor<'_> for ColorVisitor {
		type Value = Option<Color>;

		fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
			formatter.write_str("a string representing a color")
		}

		fn visit_str<E>(self, color: &str) -> Result<Self::Value, E>
		where
			E: de::Error,
		{
			if let Some(hex) = color.strip_prefix("#") {
				if let Ok(value) = u32::from_str_radix(hex, 16) {
					return Ok(Some(Color([
						(value >> 16) as u8,
						(value >> 8) as u8,
						value as u8,
					])));
				}
			}

			Ok(COLORS.get(color).copied())
		}
	}

	/// serde deserialize function for [FormattedText::color](super::FormattedText::color)
	pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Color>, D::Error>
	where
		D: Deserializer<'de>,
	{
		deserializer.deserialize_str(ColorVisitor)
	}
}
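The text_color helpers accept either a "#rrggbb" string or one of the 16 named Minecraft colors, and always serialize back to the hex form that the updated viewer expects. A small self-contained sketch of just that conversion (using a plain [u8; 3] instead of the crate's Color type, and omitting the named-color table for brevity):

/// Parses "#rrggbb"; named colors are omitted in this sketch.
fn parse_hex_color(color: &str) -> Option<[u8; 3]> {
	let hex = color.strip_prefix('#')?;
	let value = u32::from_str_radix(hex, 16).ok()?;
	Some([(value >> 16) as u8, (value >> 8) as u8, value as u8])
}

/// Formats a color the way the serializer above does.
fn format_color(color: [u8; 3]) -> String {
	format!("#{:02x}{:02x}{:02x}", color[0], color[1], color[2])
}

fn main() {
	let gold = parse_hex_color("#ffaa00").unwrap();
	assert_eq!(gold, [0xFF, 0xAA, 0x00]);
	assert_eq!(format_color(gold), "#ffaa00");
}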
1
viewer/.dockerignore
Normal file
@ -0,0 +1 @@
/data

3
viewer/Dockerfile
Normal file
@ -0,0 +1,3 @@
FROM docker.io/library/nginx:alpine-slim
COPY . /usr/share/nginx/html
# datadir should be mounted to: /usr/share/nginx/html/data
@ -73,7 +73,7 @@ function signIcon(material, kind) {
}

const MinedMapLayer = L.TileLayer.extend({
	initialize: function (mipmaps, layer) {
	initialize: function (mipmaps, layer, tile_extension) {
		L.TileLayer.prototype.initialize.call(this, '', {
			detectRetina: true,
			tileSize: 512,
@ -88,6 +88,7 @@ const MinedMapLayer = L.TileLayer.extend({

		this.mipmaps = mipmaps;
		this.layer = layer;
		this.ext = tile_extension;
	},

	createTile: function (coords, done) {
@ -112,7 +113,7 @@ const MinedMapLayer = L.TileLayer.extend({
			return L.Util.emptyImageUrl;

		return 'data/'+this.layer+'/'+z+'/r.'+coords.x+'.'+coords.y+'.png';
		return `data/${this.layer}/${z}/r.${coords.x}.${coords.y}.${this.ext}`;
	},
});

@ -153,25 +154,6 @@ const parseHash = function () {
	return args;
}

const colors = {
	black: '#000000',
	dark_blue: '#0000AA',
	dark_green: '#00AA00',
	dark_aqua: '#00AAAA',
	dark_red: '#AA0000',
	dark_purple: '#AA00AA',
	gold: '#FFAA00',
	gray: '#AAAAAA',
	dark_gray: '#555555',
	blue: '#5555FF',
	green: '#55FF55',
	aqua: '#55FFFF',
	red: '#FF5555',
	light_purple: '#FF55FF',
	yellow: '#FFFF55',
	white: '#FFFFFF',
};

function formatSignLine(line) {
	const el = document.createElement('span');
	el.style.whiteSpace = 'pre';
@ -180,7 +162,9 @@ function formatSignLine(line) {
		const child = document.createElement('span');
		child.textContent = span.text;

		const color = colors[span.color ?? 'black'] || colors['black'];
		let color = span.color ?? '';
		if (color[0] !== '#')
			color = '#000000';

		if (span.bold)
			child.style.fontWeight = 'bold';
@ -349,6 +333,7 @@ window.createMap = function () {
		const res = await response.json();
		const {mipmaps, spawn} = res;
		const features = res.features || {};
		const tile_extension = res.tile_extension || 'png';

		const updateParams = function () {
			const args = parseHash();
@ -386,10 +371,10 @@ window.createMap = function () {

		const overlayMaps = {};

		const mapLayer = new MinedMapLayer(mipmaps, 'map');
		const mapLayer = new MinedMapLayer(mipmaps, 'map', tile_extension);
		mapLayer.addTo(map);

		const lightLayer = new MinedMapLayer(mipmaps, 'light');
		const lightLayer = new MinedMapLayer(mipmaps, 'light', tile_extension);
		overlayMaps['Illumination'] = lightLayer;
		if (params.light)
			map.addLayer(lightLayer);