First commit

master
random-geek 2021-01-19 00:00:37 -08:00
commit 2afbc8216b
36 changed files with 3650 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/target
**/*.rs.bk

5
.vscode/settings.json vendored Normal file

@@ -0,0 +1,5 @@
{
"cSpell.words": [
"minetest"
]
}

283
Cargo.lock generated Normal file

@@ -0,0 +1,283 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "adler"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
[[package]]
name = "ansi_term"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
dependencies = [
"winapi",
]
[[package]]
name = "anyhow"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee67c11feeac938fae061b232e38e0b6d94f97a9df10e6271319325ac4c56a86"
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "byteorder"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
[[package]]
name = "cc"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "2.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
dependencies = [
"ansi_term",
"atty",
"bitflags",
"strsim",
"textwrap",
"unicode-width",
"vec_map",
]
[[package]]
name = "crc32fast"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
dependencies = [
"cfg-if",
]
[[package]]
name = "flate2"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
dependencies = [
"cfg-if",
"crc32fast",
"libc",
"miniz_oxide",
]
[[package]]
name = "hermit-abi"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
dependencies = [
"libc",
]
[[package]]
name = "libc"
version = "0.2.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
[[package]]
name = "mapeditr"
version = "0.1.0"
dependencies = [
"anyhow",
"byteorder",
"clap",
"flate2",
"memmem",
"sqlite",
"thiserror",
]
[[package]]
name = "memmem"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a64a92489e2744ce060c349162be1c5f33c6969234104dbd99ddb5feb08b8c15"
[[package]]
name = "miniz_oxide"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "pkg-config"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
[[package]]
name = "proc-macro2"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
dependencies = [
"unicode-xid",
]
[[package]]
name = "quote"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
dependencies = [
"proc-macro2",
]
[[package]]
name = "sqlite"
version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35f759dc2e373e1edd0a27da87aa9136416360c5077a23643fcd6fcdc9cb9e31"
dependencies = [
"libc",
"sqlite3-sys",
]
[[package]]
name = "sqlite3-src"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8bb25e66d026488228a97e0ad21e3d15ec5998dcd9ad73c97cc277c56a6b314"
dependencies = [
"cc",
"pkg-config",
]
[[package]]
name = "sqlite3-sys"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71fec807a1534bd13eeaaec396175d67c79bdc68df55e18a452726ec62a8fb08"
dependencies = [
"libc",
"sqlite3-src",
]
[[package]]
name = "strsim"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "syn"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc60a3d73ea6594cd712d830cc1f0390fd71542d8c8cd24e70cc54cdfd5e05d5"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76cc616c6abf8c8928e2fdcc0dbfab37175edd8fb49a4641066ad1364fdab146"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "unicode-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
[[package]]
name = "unicode-xid"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

14
Cargo.toml Normal file

@@ -0,0 +1,14 @@
[package]
name = "mapeditr"
version = "0.1.0"
authors = ["random-geek <35757396+random-geek@users.noreply.github.com>"]
edition = "2018"
[dependencies]
sqlite = "0.25"
flate2 = "1"
clap = "2"
byteorder = "1"
memmem = "0.1"
anyhow = "1"
thiserror = "1"

243
Manual.md Normal file

@@ -0,0 +1,243 @@
# The MapEditr Manual
## Introduction
MapEditr is a command-line tool for editing Minetest worlds, specifically
Minetest maps. Note that MapEditr is not a mod or plugin; it is a separate
program which operates independently of Minetest.
Minetest *worlds* are stored in the `worlds` folder within Minetest's
installation directory. Each world is a folder containing a *map database*,
usually named `map.sqlite`, among other files. The map database contains the
physical layout of that world, including all nodes (blocks) and objects (mobs,
etc.). This file is what MapEditr reads and edits.
Minetest stores map data in *mapblocks*. A single mapblock is a cubical,
16x16x16 node area of the map. The lower southwestern corner of a mapblock
(towards -X, -Y, -Z) always has coordinates divisible by 16, e.g. (0, 16, -48).
For most commands to work, the mapblocks to be read and modified must already
be generated within Minetest. This can be achieved by either exploring the area
in-game, or by using Minetest's built-in `/emergeblocks` command.
MapEditr supports map format versions 25 through 28, meaning all worlds
created since Minetest version 0.4.2-rc1 (released July 2012) should be
supported. Unsupported mapblocks will be skipped (TODO).
## General usage
`mapeditr [-h] <map> <subcommand>`
Arguments:
- `-h`: Show a help message and exit.
- `<map>`: Path to the Minetest world to edit; this can be either a world
directory or a `map.sqlite` file. Note that only worlds with SQLite map
databases are currently supported. This file will be modified, so *always* shut
down the game/server before executing the command.
- `<subcommand>`: Command to execute. See "Commands" section below.
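For example, using placeholder values, a typical invocation looks like
`mapeditr ~/.minetest/worlds/myworld vacuum`.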
### Common command arguments
- `--p1 <x> <y> <z>` and `--p2 <x> <y> <z>`: Used to select a box-shaped
area with corners at `p1` and `p2`, similarly to how WorldEdit's area selection
works. Any two opposite corners can be used. These coordinates can be found
using Minetest's F5 debug menu.
- Node/item names: arguments such as `node`, `new_node`, etc. These must be
full names, e.g. "default:stone", not just "stone".
### Other tips
Text-like arguments can be surrounded with "quotes" if they contain spaces.
Due to technical limitations, MapEditr will often leave lighting glitches. To
fix these, use Minetest's built-in `/fixlight` command, or the equivalent
WorldEdit `//fixlight` command.
## Commands
### deleteblocks
Usage: `deleteblocks --p1 x y z --p2 x y z [--invert]`
Deletes all mapblocks in the given area.
Arguments:
- `--p1, --p2`: Area to delete from. Only mapblocks fully inside this area
will be deleted.
- `--invert`: Delete only mapblocks that are fully *outside* the given
area.
**Note:** Deleting mapblocks is *not* the same as filling them with air! Mapgen
will be invoked where the blocks were deleted, and this sometimes causes
terrain glitches.
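For example, using placeholder coordinates, to delete a 160x160x160 node
region near the origin:
`mapeditr map.sqlite deleteblocks --p1 0 0 0 --p2 159 159 159`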
### fill
Usage: `fill --p1 x y z --p2 x y z [--invert] <new_node>`
Fills the given area with one node. The affected mapblocks must already be
generated for fill to work.
This command does not affect param2, node metadata, etc.
Arguments:
- `new_node`: Name of node to fill the area with.
- `--p1, --p2`: Area to fill.
- `--invert`: Fill everything *outside* the given area.
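For example, using placeholder values, to fill an area with stone:
`mapeditr map.sqlite fill --p1 -20 0 -20 --p2 20 40 20 default:stone`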
### clone
Usage: `clone --p1 x y z --p2 x y z --offset x y z`
Clone (copy) the given area to a new location.
Arguments:
- `--p1, --p2`: Area to copy from.
- `--offset`: Offset to shift the area by. For example, to copy an area 50
nodes upward (positive Y direction), use `--offset 0 50 0`.
This command copies nodes, param1, param2, and metadata. Nothing will be copied
into mapblocks that are not yet generated.
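For example, using placeholder values, to copy an area 100 nodes in the +Z
direction:
`mapeditr map.sqlite clone --p1 0 0 0 --p2 50 30 50 --offset 0 0 100`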
### overlay
Usage: `overlay [--p1 x y z] [--p2 x y z] [--invert] [--offset x y z] <input_map>`
Copy part or all of an input map into the main map.
Arguments:
- `input_map`: Path to input map file. This will not be modified.
- `--p1, --p2`: Area to copy from. If not specified, MapEditr will try to
copy everything from the input map file.
- `--invert`: If present, copy everything *outside* the given area.
- `--offset`: Offset to move nodes by when copying; default is no offset.
Currently, an offset cannot be used with an inverted selection.
This command will always copy nodes, param1 and param2, and metadata. If no
offset is used, entities and node timers may also be copied.
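For example, using placeholder values, to copy a region from a backup map into
the main map:
`mapeditr map.sqlite overlay --p1 -100 0 -100 --p2 100 60 100 backup/map.sqlite`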
### replacenodes
Usage: `replacenodes [--p1 x y z] [--p2 x y z] [--invert] <node> <new_node>`
Replace all of one node with another node. Can be used to remove unknown nodes
or swap a node that changed names.
This command does not affect param2, metadata, etc.
Arguments:
- `node`: Name of node to replace.
- `new_node`: Name of node to replace with.
- `--p1, --p2`: Area in which to replace nodes. If not specified, nodes
will be replaced across the entire map.
- `--invert`: Only replace nodes *outside* the given area.
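For example, using placeholder names, to replace an unknown node left by a
removed mod with air:
`mapeditr map.sqlite replacenodes oldmod:widget air`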
### setparam2
Usage: `setparam2 [--node <node>] [--p1 x y z] [--p2 x y z] [--invert] <param2_val>`
Set param2 values of a certain node and/or within a certain area.
Arguments:
- `param2_val`: Param2 value to set, between 0 and 255.
- `--node`: Name of node to modify. If not specified, the param2 values of
all nodes will be set.
- `--p1, --p2`: Area in which to set param2. Required if `--node` is
not specified.
- `--invert`: Only set param2 *outside* the given area.
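For example, using placeholder values, to set the param2 of one node type
within an area to 0:
`mapeditr map.sqlite setparam2 --node mymod:gate --p1 0 0 0 --p2 60 30 60 0`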
## Danger Zone!
### deletemeta
Usage: `deletemeta [--searchnode <searchnode>] [--p1 x y z] [--p2 x y z] [--invert]`
Delete metadata of a certain node and/or within a certain area. This includes
node inventories as well.
Arguments:
- `--searchnode`: Name of node to search for. If not specified, the metadata
of all nodes will be deleted.
- `--p1, --p2`: Area in which to delete metadata. Required if `searchnode` is
not specified.
- `--invert`: Only delete metadata *outside* the given area.
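For example, using a placeholder node name, to delete the metadata (including
inventories) of all chests:
`mapeditr map.sqlite deletemeta --searchnode default:chest`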
### setmetavar
Usage: `setmetavar [--searchnode <searchnode>] [--p1 x y z] [--p2 x y z] [--invert] <metakey> <metavalue>`
Set a variable in node metadata. This only works on metadata where the
variable is already set.
Arguments:
- `metakey`: Name of variable to set, e.g. `infotext`, `formspec`, etc.
- `metavalue`: Value to set variable to. This should be a string.
- `--searchnode`: Name of node to search for. If not specified, the variable
will be set for all nodes that have it.
- `--p1, --p2`: Area in which to search. Required if `searchnode` is not
specified.
- `--invert`: Only search for nodes *outside* the given area.
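For example, using placeholder values, to change the `infotext` of all wooden
signs:
`mapeditr map.sqlite setmetavar --searchnode default:sign_wall_wood infotext "Hello!"`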
### replaceininv
Usage: `replaceininv [--deletemeta] [--searchnode <searchnode>] [--p1 x y z] [--p2 x y z] [--invert] <searchitem> <replaceitem>`
Replace a certain item with another in node inventories.
To delete items instead of replacing them, use "Empty" (with a capital E) for
`replaceitem`.
Arguments:
- `searchitem`: Item to search for in node inventories.
- `replaceitem`: Item to replace with in node inventories.
- `--deletemeta`: Delete metadata of replaced items. If not specified, any
item metadata will remain unchanged.
- `--searchnode`: Name of node to replace in. If not specified, the item will
be replaced in all node inventories.
- `--p1, --p2`: Area in which to search for nodes. If not specified, items
will be replaced across the entire map.
- `--invert`: Only search for nodes *outside* the given area.
**Tip:** To only delete metadata without replacing the items, use the
`--deletemeta` flag, and make `replaceitem` the same as `searchitem`.
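For example, using a placeholder item name, to delete a discontinued item from
all node inventories:
`mapeditr map.sqlite replaceininv oldmod:old_item Empty`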
### deletetimers
Usage: `deletetimers [--searchnode <searchnode>] [--p1 x y z] [--p2 x y z] [--invert]`
Delete node timers of a certain node and/or within a certain area.
Arguments:
- `--searchnode`: Name of node to search for. If not specified, the node
timers of all nodes will be deleted.
- `--p1, --p2`: Area in which to delete node timers. Required if `searchnode`
is not specified.
- `--invert`: Only delete node timers *outside* the given area.
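For example, using a placeholder node name, to delete the node timers of all
furnaces:
`mapeditr map.sqlite deletetimers --searchnode default:furnace`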
### deleteobjects
Usage: `deleteobjects [--searchobj <searchobj>] [--items] [--p1 x y z] [--p2 x y z] [--invert]`
Delete static objects of a certain name and/or within a certain area.
Arguments:
- `--searchobj`: Name of object to search for, e.g. "boats:boat". If not
specified, all objects will be deleted.
- `--items`: Search for only item entities (dropped items). `searchobj`
determines the item name, if specified.
- `--p1, --p2`: Area in which to delete objects. If not specified, objects
will be deleted across the entire map.
- `--invert`: Only delete objects *outside* the given area.
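For example, using placeholder coordinates, to delete all objects outside a
central area:
`mapeditr map.sqlite deleteobjects --p1 -200 -100 -200 --p2 200 200 200 --invert`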
### vacuum
Usage: `vacuum`
Vacuums the database. This reduces the size of the database, but may take a long time.
All this does is perform an SQLite `VACUUM` command. This shrinks and optimizes the database by efficiently "repacking" all mapblocks.
No map data is changed or deleted.
**Note:** Because data is copied into another file, this command could require as much free disk space as is already occupied by the map.
For example, if your database is 10 GB, make sure you have **at least 10 GB** of free space!

60
README.md Normal file

@@ -0,0 +1,60 @@
# MapEditr
MapEditr is a command-line tool for relatively fast manipulation of Minetest
worlds. It can replace nodes, fill areas, combine parts of different worlds,
and much more.
This tool is functionally similar to [WorldEdit][1], but designed for large
operations that would be impractical to do using WorldEdit. Since it is mainly
optimized for speed, MapEditr is not as full-featured as in-game world editors
such as WorldEdit.
MapEditr is based on the original [MapEdit][2], but rewritten in Rust,
hence the added "r". Switching to a compiled language will make MapEditr more
robust and easier to maintain in the future.
[1]: https://github.com/Uberi/Minetest-WorldEdit
[2]: https://github.com/random-geek/MapEdit
## Installation
TODO: This section is vague.
Pre-built binaries are available on the Releases page. This is the easiest way
to get MapEditr.
To compile from source, you must have Rust installed first, which can be
downloaded from [here][3]. Then, in the MapEditr directory, run:
`cargo build --release`
The `--release` flag is important, as it optimizes the generated executable,
making it much faster.
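The compiled executable will be located in the `target/release` directory.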
[3]: https://www.rust-lang.org/tools/install
## Usage
For an overview of how MapEditr works and a listing of commands and their
usages, see [Manual.md](Manual.md).
Some useful things you can do with MapEditr:
- Remove unknown nodes left by old mods with `replacenodes`.
- Build extremely long walls and roads in seconds using `fill`.
- Combine multiple worlds or map saves with `overlay`.
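For example, a typical command (with placeholder values) might look like
`mapeditr path/to/world replacenodes oldmod:stone default:stone`; see the
manual for details.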
## Acknowledgments
The [Minetest][4] project has been rather important for the making of
MapEdit/MapEditr, for obvious reasons.
Some parts of the original MapEdit code were adapted from AndrejIT's
[map_unexplore][5] project. All due credit goes to the author(s) of that
project.
Thank you to ExeterDad and the moderators of the late Hometown server, for
partially inspiring MapEdit/MapEditr.
[4]: https://github.com/minetest/minetest
[5]: https://github.com/AndrejIT/map_unexplore

140
src/block_utils.rs Normal file

@@ -0,0 +1,140 @@
// TODO: Move this file somewhere else.
use std::collections::BTreeMap;
use crate::map_block::{MapBlock, NodeMetadataList};
use crate::spatial::{Vec3, Area};
fn block_parts_valid(a: &Area, b: &Area) -> bool {
fn part_valid(a: &Area) -> bool {
a.min.x >= 0 && a.min.y >= 0 && a.min.z >= 0
&& a.max.x < 16 && a.max.y < 16 && a.max.z < 16
}
part_valid(a) && part_valid(b) && a.max - a.min == b.max - b.min
}
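/// Copies nodes, param1/param2 values, and name-ID mappings from `src_area`
/// of `src_block` into `dst_area` of `dst_block`. Duplicate name-ID entries
/// are left for `clean_name_id_map` to remove afterward.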
pub fn merge_blocks(
src_block: &MapBlock,
dst_block: &mut MapBlock,
src_area: Area,
dst_area: Area
) {
assert!(block_parts_valid(&src_area, &dst_area));
let src_nd = src_block.node_data.get_ref();
let dst_nd = dst_block.node_data.get_mut();
let offset = dst_area.min - src_area.min;
// Warning: diff can be negative!
let diff = offset.x + offset.y * 16 + offset.z * 256;
let nimap_diff = dst_block.nimap.get_max_id().unwrap() + 1;
for (&id, name) in &src_block.nimap.map {
dst_block.nimap.insert(id + nimap_diff, name);
}
// Copy node IDs
for z in src_area.min.z ..= src_area.max.z {
for y in src_area.min.y ..= src_area.max.y {
for x in src_area.min.x ..= src_area.max.x {
let idx = x + y * 16 + z * 256;
dst_nd.nodes[(idx + diff) as usize] =
src_nd.nodes[idx as usize] + nimap_diff;
}
}
}
// Copy param1 and param2
for z in src_area.min.z ..= src_area.max.z {
for y in src_area.min.y ..= src_area.max.y {
let row_start = y * 16 + z * 256;
let start = row_start + src_area.min.x;
let end = row_start + src_area.max.x;
dst_nd.param1[(start + diff) as usize ..= (end + diff) as usize]
.clone_from_slice(
&src_nd.param1[start as usize ..= end as usize]
);
dst_nd.param2[(start + diff) as usize ..= (end + diff) as usize]
.clone_from_slice(
&src_nd.param2[start as usize ..= end as usize]
);
}
}
}
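/// Copies node metadata entries within `src_area` of `src_meta` into
/// `dst_area` of `dst_meta`, replacing any metadata already present in the
/// destination area.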
pub fn merge_metadata(
src_meta: &NodeMetadataList,
dst_meta: &mut NodeMetadataList,
src_area: Area,
dst_area: Area
) {
assert!(block_parts_valid(&src_area, &dst_area));
let offset = dst_area.min - src_area.min;
// Warning: diff can be negative!
let diff = offset.x + offset.y * 16 + offset.z * 256;
// Delete any existing metadata in the destination block
let mut to_delete = Vec::with_capacity(dst_meta.list.len());
for (&idx, _) in &dst_meta.list {
let pos = Vec3::from_u16_key(idx);
if dst_area.contains(pos) {
to_delete.push(idx);
}
}
for idx in &to_delete {
dst_meta.list.remove(idx);
}
// Copy new metadata
for (&idx, meta) in &src_meta.list {
let pos = Vec3::from_u16_key(idx);
if src_area.contains(pos) {
dst_meta.list.insert((idx as i32 + diff) as u16, meta.clone());
}
}
}
/// Culls duplicate and unused IDs from the name-ID map and node data.
pub fn clean_name_id_map(block: &mut MapBlock) {
let nd = block.node_data.get_mut();
let id_count = (block.nimap.get_max_id().unwrap() + 1) as usize;
// Determine which IDs are used.
let mut used = vec![false; id_count];
for id in &nd.nodes {
used[*id as usize] = true;
}
// Rebuild the name-ID map.
let mut new_nimap = BTreeMap::<u16, String>::new();
let mut map = vec![0u16; id_count];
for id in 0 .. id_count {
// Skip unused IDs.
if !used[id] {
continue;
}
let name = &block.nimap.map[&(id as u16)];
if let Some(first_id) = new_nimap.iter().position(|(_, v)| v == name) {
// Name is already in the map; map old, duplicate ID to the
// existing ID.
map[id] = first_id as u16;
} else {
// Name is not yet in the map; assign it to the next ID.
new_nimap.insert(new_nimap.len() as u16, name.clone());
// Map old ID to newly-inserted ID.
map[id] = new_nimap.len() as u16 - 1;
}
}
block.nimap.map = new_nimap;
// Re-assign node IDs.
for id in &mut nd.nodes {
*id = map[*id as usize];
}
}

281
src/cmd_line.rs Normal file

@@ -0,0 +1,281 @@
use std::io::prelude::*;
use std::time::{Duration, Instant};
use clap::{App, Arg, SubCommand, AppSettings, crate_version, crate_authors};
use anyhow::Context;
use crate::spatial::{Vec3, Area};
use crate::instance::{ArgType, InstArgs};
use crate::commands::{get_commands};
use crate::utils::fmt_duration;
fn arg_to_pos(p: clap::Values) -> anyhow::Result<Vec3> {
let vals: Vec<_> = p.collect();
if vals.len() != 3 {
anyhow::bail!("");
}
Ok(Vec3::new(
vals[0].parse()?,
vals[1].parse()?,
vals[2].parse()?
))
}
fn to_cmd_line_args<'a>(tup: &(ArgType, &'a str))
-> Vec<Arg<'a, 'a>>
{
let arg = tup.0.clone();
let help = tup.1;
if let ArgType::Area(req) = arg {
return vec![
Arg::with_name("p1")
.long("p1")
.allow_hyphen_values(true)
.number_of_values(3)
.value_names(&["x", "y", "z"])
.required(req)
.requires("p2")
.help(help),
Arg::with_name("p2")
.long("p2")
.allow_hyphen_values(true)
.number_of_values(3)
.value_names(&["x", "y", "z"])
.required(req)
.requires("p1")
.help(help)
];
}
vec![match arg {
ArgType::InputMapPath =>
Arg::with_name("input_map")
.required(true)
.help(help),
ArgType::Area(_) => unreachable!(),
ArgType::Invert =>
Arg::with_name("invert")
.long("invert")
.help(help),
ArgType::Offset(req) =>
Arg::with_name("offset")
.long("offset")
.allow_hyphen_values(true)
.number_of_values(3)
.value_names(&["x", "y", "z"])
.required(req)
.help(help),
ArgType::Node(req) => {
let a = Arg::with_name("node")
.required(req)
.help(help);
if !req {
a.long("node").takes_value(true)
} else {
a
}
},
ArgType::NewNode(req) => {
let a = Arg::with_name("new_node")
.required(req)
.help(help);
if !req {
a.long("newnode").takes_value(true)
} else {
a
}
},
ArgType::Param2Val(_) =>
Arg::with_name("param2_val")
.required(true)
.help(help),
}]
}
fn parse_cmd_line_args() -> anyhow::Result<InstArgs> {
/* Create the clap app */
let commands = get_commands();
let app_commands = commands.iter().map(|(cmd_name, cmd)| {
let args: Vec<_> = cmd.args.iter().flat_map(to_cmd_line_args)
.collect();
SubCommand::with_name(cmd_name)
.about(cmd.help)
.args(&args)
});
let app = App::new("MapEditr")
.about("Edits Minetest worlds/map databases.")
.after_help("For command-specific help, run: mapeditr <command> -h")
.version(crate_version!())
.author(crate_authors!())
// TODO: Move map arg to subcommands?
.arg(Arg::with_name("map")
.required(true)
.help("Path to world directory or map database to edit.")
)
.setting(AppSettings::SubcommandRequired)
.subcommands(app_commands);
/* Parse the arguments */
let matches = app.get_matches();
let sub_name = matches.subcommand_name().unwrap().to_string();
let sub_matches = matches.subcommand_matches(&sub_name).unwrap();
Ok(InstArgs {
map_path: matches.value_of("map").unwrap().to_string(),
command: sub_name,
input_map_path: sub_matches.value_of("input_map").map(str::to_string),
area: {
let p1_maybe = sub_matches.values_of("p1").map(arg_to_pos)
.transpose().context("Invalid p1 value")?;
let p2_maybe = sub_matches.values_of("p2").map(arg_to_pos)
.transpose().context("Invalid p2 value")?;
if let (Some(p1), Some(p2)) = (p1_maybe, p2_maybe) {
Some(Area::from_unsorted(p1, p2))
} else {
None
}
},
invert: sub_matches.is_present("invert"),
offset: sub_matches.values_of("offset").map(arg_to_pos).transpose()
.context("Invalid offset value")?,
node: sub_matches.value_of("node").map(str::to_string),
new_node: sub_matches.value_of("new_node").map(str::to_string),
param2_val: sub_matches.value_of("param2_val")
.map(|v| v.parse().unwrap()),
})
}
fn print_progress(done: usize, total: usize, real_start: Instant,
eta_start: Instant)
{
let progress = match total {
0 => 0.0,
_ => done as f32 / total as f32
};
let now = Instant::now();
let real_elapsed = now.duration_since(real_start);
let eta_elapsed = now.duration_since(eta_start);
let remaining = if progress >= 0.1 {
Some(Duration::from_secs_f32(
eta_elapsed.as_secs_f32() / progress * (1.0 - progress)
))
} else {
None
};
const TOTAL_BARS: usize = 25;
let num_bars = (progress * TOTAL_BARS as f32) as usize;
let bars = "=".repeat(num_bars);
eprint!(
"\r[{bars:<total_bars$}] {progress:.1}% | {elapsed} elapsed \
| {remaining} remaining",
bars=bars,
total_bars=TOTAL_BARS,
progress=progress * 100.0,
elapsed=fmt_duration(real_elapsed),
remaining=if let Some(d) = remaining {
fmt_duration(d)
} else {
String::from("--:--")
}
);
std::io::stderr().flush().unwrap();
}
pub fn run_cmd_line() {
use std::sync::mpsc;
use crate::instance::{InstState, InstEvent, spawn_compute_thread};
let args = parse_cmd_line_args().unwrap();
let (handle, status) = spawn_compute_thread(args);
const TICK: Duration = Duration::from_millis(25);
const UPDATE_INTERVAL: Duration = Duration::from_millis(250);
let mut querying_start = Instant::now();
let mut editing_start = Instant::now();
let mut last_update = Instant::now();
let mut cur_state = InstState::Ignore;
let mut last_printed = InstState::Ignore;
loop { /* Main command-line logging loop */
let now = Instant::now();
let mut forced_update = InstState::Ignore;
match status.event_rx.recv_timeout(TICK) {
Ok(event) => match event {
InstEvent::NewState(new_state) => {
// Force progress updates at the beginning and end of
// querying/editing stages.
if (cur_state == InstState::Ignore) !=
(new_state == InstState::Ignore)
{
forced_update =
if cur_state == InstState::Ignore { new_state }
else { cur_state };
}
if new_state == InstState::Querying {
// Store time for determining elapsed time.
querying_start = now;
} else if new_state == InstState::Editing {
// Store start time for determining ETA.
editing_start = now;
}
cur_state = new_state;
},
InstEvent::Log(log_type, msg) => {
if last_printed != InstState::Ignore {
eprintln!();
}
last_printed = InstState::Ignore;
eprintln!("{}: {}", log_type, msg);
}
},
Err(err) => {
// Compute thread has exited; break out of the loop.
if err == mpsc::RecvTimeoutError::Disconnected {
break;
}
}
}
let timed_update_ready = now >= last_update + UPDATE_INTERVAL;
if forced_update == InstState::Querying
|| (cur_state == InstState::Querying && timed_update_ready)
{
eprint!("\rQuerying map blocks... {} found.",
status.get().blocks_total);
last_update = now;
last_printed = InstState::Querying;
}
else if forced_update == InstState::Editing
|| (cur_state == InstState::Editing && timed_update_ready)
{
if last_printed == InstState::Querying {
eprintln!();
}
last_printed = InstState::Editing;
let s = status.get();
print_progress(s.blocks_done, s.blocks_total,
querying_start, editing_start);
last_update = now;
}
}
if last_printed != InstState::Ignore {
eprintln!("");
}
let _ = handle.join();
}

98
src/commands/clone.rs Normal file

@@ -0,0 +1,98 @@
use super::Command;
use crate::spatial::{Vec3, area_rel_block_overlap,
area_abs_block_overlap};
use crate::map_block::{MapBlock, NodeMetadataList};
use crate::block_utils::{merge_blocks, merge_metadata, clean_name_id_map};
use crate::instance::{ArgType, InstBundle};
use crate::utils::query_keys;
use crate::time_keeper::TimeKeeper;
fn clone(inst: &mut InstBundle) {
let src_area = inst.args.area.unwrap();
let offset = inst.args.offset.unwrap();
let dst_area = src_area + offset;
let mut keys = query_keys(&mut inst.db, &inst.status,
None, Some(dst_area), false, true);
// Sort blocks according to offset such that we don't read blocks that
// have already been written.
let sort_dir = offset.map(|v| if v > 0 { -1 } else { 1 });
// Subtract one from inverted axes to keep values from overflowing.
let sort_offset = sort_dir.map(|v| if v == -1 { -1 } else { 0 });
keys.sort_unstable_by_key(|k| {
(Vec3::from_block_key(*k) * sort_dir + sort_offset).to_block_key()
});
inst.status.begin_editing();
let mut tk = TimeKeeper::new();
for key in keys {
inst.status.inc_done();
let dst_data = inst.db.get_block(key).unwrap();
// TODO: is_valid_generated
let mut dst_block = MapBlock::deserialize(&dst_data).unwrap();
let mut dst_meta = NodeMetadataList::deserialize(
dst_block.metadata.get_ref()).unwrap();
let dst_pos = Vec3::from_block_key(key);
let dst_part_abs = area_abs_block_overlap(&dst_area, dst_pos)
.unwrap();
let src_part_abs = dst_part_abs - offset;
let src_blocks_needed = src_part_abs.to_touching_block_area();
for src_pos in src_blocks_needed.iterate() {
if !src_pos.is_valid_block_pos() {
continue;
}
let src_data = inst.db.get_block(src_pos.to_block_key()).unwrap();
let src_block = MapBlock::deserialize(&src_data).unwrap();
let src_meta = NodeMetadataList::deserialize(
&src_block.metadata.get_ref()).unwrap();
let src_frag_abs = area_abs_block_overlap(&src_part_abs, src_pos)
.unwrap();
let src_frag_rel = src_frag_abs - src_pos * 16;
let dst_frag_rel = area_rel_block_overlap(
&(src_frag_abs + offset), dst_pos).unwrap();
{
let _t = tk.get_timer("merge");
merge_blocks(&src_block, &mut dst_block,
src_frag_rel, dst_frag_rel);
}
{
let _t = tk.get_timer("merge_meta");
merge_metadata(&src_meta, &mut dst_meta,
src_frag_rel, dst_frag_rel);
}
}
{
let _t = tk.get_timer("name-ID map cleanup");
clean_name_id_map(&mut dst_block);
}
*dst_block.metadata.get_mut() = dst_meta.serialize(dst_block.version);
inst.db.set_block(key, &dst_block.serialize()).unwrap();
}
// tk.print();
inst.status.end_editing();
}
pub fn get_command() -> Command {
Command {
func: clone,
verify_args: None,
args: vec![
(ArgType::Area(true), "Area to clone"),
(ArgType::Offset(true), "Vector to shift nodes by")
],
help: "Clone a given area to a new location."
}
}

31
src/commands/delete_blocks.rs Normal file

@@ -0,0 +1,31 @@
use super::Command;
use crate::instance::{ArgType, InstBundle};
use crate::utils::query_keys;
fn delete_blocks(inst: &mut InstBundle) {
let keys = query_keys(&mut inst.db, &inst.status, None,
inst.args.area, inst.args.invert, false);
inst.status.begin_editing();
for key in keys {
inst.status.inc_done();
inst.db.delete_block(key).unwrap();
}
inst.status.end_editing();
}
pub fn get_command() -> Command {
Command {
func: delete_blocks,
verify_args: None,
args: vec![
(ArgType::Area(true), "Area containing blocks to delete"),
(ArgType::Invert, "Delete all blocks *outside* the area")
],
help: "Delete all map blocks in a given area."
}
}

68
src/commands/delete_objects.rs Normal file

@@ -0,0 +1,68 @@
use super::Command;
use crate::instance::{ArgType, InstBundle};
use crate::map_block::{MapBlock};
use crate::utils::{query_keys, fmt_big_num};
macro_rules! unwrap_or {
($res:expr, $alt:expr) => {
match $res {
Ok(val) => val,
Err(_) => $alt
}
}
}
fn delete_objects(inst: &mut InstBundle) {
let keys = query_keys(&mut inst.db, &mut inst.status,
None, inst.args.area, inst.args.invert, true);
inst.status.begin_editing();
let mut count: u64 = 0;
for key in keys {
inst.status.inc_done();
let data = unwrap_or!(inst.db.get_block(key), continue);
let mut block = unwrap_or!(MapBlock::deserialize(&data), continue);
let mut modified = false;
for i in (0..block.static_objects.list.len()).rev() {
let obj = &block.static_objects.list[i];
if let Some(area) = inst.args.area {
const DIV_FAC: i32 = 10_000;
let rounded_pos = obj.f_pos.map(
|v| (v - DIV_FAC / 2).div_euclid(DIV_FAC));
if area.contains(rounded_pos) == inst.args.invert {
continue;
}
}
block.static_objects.list.remove(i);
modified = true;
count += 1;
}
if modified {
inst.db.set_block(key, &block.serialize()).unwrap();
}
}
inst.status.end_editing();
inst.status.log_info(format!("Deleted {} objects.", fmt_big_num(count)));
}
pub fn get_command() -> Command {
Command {
func: delete_objects,
verify_args: None,
args: vec![
(ArgType::Area(false), "Area in which to delete objects"),
(ArgType::Invert, "Delete all objects outside the area"),
],
help: "Delete certain objects (entities)."
}
}

79
src/commands/fill.rs Normal file

@@ -0,0 +1,79 @@
use super::Command;
use crate::spatial::{Vec3, Area, area_rel_block_overlap, area_contains_block};
use crate::instance::{ArgType, InstBundle};
use crate::map_block::{MapBlock};
use crate::block_utils::clean_name_id_map;
use crate::utils::{query_keys, fmt_big_num};
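/// Sets every node within `area` (in node coordinates relative to the
/// mapblock) to the content ID `id`.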
fn fill_area(block: &mut MapBlock, area: Area, id: u16) {
let nd = block.node_data.get_mut();
for z in area.min.z ..= area.max.z {
let z_start = z * 256;
for y in area.min.y ..= area.max.y {
let zy_start = z_start + y * 16;
for x in area.min.x ..= area.max.x {
nd.nodes[(zy_start + x) as usize] = id;
}
}
}
}
fn fill(inst: &mut InstBundle) {
let area = inst.args.area.unwrap();
let node = inst.args.new_node.clone().unwrap();
let keys = query_keys(&mut inst.db, &mut inst.status,
None, Some(area), false, true);
inst.status.begin_editing();
let mut count: u64 = 0;
for key in keys {
let pos = Vec3::from_block_key(key);
let data = inst.db.get_block(key).unwrap();
let mut block = MapBlock::deserialize(&data).unwrap();
if area_contains_block(&area, pos) {
let nd = block.node_data.get_mut();
for x in &mut nd.nodes {
*x = 0;
}
block.nimap.map.clear();
block.nimap.insert(0, &node);
count += nd.nodes.len() as u64;
} else {
let slice = area_rel_block_overlap(&area, pos).unwrap();
let fill_id = block.nimap.get_id(&node).unwrap_or_else(|| {
let next = block.nimap.get_max_id().unwrap() + 1;
block.nimap.insert(next, &node);
next
});
fill_area(&mut block, slice, fill_id);
clean_name_id_map(&mut block);
count += slice.volume();
}
inst.db.set_block(key, &block.serialize()).unwrap();
inst.status.inc_done();
}
inst.status.end_editing();
inst.status.log_info(
format!("{} nodes filled.", fmt_big_num(count)).as_str());
}
pub fn get_command() -> Command {
Command {
func: fill,
verify_args: None,
args: vec![
(ArgType::Area(true), "Area to fill"),
(ArgType::NewNode(true), "Node to fill area with")
],
help: "Fill the entire area with one node."
}
}

41
src/commands/mod.rs Normal file

@@ -0,0 +1,41 @@
use std::collections::BTreeMap;
use crate::instance::{ArgType, InstArgs, InstBundle};
mod clone;
mod delete_blocks;
mod delete_objects;
mod fill;
mod overlay;
mod replace_nodes;
mod set_param2;
mod vacuum;
pub struct Command {
pub func: fn(&mut InstBundle),
pub verify_args: Option<fn(&InstArgs) -> anyhow::Result<()>>,
pub help: &'static str,
pub args: Vec<(ArgType, &'static str)>
}
pub fn get_commands() -> BTreeMap<&'static str, Command> {
let mut commands = BTreeMap::new();
macro_rules! new_cmd {
($name:expr, $module:ident) => {
commands.insert($name, $module::get_command())
}
}
new_cmd!("clone", clone);
new_cmd!("deleteblocks", delete_blocks);
new_cmd!("deleteobjects", delete_objects);
new_cmd!("fill", fill);
new_cmd!("replacenodes", replace_nodes);
new_cmd!("overlay", overlay);
new_cmd!("setparam2", set_param2);
new_cmd!("vacuum", vacuum);
commands
}

191
src/commands/overlay.rs Normal file

@@ -0,0 +1,191 @@
use super::Command;
use crate::spatial::{Vec3, Area, area_rel_block_overlap,
area_abs_block_overlap, area_contains_block, area_touches_block};
use crate::instance::{ArgType, InstArgs, InstBundle};
use crate::map_block::{MapBlock, NodeMetadataList, is_valid_generated};
use crate::block_utils::{merge_blocks, merge_metadata, clean_name_id_map};
use crate::utils::query_keys;
fn verify_args(args: &InstArgs) -> anyhow::Result<()> {
let offset_if_nonzero =
args.offset.filter(|&off| off != Vec3::new(0, 0, 0));
if args.invert && offset_if_nonzero.is_some() {
anyhow::bail!("Inverted selections cannot be offset.");
}
Ok(())
}
/// Overlay without offsetting anything.
///
/// Possible argument configurations:
/// - No arguments (copy everything)
/// - Area
/// - Area + Invert
#[inline]
fn overlay_no_offset(inst: &mut InstBundle) {
let mut idb = inst.idb.as_mut().unwrap();
let invert = inst.args.invert;
// Get keys from input database.
let keys = query_keys(&mut idb, &inst.status, None,
inst.args.area, invert, true);
inst.status.begin_editing();
for key in keys {
inst.status.inc_done();
if let Some(area) = inst.args.area {
let pos = Vec3::from_block_key(key);
if (!invert && area_contains_block(&area, pos))
|| (invert && !area_touches_block(&area, pos))
{ // If possible, copy whole map block.
let data = idb.get_block(key).unwrap();
if is_valid_generated(&data) {
inst.db.set_block(key, &data).unwrap();
}
} else { // Copy part of map block
let dst_data = match inst.db.get_block(key) {
Ok(d) => if is_valid_generated(&d) {
d
} else {
continue;
},
Err(_) => continue
};
let src_data = idb.get_block(key).unwrap();
let mut src_block = MapBlock::deserialize(&src_data).unwrap();
let mut dst_block = MapBlock::deserialize(&dst_data).unwrap();
let mut src_meta = NodeMetadataList::deserialize(
&src_block.metadata.get_ref()).unwrap();
let mut dst_meta = NodeMetadataList::deserialize(
&dst_block.metadata.get_ref()).unwrap();
let block_part = area_rel_block_overlap(&area, pos).unwrap();
if invert {
// For inverted selections, reverse the order of the
// overlay operations.
merge_blocks(&dst_block, &mut src_block,
block_part, block_part);
merge_metadata(&dst_meta, &mut src_meta,
block_part, block_part);
clean_name_id_map(&mut src_block);
inst.db.set_block(key, &src_block.serialize()).unwrap();
} else {
merge_blocks(&src_block, &mut dst_block,
block_part, block_part);
merge_metadata(&src_meta, &mut dst_meta,
block_part, block_part);
clean_name_id_map(&mut dst_block);
inst.db.set_block(key, &dst_block.serialize()).unwrap();
}
}
} else {
// No area; copy whole map block.
let data = idb.get_block(key).unwrap();
if is_valid_generated(&data) {
inst.db.set_block(key, &data).unwrap();
}
}
}
inst.status.end_editing();
}
/// Overlay with offset, with or without area.
#[inline]
fn overlay_with_offset(inst: &mut InstBundle) {
let offset = inst.args.offset.unwrap();
let src_area = inst.args.area;
let dst_area = src_area.map(|a| a + offset);
let idb = inst.idb.as_mut().unwrap();
// Get keys from output database.
let keys = query_keys(&mut inst.db, &inst.status, None,
dst_area, inst.args.invert, true);
inst.status.begin_editing();
for key in keys {
inst.status.inc_done();
let dst_pos = Vec3::from_block_key(key);
let dst_data = inst.db.get_block(key).unwrap();
if !is_valid_generated(&dst_data) {
continue;
}
let mut dst_block = MapBlock::deserialize(&dst_data).unwrap();
let mut dst_meta = NodeMetadataList::deserialize(
dst_block.metadata.get_ref()).unwrap();
let dst_part_abs = dst_area.map_or(
Area::new(dst_pos * 16, dst_pos * 16 + 15),
|ref a| area_abs_block_overlap(a, dst_pos).unwrap()
);
let src_part_abs = dst_part_abs - offset;
let src_blocks_needed = src_part_abs.to_touching_block_area();
for src_pos in src_blocks_needed.iterate() {
if !src_pos.is_valid_block_pos() {
continue;
}
let src_data = match idb.get_block(src_pos.to_block_key()) {
Ok(d) => if is_valid_generated(&d) {
d
} else {
continue
},
Err(_) => continue
};
let src_block = MapBlock::deserialize(&src_data).unwrap();
let src_meta = NodeMetadataList::deserialize(
src_block.metadata.get_ref()).unwrap();
let src_frag_abs = area_abs_block_overlap(&src_part_abs, src_pos)
.unwrap();
let src_frag_rel = src_frag_abs - src_pos * 16;
let dst_frag_rel = area_rel_block_overlap(
&(src_frag_abs + offset), dst_pos).unwrap();
merge_blocks(&src_block, &mut dst_block,
src_frag_rel, dst_frag_rel);
merge_metadata(&src_meta, &mut dst_meta,
src_frag_rel, dst_frag_rel);
}
clean_name_id_map(&mut dst_block);
*dst_block.metadata.get_mut() = dst_meta.serialize(dst_block.version);
inst.db.set_block(key, &dst_block.serialize()).unwrap();
}
inst.status.end_editing();
}
fn overlay(inst: &mut InstBundle) {
let offset = inst.args.offset.unwrap_or(Vec3::new(0, 0, 0));
if offset == Vec3::new(0, 0, 0) {
overlay_no_offset(inst);
} else {
overlay_with_offset(inst);
}
}
pub fn get_command() -> Command {
Command {
func: overlay,
verify_args: Some(verify_args),
args: vec![
(ArgType::InputMapPath, "Path to input map file"),
(ArgType::Area(false), "Area to overlay"),
(ArgType::Invert, "Overlay all nodes outside the given area"),
(ArgType::Offset(false), "Vector to offset nodes by"),
],
help: "Copy part or all of one map into another."
}
}

172
src/commands/replace_nodes.rs Normal file

@@ -0,0 +1,172 @@
use super::Command;
use crate::spatial::{Vec3, Area, area_contains_block, area_touches_block,
area_rel_block_overlap};
use crate::instance::{ArgType, InstArgs, InstBundle};
use crate::map_block::MapBlock;
use crate::utils::query_keys;
use crate::time_keeper::TimeKeeper;
use crate::utils::fmt_big_num;
fn do_replace(
block: &mut MapBlock,
key: i64,
search_id: u16,
new_node: &str,
area: Option<Area>,
invert: bool,
tk: &mut TimeKeeper
) -> u64
{
let block_pos = Vec3::from_block_key(key);
let mut count = 0;
// Replace nodes in a portion of a map block.
if area.is_some() && area_contains_block(&area.unwrap(), block_pos) !=
area_touches_block(&area.unwrap(), block_pos)
{
let _t = tk.get_timer("replace (partial block)");
let node_area = area_rel_block_overlap(&area.unwrap(), block_pos)
.unwrap();
let mut new_replace_id = false;
let replace_id = block.nimap.get_id(new_node)
.unwrap_or_else(|| {
new_replace_id = true;
block.nimap.get_max_id().unwrap() + 1
});
let mut idx = 0;
let mut old_node_present = false;
let mut new_node_present = false;
let nd = block.node_data.get_mut();
for z in 0 .. 16 {
for y in 0 .. 16 {
for x in 0 .. 16 {
if nd.nodes[idx] == search_id
&& node_area.contains(Vec3 {x, y, z}) != invert
{
nd.nodes[idx] = replace_id;
new_node_present = true;
count += 1;
}
if nd.nodes[idx] == search_id {
old_node_present = true;
}
idx += 1;
}
}
}
// Replacement node not yet in name-ID map; insert it.
if new_replace_id && new_node_present {
block.nimap.insert(replace_id, new_node);
}
// Search node was completely eliminated; shift IDs down.
if !old_node_present {
for i in 0 .. nd.nodes.len() {
if nd.nodes[i] > search_id {
nd.nodes[i] -= 1;
}
}
block.nimap.remove(search_id);
}
}
// Replace nodes in whole map block.
else {
// Block already contains replacement node, beware!
if let Some(mut replace_id) = block.nimap.get_id(new_node) {
let _t = tk.get_timer("replace (non-unique replacement)");
// Delete unused ID from name-ID map and shift IDs down.
block.nimap.remove(search_id);
// Shift replacement ID, if necessary.
replace_id -= (replace_id > search_id) as u16;
// Map old node IDs to new node IDs.
let nd = block.node_data.get_mut();
for id in &mut nd.nodes {
*id = if *id == search_id {
count += 1;
replace_id
} else {
*id - (*id > search_id) as u16
};
}
}
// Block does not contain replacement node.
// Simply replace the node name in the name-ID map.
else {
let _t = tk.get_timer("replace (unique replacement)");
let nd = block.node_data.get_ref();
for id in &nd.nodes {
count += (*id == search_id) as u64;
}
block.nimap.insert(search_id, new_node);
}
}
count
}
fn replace_nodes(inst: &mut InstBundle) {
let node = inst.args.node.clone().unwrap();
let new_node = inst.args.new_node.clone().unwrap();
let keys = query_keys(&mut inst.db, &inst.status,
Some(node.clone()), inst.args.area, inst.args.invert, true);
inst.status.begin_editing();
let mut count = 0;
let mut tk = TimeKeeper::new();
for key in keys {
let data = inst.db.get_block(key).unwrap();
let mut block = {
let _t = tk.get_timer("decode");
MapBlock::deserialize(&data).unwrap()
};
if let Some(search_id) = block.nimap.get_id(&node) {
count += do_replace(&mut block, key, search_id, &new_node,
inst.args.area, inst.args.invert, &mut tk);
let new_data = {
let _t = tk.get_timer("encode");
block.serialize()
};
inst.db.set_block(key, &new_data).unwrap();
}
inst.status.inc_done();
}
// tk.print();
inst.status.end_editing();
inst.status.log_info(
format!("{} nodes replaced.", fmt_big_num(count)).as_str());
}
fn verify_args(args: &InstArgs) -> anyhow::Result<()> {
anyhow::ensure!(args.node != args.new_node,
"node and new_node must be different.");
Ok(())
}
pub fn get_command() -> Command {
Command {
func: replace_nodes,
verify_args: Some(verify_args),
args: vec![
(ArgType::Node(true), "Node to replace"),
(ArgType::NewNode(true), "New node to replace with"),
(ArgType::Area(false), "Area in which to replace nodes"),
(ArgType::Invert, "Replace nodes outside the given area")
],
help: "Replace all of one node with another node."
}
}

117
src/commands/set_param2.rs Normal file

@@ -0,0 +1,117 @@
use super::Command;
use crate::spatial::{Vec3, Area, area_rel_block_overlap, area_contains_block};
use crate::instance::{ArgType, InstBundle};
use crate::map_block::{MapBlock};
use crate::utils::{query_keys, fmt_big_num};
fn set_in_area_node(block: &mut MapBlock, area: Area, id: u16, val: u8) -> u64
{
let nd = block.node_data.get_mut();
let mut count = 0;
for z in area.min.z ..= area.max.z {
let z_start = z * 256;
for y in area.min.y ..= area.max.y {
let zy_start = z_start + y * 16;
for x in area.min.x ..= area.max.x {
let i = (zy_start + x) as usize;
if nd.nodes[i] == id {
nd.param2[i] = val;
count += 1;
}
}
}
}
count
}
fn set_in_area(block: &mut MapBlock, area: Area, val: u8) {
let nd = block.node_data.get_mut();
for z in area.min.z ..= area.max.z {
let z_start = z * 256;
for y in area.min.y ..= area.max.y {
let zy_start = z_start + y * 16;
for x in area.min.x ..= area.max.x {
nd.param2[(zy_start + x) as usize] = val;
}
}
}
}
fn set_param2(inst: &mut InstBundle) {
// TODO: Actually verify!
assert!(inst.args.area.is_some() || inst.args.node.is_some());
let param2_val = inst.args.param2_val.unwrap();
let keys = query_keys(&mut inst.db, &mut inst.status,
inst.args.node.clone(), inst.args.area, false, true);
inst.status.begin_editing();
let mut count: u64 = 0;
for key in keys {
inst.status.inc_done();
let pos = Vec3::from_block_key(key);
let data = inst.db.get_block(key).unwrap();
let mut block = MapBlock::deserialize(&data).unwrap();
let node_id = inst.args.node.as_deref()
.and_then(|node| block.nimap.get_id(&node));
if inst.args.node.is_some() && node_id.is_none() {
// Node not found in this map block.
continue;
}
let nd = block.node_data.get_mut();
if let Some(area) = inst.args.area
.filter(|a| !area_contains_block(&a, pos))
{ // Modify part of block
let overlap = area_rel_block_overlap(&area, pos).unwrap();
if let Some(nid) = node_id {
count +=
set_in_area_node(&mut block, overlap, nid, param2_val);
} else {
set_in_area(&mut block, overlap, param2_val);
count += overlap.volume();
}
} else { // Modify whole block
if let Some(nid) = node_id {
for i in 0 .. nd.param2.len() {
if nd.nodes[i] == nid {
nd.param2[i] = param2_val;
count += 1;
}
}
} else {
for x in &mut nd.param2 {
*x = param2_val;
}
count += nd.param2.len() as u64;
}
}
inst.db.set_block(key, &block.serialize()).unwrap();
}
inst.status.end_editing();
inst.status.log_info(
format!("{} nodes set.", fmt_big_num(count)).as_str());
}
pub fn get_command() -> Command {
Command {
func: set_param2,
verify_args: None,
args: vec![
(ArgType::Area(false), "Area in which to set param2 values"),
(ArgType::Node(false), "Node to set param2 values of"),
(ArgType::Param2Val(true), "New param2 value")
],
help: "Set param2 values of an area or node."
}
}

29
src/commands/vacuum.rs Normal file

@@ -0,0 +1,29 @@
use super::Command;
use std::time::Instant;
use crate::instance::InstBundle;
use crate::utils::fmt_duration;
fn vacuum(inst: &mut InstBundle) {
inst.status.log_info("Starting vacuum.");
let start = Instant::now();
match inst.db.vacuum() {
Ok(_) => {
let time = fmt_duration(start.elapsed());
inst.status.log_info(format!("Completed vacuum in {}.", time));
},
Err(e) => inst.status.log_error(format!("Vacuum failed: {}.", e))
}
}
pub fn get_command() -> Command {
Command {
func: vacuum,
verify_args: None,
args: vec![],
help: "Rebuild map database to reduce its size"
}
}

277
src/instance.rs Normal file

@@ -0,0 +1,277 @@
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use std::sync::mpsc;
use anyhow::Context;
use crate::spatial::{Vec3, Area};
use crate::map_database::MapDatabase;
use crate::commands;
#[derive(Clone)]
pub enum ArgType {
InputMapPath,
Area(bool),
Invert,
Offset(bool),
Node(bool),
NewNode(bool),
Param2Val(bool),
}
#[derive(Debug)]
pub struct InstArgs {
pub command: String,
pub map_path: String,
pub input_map_path: Option<String>,
pub area: Option<Area>,
pub invert: bool,
pub offset: Option<Vec3>,
pub node: Option<String>,
pub new_node: Option<String>,
pub param2_val: Option<u8>,
}
/// Used to tell what sort of progress bar/counter should be shown to the user.
#[derive(Clone, Copy, PartialEq)]
pub enum InstState {
Ignore,
Querying,
Editing
}
#[derive(Clone)]
pub struct InstStatus {
pub blocks_total: usize,
pub blocks_done: usize,
pub state: InstState
}
impl InstStatus {
fn new() -> Self {
Self {
blocks_total: 0,
blocks_done: 0,
state: InstState::Ignore
}
}
}
pub enum LogType {
Info,
Error
}
impl std::fmt::Display for LogType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Info => write!(f, "info"),
Self::Error => write!(f, "error")
}
}
}
pub enum InstEvent {
NewState(InstState),
Log(LogType, String)
}
#[derive(Clone)]
pub struct StatusServer {
status: Arc<Mutex<InstStatus>>,
event_tx: mpsc::Sender<InstEvent>
}
impl StatusServer {
pub fn set_state(&self, new_state: InstState) {
self.status.lock().unwrap().state = new_state;
self.event_tx.send(InstEvent::NewState(new_state)).unwrap();
}
pub fn set_total(&self, total: usize) {
self.status.lock().unwrap().blocks_total = total;
}
pub fn inc_done(&self) {
self.status.lock().unwrap().blocks_done += 1;
}
pub fn begin_editing(&self) {
self.set_state(InstState::Editing);
}
pub fn end_editing(&self) {
self.set_state(InstState::Ignore);
}
pub fn log<S: AsRef<str>>(&self, lt: LogType, msg: S) {
self.event_tx.send(InstEvent::Log(lt, msg.as_ref().to_string()))
.unwrap();
}
pub fn log_info<S: AsRef<str>>(&self, msg: S) {
self.log(LogType::Info, msg);
}
pub fn log_error<S: AsRef<str>>(&self, msg: S) {
self.log(LogType::Error, msg);
}
}
pub struct StatusClient {
pub event_rx: mpsc::Receiver<InstEvent>,
status: Arc<Mutex<InstStatus>>
}
impl StatusClient {
pub fn get(&self) -> InstStatus {
self.status.lock().unwrap().clone()
}
}
pub struct InstBundle<'a> {
pub args: InstArgs,
pub status: StatusServer,
pub db: MapDatabase<'a>,
pub idb: Option<MapDatabase<'a>>
}
fn status_channel() -> (StatusServer, StatusClient) {
let status1 = Arc::new(Mutex::new(InstStatus::new()));
let status2 = status1.clone();
let (event_tx, event_rx) = mpsc::channel();
(
StatusServer {status: status1, event_tx},
StatusClient {status: status2, event_rx}
)
}
fn verify_args(args: &InstArgs) -> anyhow::Result<()> {
fn verify_item_name(name: &str) -> anyhow::Result<()> {
if name == "air" || name == "ignore" {
Ok(())
} else {
let delim = name.find(':')
.ok_or(anyhow::anyhow!("missing ':' separator"))?;
let mod_name = &name[..delim];
anyhow::ensure!(mod_name.find(|c: char|
!(c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_')
).is_none());
let item_name = &name[delim + 1..];
anyhow::ensure!(item_name.find(|c: char|
!(c.is_ascii_alphanumeric() || c == '_')
).is_none());
Ok(())
}
}
if args.area.is_none() && args.invert {
anyhow::bail!("Cannot invert without a specified area.");
}
if let Some(a) = args.area {
for pos in vec![a.min, a.max] {
anyhow::ensure!(pos.is_valid_node_pos(),
"Area corner is outside map bounds: {}.", pos);
}
}
if let Some(sn) = &args.node {
verify_item_name(sn.as_str())
.with_context(|| format!("Invalid node name: {}.", sn))?;
}
if let Some(rn) = &args.new_node {
verify_item_name(rn.as_str())
.with_context(|| format!("Invalid replacement name: {}.", rn))?;
}
Ok(())
}
fn open_map(path: PathBuf, flags: sqlite::OpenFlags)
-> anyhow::Result<sqlite::Connection>
{
let new_path = if path.is_file() {
path
} else {
let with_file = path.join("map.sqlite");
if with_file.is_file() {
with_file
} else {
anyhow::bail!("could not find map file");
}
};
Ok(sqlite::Connection::open_with_flags(new_path, flags)?)
}
fn compute_thread(args: InstArgs, status: StatusServer)
-> anyhow::Result<()>
{
verify_args(&args)?;
let commands = commands::get_commands();
if let Some(cmd_verify) = commands[args.command.as_str()].verify_args {
cmd_verify(&args)?
}
let db_conn = open_map(PathBuf::from(&args.map_path),
sqlite::OpenFlags::new().set_read_write())?;
let db = MapDatabase::new(&db_conn)
.context("Failed to open main world/map.")?;
let idb_conn = args.input_map_path.as_deref().map(
|imp| open_map(PathBuf::from(imp),
sqlite::OpenFlags::new().set_read_only())
).transpose().context("Failed to open input world/map.")?;
let idb = match &idb_conn {
Some(conn) => Some(MapDatabase::new(conn)?),
None => None
};
let func = commands[args.command.as_str()].func;
let mut inst = InstBundle {
args,
status,
db,
idb
};
func(&mut inst);
if inst.db.is_in_transaction() {
inst.status.log_info("Committing...");
inst.db.commit_if_needed()?;
}
inst.status.log_info("Done.");
Ok(())
}
pub fn spawn_compute_thread(args: InstArgs)
-> (std::thread::JoinHandle<()>, StatusClient)
{
let (status_tx, status_rx) = status_channel();
let h = std::thread::Builder::new()
.name("compute".to_string())
.spawn(move || {
compute_thread(args, status_tx.clone()).unwrap_or_else(
|err| status_tx.log_error(&err.to_string())
);
})
.unwrap();
(h, status_rx)
}

16
src/main.rs Normal file

@@ -0,0 +1,16 @@
mod time_keeper;
mod spatial;
mod utils;
mod map_database;
mod map_block;
mod block_utils;
mod instance;
mod commands;
mod cmd_line;
// Todo: Check for unnecessary #derives!
fn main() {
// TODO: Add GUI. hmm...
cmd_line::run_cmd_line();
}

73
src/map_block/compression.rs Normal file

@@ -0,0 +1,73 @@
use std::io::Cursor;
use std::io::prelude::*;
use flate2::write::ZlibEncoder;
use flate2::read::ZlibDecoder;
use flate2::Compression;
use super::MapBlockError;
pub trait Compress {
fn compress(&self, dst: &mut Cursor<Vec<u8>>);
fn decompress(src: &mut Cursor<&[u8]>) -> Result<Self, MapBlockError>
where Self: std::marker::Sized;
}
impl Compress for Vec<u8> {
fn compress(&self, dst: &mut Cursor<Vec<u8>>) {
let mut encoder = ZlibEncoder::new(dst, Compression::default());
encoder.write_all(self.as_ref()).unwrap();
encoder.finish().unwrap();
}
fn decompress(src: &mut Cursor<&[u8]>) -> Result<Self, MapBlockError> {
let start = src.position();
let mut decoder = ZlibDecoder::new(src);
let mut dst = Self::new();
decoder.read_to_end(&mut dst).unwrap();
let total_in = decoder.total_in();
let src = decoder.into_inner();
src.set_position(start + total_in);
Ok(dst)
}
}
#[derive(Debug)]
pub struct ZlibContainer<T: Compress> {
compressed: Option<Vec<u8>>,
data: T
}
impl<T: Compress> ZlibContainer<T> {
pub fn read(src: &mut Cursor<&[u8]>) -> Result<Self, MapBlockError> {
let start = src.position() as usize;
let data = T::decompress(src)?;
let end = src.position() as usize;
Ok(Self {
compressed: Some(src.get_ref()[start..end].to_vec()),
data
})
}
pub fn write(&self, dst: &mut Cursor<Vec<u8>>) {
if let Some(compressed) = self.compressed.as_deref() {
dst.write_all(compressed).unwrap();
} else {
self.data.compress(dst);
}
}
pub fn get_ref(&self) -> &T {
&self.data
}
pub fn get_mut(&mut self) -> &mut T {
self.compressed = None;
&mut self.data
}
}

122
src/map_block/map_block.rs Normal file
View File

@ -0,0 +1,122 @@
use super::*;
const MIN_BLOCK_VER: u8 = 25;
const MAX_BLOCK_VER: u8 = 28;
const BLOCK_BUF_SIZE: usize = 2048;
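/// Quick check on raw block data: it must be long enough, use a supported
/// version, and have the "not fully generated" flag bit (0x08) unset.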
pub fn is_valid_generated(data: &[u8]) -> bool {
data.len() > 2
&& MIN_BLOCK_VER <= data[0] && data[0] <= MAX_BLOCK_VER
&& data[1] & 0x08 == 0
}
#[derive(Debug)]
pub struct MapBlock {
pub version: u8,
pub flags: u8,
pub lighting_complete: u16,
pub content_width: u8,
pub params_width: u8,
pub node_data: ZlibContainer<NodeData>,
pub metadata: ZlibContainer<Vec<u8>>,
pub static_objects: StaticObjectList,
pub timestamp: u32,
pub nimap: NameIdMap,
pub node_timers: NodeTimerList
}
impl MapBlock {
pub fn deserialize(data_slice: &[u8]) -> Result<Self, MapBlockError> {
let mut data = Cursor::new(data_slice);
// Version
let version = data.read_u8()?;
if version < MIN_BLOCK_VER || version > MAX_BLOCK_VER {
return Err(MapBlockError::InvalidVersion);
}
// Flags
let flags = data.read_u8()?;
// Light data
let lighting_complete =
if version >= 27 { data.read_u16::<BigEndian>()? }
else { 0 };
// Content width/param width
let content_width = data.read_u8()?;
let params_width = data.read_u8()?;
if content_width != 2 || params_width != 2 {
return Err(MapBlockError::Other);
}
// Node data
let node_data = ZlibContainer::read(&mut data)?;
// Node metadata
let metadata = ZlibContainer::read(&mut data)?;
// Static objects
let static_objects = StaticObjectList::deserialize(&mut data)?;
// Timestamp
let timestamp = data.read_u32::<BigEndian>()?;
// Name-ID mappings
let nimap = NameIdMap::deserialize(&mut data)?;
// Node timers
let node_timers = NodeTimerList::deserialize(&mut data)?;
Ok(Self {
version,
flags,
lighting_complete,
content_width,
params_width,
node_data,
metadata,
static_objects,
timestamp,
nimap,
node_timers
})
}
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(BLOCK_BUF_SIZE);
let mut data = Cursor::new(buf);
assert!(MIN_BLOCK_VER <= self.version && self.version <= MAX_BLOCK_VER,
"Invalid mapblock version.");
// Version
data.write_u8(self.version).unwrap();
// Flags
data.write_u8(self.flags).unwrap();
// Light data
if self.version >= 27 {
data.write_u16::<BigEndian>(self.lighting_complete).unwrap();
}
// Content width/param width
data.write_u8(self.content_width).unwrap();
data.write_u8(self.params_width).unwrap();
// Node data
self.node_data.write(&mut data);
// Node metadata
self.metadata.write(&mut data);
// Static objects
self.static_objects.serialize(&mut data);
// Timestamp
data.write_u32::<BigEndian>(self.timestamp).unwrap();
// Name-ID mappings
self.nimap.serialize(&mut data);
// Node timers
self.node_timers.serialize(&mut data);
buf = data.into_inner();
buf.shrink_to_fit();
buf
}
}

112
src/map_block/metadata.rs Normal file
View File

@ -0,0 +1,112 @@
use std::io::prelude::*;
use std::io::Cursor;
use std::collections::HashMap;
use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian};
use memmem::{Searcher, TwoWaySearcher};
use super::{MapBlockError, read_string16, write_string16, read_string32,
write_string32, vec_with_len};
#[derive(Debug, Clone)]
pub struct NodeMetadata {
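// Variable name -> (value, private flag).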
pub vars: HashMap<Vec<u8>, (Vec<u8>, bool)>,
pub inv: Vec<u8>
}
impl NodeMetadata {
fn deserialize(data: &mut Cursor<&[u8]>, version: u8)
-> Result<Self, MapBlockError>
{
let var_count = data.read_u32::<BigEndian>()?;
let mut vars = HashMap::with_capacity(var_count as usize);
for _ in 0..var_count {
let name = read_string16(data)?;
let val = read_string32(data)?;
let private = if version >= 2 {
data.read_u8()? != 0
} else { false };
vars.insert(name.clone(), (val, private));
}
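// The inventory is stored as plain text terminated by "EndInventory\n";
// find the terminator and copy everything up to and including it.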
const END_STR: &[u8; 13] = b"EndInventory\n";
let end_finder = TwoWaySearcher::new(END_STR);
let end = end_finder
.search_in(&data.get_ref()[data.position() as usize ..])
.ok_or(MapBlockError::Other)?;
let mut inv = vec_with_len(end + END_STR.len());
data.read_exact(&mut inv)?;
Ok(Self {
vars,
inv
})
}
fn serialize(&self, data: &mut Cursor<Vec<u8>>, version: u8) {
data.write_u32::<BigEndian>(self.vars.len() as u32).unwrap();
for (name, (val, private)) in &self.vars {
write_string16(data, name);
write_string32(data, &val);
if version >= 2 {
data.write_u8(*private as u8).unwrap();
}
}
data.write_all(&self.inv).unwrap();
}
}
#[derive(Debug)]
pub struct NodeMetadataList {
pub list: HashMap<u16, NodeMetadata>
}
impl NodeMetadataList {
pub fn deserialize(data_slice: &[u8]) -> Result<Self, MapBlockError> {
let mut data = Cursor::new(data_slice);
let version = data.read_u8()?;
if version > 2 {
return Err(MapBlockError::InvalidVersion)
}
let count = match version {
0 => 0,
_ => data.read_u16::<BigEndian>()?
};
let mut list = HashMap::with_capacity(count as usize);
for _ in 0..count {
let pos = data.read_u16::<BigEndian>()?;
let meta = NodeMetadata::deserialize(&mut data, version)?;
list.insert(pos, meta);
}
Ok(Self { list })
}
pub fn serialize(&self, block_version: u8) -> Vec<u8> {
let buf = Vec::new();
let mut data = Cursor::new(buf);
if self.list.len() == 0 {
data.write_u8(0).unwrap();
} else {
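// Block format 28+ stores metadata version 2, which carries the private flag.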
let version = if block_version >= 28 { 2 } else { 1 };
data.write_u8(version).unwrap();
data.write_u16::<BigEndian>(self.list.len() as u16).unwrap();
for (&pos, meta) in &self.list {
data.write_u16::<BigEndian>(pos).unwrap();
meta.serialize(&mut data, version);
}
}
data.into_inner()
}
}

69
src/map_block/mod.rs Normal file
View File

@ -0,0 +1,69 @@
use std::io::prelude::*;
use std::io::Cursor;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
mod map_block;
mod compression;
mod node_data;
mod metadata;
mod static_object;
mod node_timer;
mod name_id_map;
pub use map_block::{MapBlock, is_valid_generated};
pub use compression::ZlibContainer;
pub use node_data::NodeData;
pub use metadata::NodeMetadataList;
pub use static_object::{StaticObject, StaticObjectList};
pub use node_timer::{NodeTimer, NodeTimerList};
pub use name_id_map::NameIdMap;
#[derive(Debug)]
pub enum MapBlockError {
InvalidVersion,
DataError,
Other,
}
impl From<std::io::Error> for MapBlockError {
fn from(_: std::io::Error) -> Self {
Self::DataError
}
}
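/// Create a zero-initialized buffer of `len` elements for callers to fill.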
fn vec_with_len<T: Default + Clone>(len: usize) -> Vec<T> {
vec![T::default(); len]
}
fn read_string16(src: &mut Cursor<&[u8]>) -> Result<Vec<u8>, std::io::Error> {
let count = src.read_u16::<BigEndian>()?;
let mut bytes = vec_with_len(count as usize);
src.read_exact(&mut bytes)?;
Ok(bytes)
}
fn read_string32(src: &mut Cursor<&[u8]>) -> Result<Vec<u8>, std::io::Error> {
let count = src.read_u32::<BigEndian>()?;
let mut bytes = vec_with_len(count as usize);
src.read_exact(&mut bytes)?;
Ok(bytes)
}
fn write_string16(dst: &mut Cursor<Vec<u8>>, data: &[u8]) {
dst.write_u16::<BigEndian>(data.len() as u16).unwrap();
dst.write_all(data).unwrap();
}
fn write_string32(dst: &mut Cursor<Vec<u8>>, data: &[u8]) {
dst.write_u32::<BigEndian>(data.len() as u32).unwrap();
dst.write_all(data).unwrap();
}

73
src/map_block/name_id_map.rs Normal file
View File

@ -0,0 +1,73 @@
use std::collections::BTreeMap;
use super::*;
/// Maps 16-bit node IDs to actual node names.
/// Relevant Minetest source file: /src/nameidmapping.cpp
#[derive(Debug)]
pub struct NameIdMap {
// Use a BTreeMap instead of a HashMap to preserve the order of IDs.
pub map: BTreeMap<u16, String>,
}
impl NameIdMap {
pub fn deserialize(data: &mut Cursor<&[u8]>)
-> Result<Self, MapBlockError>
{
let version = data.read_u8()?;
if version != 0 {
return Err(MapBlockError::Other);
}
let count = data.read_u16::<BigEndian>()? as usize;
let mut map = BTreeMap::new();
for _ in 0 .. count {
let id = data.read_u16::<BigEndian>()?;
let name = read_string16(data)?;
let string = String::from_utf8_lossy(&name).into_owned();
map.insert(id, string);
}
Ok(Self {map})
}
pub fn serialize(&self, out: &mut Cursor<Vec<u8>>) {
out.write_u8(0).unwrap();
out.write_u16::<BigEndian>(self.map.len() as u16).unwrap();
for (id, name) in &self.map {
out.write_u16::<BigEndian>(*id).unwrap();
write_string16(out, name.as_bytes());
}
}
#[inline]
pub fn get_id(&self, name: &str) -> Option<u16> {
self.map.iter()
.find_map(|(&k, v)| if v == name { Some(k) } else { None })
}
#[inline]
pub fn get_max_id(&self) -> Option<u16> {
self.map.keys().next_back().copied()
}
#[inline]
pub fn insert(&mut self, id: u16, name: &str) {
self.map.insert(id, name.to_string());
}
/// Remove the name at a given ID and shift down values above it.
pub fn remove(&mut self, id: u16) {
self.map.remove(&id);
let mut next_id = id + 1;
while self.map.contains_key(&next_id) {
let name = self.map.remove(&next_id).unwrap();
self.map.insert(next_id - 1, name);
next_id += 1;
}
}
}

68
src/map_block/node_data.rs Normal file
View File

@ -0,0 +1,68 @@
use std::io::Cursor;
use std::io::prelude::*;
use flate2::write::ZlibEncoder;
use flate2::read::ZlibDecoder;
use flate2::Compression;
use byteorder::{ByteOrder, BigEndian};
use super::{MapBlockError, vec_with_len};
use super::compression::Compress;
const BLOCK_SIZE: usize = 16;
const NODE_COUNT: usize = BLOCK_SIZE * BLOCK_SIZE * BLOCK_SIZE;
#[derive(Debug)]
pub struct NodeData {
pub nodes: Vec<u16>,
pub param1: Vec<u8>,
pub param2: Vec<u8>
}
impl Compress for NodeData {
fn compress(&self, dst: &mut Cursor<Vec<u8>>) {
let mut encoder = ZlibEncoder::new(dst, Compression::default());
let mut node_data = vec![0u8; NODE_COUNT * 2];
BigEndian::write_u16_into(&self.nodes,
&mut node_data[.. NODE_COUNT * 2]);
encoder.write_all(&node_data).unwrap();
encoder.write_all(&self.param1).unwrap();
encoder.write_all(&self.param2).unwrap();
encoder.finish().unwrap();
}
fn decompress(src: &mut Cursor<&[u8]>) -> Result<Self, MapBlockError> {
let start = src.position();
let mut decoder = ZlibDecoder::new(src);
let mut node_bytes = vec_with_len(NODE_COUNT * 2);
decoder.read_exact(&mut node_bytes)?;
let mut nodes = vec_with_len(NODE_COUNT);
BigEndian::read_u16_into(&node_bytes, &mut nodes);
let mut param1 = vec_with_len(NODE_COUNT);
decoder.read_exact(&mut param1)?;
let mut param2 = Vec::with_capacity(NODE_COUNT);
decoder.read_to_end(&mut param2)?;
if param2.len() != NODE_COUNT {
return Err(MapBlockError::DataError)
}
let total_in = decoder.total_in();
let src = decoder.into_inner();
src.set_position(start + total_in);
Ok(Self {
nodes,
param1,
param2
})
}
}

49
src/map_block/node_timer.rs Normal file
View File

@ -0,0 +1,49 @@
use super::*;
#[derive(Debug)]
pub struct NodeTimer {
pos: u16,
timeout: u32,
elapsed: u32
}
#[derive(Debug)]
pub struct NodeTimerList {
timers: Vec<NodeTimer>
}
impl NodeTimerList {
pub fn deserialize(data: &mut Cursor<&[u8]>)
-> Result<Self, MapBlockError>
{
let data_len = data.read_u8()?;
if data_len != 10 {
return Err(MapBlockError::Other);
}
let count = data.read_u16::<BigEndian>()?;
let mut timers = Vec::with_capacity(count as usize);
for _ in 0 .. count {
let pos = data.read_u16::<BigEndian>()?;
let timeout = data.read_u32::<BigEndian>()?;
let elapsed = data.read_u32::<BigEndian>()?;
timers.push(NodeTimer {pos, timeout, elapsed});
}
Ok(NodeTimerList {timers})
}
pub fn serialize(&self, data: &mut Cursor<Vec<u8>>) {
data.write_u8(10).unwrap();
data.write_u16::<BigEndian>(self.timers.len() as u16).unwrap();
for t in &self.timers {
data.write_u16::<BigEndian>(t.pos).unwrap();
data.write_u32::<BigEndian>(t.timeout).unwrap();
data.write_u32::<BigEndian>(t.elapsed).unwrap();
}
}
}

64
src/map_block/static_object.rs Normal file
View File

@ -0,0 +1,64 @@
use super::*;
use crate::spatial::Vec3;
#[derive(Debug)]
pub struct StaticObject {
pub obj_type: u8,
pub f_pos: Vec3,
pub data: Vec<u8>
}
impl StaticObject {
fn deserialize(src: &mut Cursor<&[u8]>) -> Result<Self, MapBlockError> {
let obj_type = src.read_u8()?;
let f_pos = Vec3::new(
src.read_i32::<BigEndian>()?,
src.read_i32::<BigEndian>()?,
src.read_i32::<BigEndian>()?
);
let data = read_string16(src)?;
Ok(Self {obj_type, f_pos, data})
}
fn serialize(&self, dst: &mut Cursor<Vec<u8>>) {
dst.write_u8(self.obj_type).unwrap();
dst.write_i32::<BigEndian>(self.f_pos.x).unwrap();
dst.write_i32::<BigEndian>(self.f_pos.y).unwrap();
dst.write_i32::<BigEndian>(self.f_pos.z).unwrap();
write_string16(dst, &self.data);
}
}
#[derive(Debug)]
pub struct StaticObjectList {
pub list: Vec<StaticObject>
}
impl StaticObjectList {
pub fn deserialize(src: &mut Cursor<&[u8]>)
-> Result<Self, MapBlockError>
{
let version = src.read_u8()?;
if version != 0 {
return Err(MapBlockError::Other);
}
let count = src.read_u16::<BigEndian>()?;
let mut list = Vec::with_capacity(count as usize);
for _ in 0 .. count {
list.push(StaticObject::deserialize(src)?);
}
Ok(Self {list})
}
pub fn serialize(&self, dst: &mut Cursor<Vec<u8>>) {
dst.write_u8(0).unwrap();
dst.write_u16::<BigEndian>(self.list.len() as u16).unwrap();
for obj in &self.list {
obj.serialize(dst);
}
}
}

146
src/map_database.rs Normal file
View File

@ -0,0 +1,146 @@
#[derive(Debug, thiserror::Error)]
pub enum DBError {
#[error("database operation failed")]
DatabaseError,
#[error("database is not a valid map database")]
InvalidDatabase,
#[error("requested data was not found")]
MissingData,
}
impl From<sqlite::Error> for DBError {
fn from(_: sqlite::Error) -> Self {
Self::DatabaseError
}
}
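/// Check that the `blocks` table has the expected schema:
/// `pos` (INT, primary key) and `data` (BLOB).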
fn verify_database(conn: &sqlite::Connection) -> Result<(), DBError> {
let my_assert = |res: bool| -> Result<(), DBError> {
match res {
true => Ok(()),
false => Err(DBError::InvalidDatabase)
}
};
let mut stmt = conn.prepare("PRAGMA table_info(blocks)")?;
stmt.next()?;
my_assert(stmt.read::<String>(1)? == "pos")?;
my_assert(stmt.read::<String>(2)? == "INT")?;
my_assert(stmt.read::<i64>(5)? == 1)?;
stmt.next()?;
my_assert(stmt.read::<String>(1)? == "data")?;
my_assert(stmt.read::<String>(2)? == "BLOB")?;
my_assert(stmt.read::<i64>(5)? == 0)?;
Ok(())
}
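/// Iterator over every (pos, data) row in the `blocks` table.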
pub struct MapDatabaseRows<'a> {
stmt_get: sqlite::Statement<'a>
}
impl Iterator for MapDatabaseRows<'_> {
type Item = (i64, Vec<u8>);
fn next(&mut self) -> Option<Self::Item> {
match self.stmt_get.next().unwrap() {
sqlite::State::Row => {
Some((
self.stmt_get.read(0).unwrap(),
self.stmt_get.read(1).unwrap()
))
},
sqlite::State::Done => None
}
}
}
pub struct MapDatabase<'a> {
conn: &'a sqlite::Connection,
stmt_get: sqlite::Statement<'a>,
stmt_set: sqlite::Statement<'a>,
stmt_del: sqlite::Statement<'a>,
in_transaction: bool,
}
impl<'a> MapDatabase<'a> {
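/// Wrap an open connection, verify the schema, and begin a transaction;
/// changes are batched until commit_if_needed() is called.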
pub fn new(conn: &'a sqlite::Connection) -> Result<Self, DBError> {
conn.execute("BEGIN")?;
verify_database(conn)?;
let stmt_get = conn.prepare("SELECT data FROM blocks WHERE pos = ?")?;
let stmt_set = conn.prepare(
"INSERT OR REPLACE INTO blocks (pos, data) VALUES (?, ?)")?;
let stmt_del = conn.prepare("DELETE FROM blocks WHERE pos = ?")?;
Ok(Self {conn, stmt_get, stmt_set, stmt_del, in_transaction: true})
}
pub fn is_in_transaction(&self) -> bool {
self.in_transaction
}
#[inline]
fn begin_if_needed(&self) -> Result<(), DBError> {
if !self.in_transaction {
self.conn.execute("BEGIN")?;
}
Ok(())
}
pub fn commit_if_needed(&mut self) -> Result<(), DBError> {
if self.in_transaction {
self.conn.execute("COMMIT")?;
self.in_transaction = false;
}
Ok(())
}
pub fn iter_rows(&mut self) -> MapDatabaseRows {
self.begin_if_needed().unwrap();
let stmt = self.conn.prepare("SELECT pos, data FROM blocks").unwrap();
MapDatabaseRows {stmt_get: stmt}
}
pub fn get_block(&mut self, map_key: i64) -> Result<Vec<u8>, DBError> {
self.begin_if_needed()?;
self.stmt_get.bind(1, map_key)?;
let value = match self.stmt_get.next()? {
sqlite::State::Row => Ok(self.stmt_get.read(0)?),
sqlite::State::Done => Err(DBError::MissingData)
};
self.stmt_get.reset()?;
value
}
pub fn set_block(&mut self, map_key: i64, data: &[u8])
-> Result<(), DBError>
{
self.begin_if_needed()?;
self.stmt_set.bind(1, map_key)?;
self.stmt_set.bind(2, data)?;
self.stmt_set.next()?;
self.stmt_set.reset()?;
Ok(())
}
pub fn delete_block(&mut self, map_key: i64) -> Result<(), DBError> {
self.begin_if_needed()?;
self.stmt_del.bind(1, map_key)?;
self.stmt_del.next()?;
self.stmt_del.reset()?;
Ok(())
}
pub fn vacuum(&mut self) -> Result<(), DBError> {
self.commit_if_needed()?;
self.conn.execute("VACUUM")?;
Ok(())
}
}

165
src/spatial/area.rs Normal file
View File

@ -0,0 +1,165 @@
use std::cmp::{min, max};
use super::Vec3;
pub struct AreaIterator {
min: Vec3,
max: Vec3,
cur: Vec3
}
impl Iterator for AreaIterator {
// TODO: Fix this mess.
type Item = Vec3;
fn next(&mut self) -> Option<Self::Item> {
self.cur.x += 1;
if self.cur.x > self.max.x {
self.cur.x = self.min.x;
self.cur.y += 1;
if self.cur.y > self.max.y {
self.cur.y = self.min.y;
self.cur.z += 1;
if self.cur.z > self.max.z {
return None;
}
}
}
Some(self.cur)
}
}
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct Area {
pub min: Vec3,
pub max: Vec3
}
impl Area {
pub fn new(min: Vec3, max: Vec3) -> Self {
assert!(min.x <= max.x
&& min.y <= max.y
&& min.z <= max.z);
Self {min, max}
}
pub fn from_unsorted(a: Vec3, b: Vec3) -> Self {
Area {
min: Vec3 {
x: min(a.x, b.x),
y: min(a.y, b.y),
z: min(a.z, b.z)
},
max: Vec3 {
x: max(a.x, b.x),
y: max(a.y, b.y),
z: max(a.z, b.z)
}
}
}
pub fn volume(&self) -> u64 {
(self.max.x - self.min.x + 1) as u64 *
(self.max.y - self.min.y + 1) as u64 *
(self.max.z - self.min.z + 1) as u64
}
pub fn contains(&self, pos: Vec3) -> bool {
self.min.x <= pos.x && pos.x <= self.max.x
&& self.min.y <= pos.y && pos.y <= self.max.y
&& self.min.z <= pos.z && pos.z <= self.max.z
}
pub fn iterate(&self) -> AreaIterator {
AreaIterator {
min: self.min,
max: self.max,
cur: self.min - Vec3::new(1, 0, 0)
}
}
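/// The block area whose mapblocks lie entirely within this node area.
/// Note: min may exceed max if no block is fully contained.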
pub fn to_contained_block_area(&self) -> Self {
let min = Vec3::new(
(self.min.x + 15).div_euclid(16),
(self.min.y + 15).div_euclid(16),
(self.min.z + 15).div_euclid(16)
);
let max = Vec3::new(
(self.max.x - 15).div_euclid(16),
(self.max.y - 15).div_euclid(16),
(self.max.z - 15).div_euclid(16)
);
Self {min, max}
}
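/// The block area of all mapblocks that overlap this node area.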
pub fn to_touching_block_area(&self) -> Self {
let min = Vec3::new(
self.min.x.div_euclid(16),
self.min.y.div_euclid(16),
self.min.z.div_euclid(16)
);
let max = Vec3::new(
self.max.x.div_euclid(16),
self.max.y.div_euclid(16),
self.max.z.div_euclid(16)
);
Self {min, max}
}
}
impl std::ops::Add<Vec3> for Area {
type Output = Self;
fn add(self, rhs: Vec3) -> Self {
Area {
min: self.min + rhs,
max: self.max + rhs
}
}
}
impl std::ops::Sub<Vec3> for Area {
type Output = Self;
fn sub(self, rhs: Vec3) -> Self {
Area {
min: self.min - rhs,
max: self.max - rhs
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_area() {
assert_eq!(
Area::from_unsorted(Vec3::new(8, 0, -10), Vec3::new(-8, 0, 10)),
Area::new(Vec3::new(-8, 0, -10), Vec3::new(8, 0, 10))
);
assert_eq!(
Area::from_unsorted(Vec3::new(10, 80, 42), Vec3::new(10, -50, 99)),
Area::new(Vec3::new(10, -50, 42), Vec3::new(10, 80, 99))
);
}
#[test]
fn test_area_iteration() {
let a = Area::new(Vec3::new(-1, -1, -1), Vec3::new(1, 1, 1));
let mut iter = a.iterate();
for z in -1 .. 2 {
for y in -1 .. 2 {
for x in -1 .. 2 {
assert_eq!(iter.next(), Some(Vec3::new(x, y, z)));
}
}
}
assert_eq!(iter.next(), None);
}
}

132
src/spatial/mod.rs Normal file
View File

@ -0,0 +1,132 @@
use std::cmp::{min, max};
mod vec3;
// TODO
// mod v3f;
mod area;
pub use vec3::Vec3;
// pub use v3f::V3f;
pub use area::Area;
pub fn area_contains_block(area: &Area, block_pos: Vec3) -> bool {
let corner = block_pos * 16;
area.min.x <= corner.x && corner.x + 15 <= area.max.x
&& area.min.y <= corner.y && corner.y + 15 <= area.max.y
&& area.min.z <= corner.z && corner.z + 15 <= area.max.z
}
pub fn area_touches_block(area: &Area, block_pos: Vec3) -> bool {
let corner = block_pos * 16;
area.min.x <= corner.x + 15 && corner.x <= area.max.x
&& area.min.y <= corner.y + 15 && corner.y <= area.max.y
&& area.min.z <= corner.z + 15 && corner.z <= area.max.z
}
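/// Overlap between `area` and the given mapblock, in absolute node coordinates.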
pub fn area_abs_block_overlap(area: &Area, block_pos: Vec3) -> Option<Area> {
let block_min = block_pos * 16;
let block_max = block_min + 15;
let node_min = Vec3 {
x: max(area.min.x, block_min.x),
y: max(area.min.y, block_min.y),
z: max(area.min.z, block_min.z)
};
let node_max = Vec3 {
x: min(area.max.x, block_max.x),
y: min(area.max.y, block_max.y),
z: min(area.max.z, block_max.z)
};
if node_min.x <= node_max.x
&& node_min.y <= node_max.y
&& node_min.z <= node_max.z
{
Some(Area {min: node_min, max: node_max})
} else {
None
}
}
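/// Overlap between `area` and the given mapblock, in node coordinates relative
/// to the block's minimum corner (0..=15 on each axis).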
pub fn area_rel_block_overlap(area: &Area, block_pos: Vec3) -> Option<Area> {
let corner = block_pos * 16;
let rel_min = area.min - corner;
let rel_max = area.max - corner;
let node_min = Vec3 {
x: max(rel_min.x, 0),
y: max(rel_min.y, 0),
z: max(rel_min.z, 0)
};
let node_max = Vec3 {
x: min(rel_max.x, 15),
y: min(rel_max.y, 15),
z: min(rel_max.z, 15)
};
if node_min.x <= node_max.x
&& node_min.y <= node_max.y
&& node_min.z <= node_max.z
{
Some(Area {min: node_min, max: node_max})
} else {
None
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_area_containment() {
let area = Area::new(Vec3::new(-1, -32, 16), Vec3::new(30, -17, 54));
let test_blocks = vec![
// Fully contained
(Vec3::new(0, -2, 1), true, true),
(Vec3::new(0, -2, 2), true, true),
// Partially contained
(Vec3::new(-1, -2, 1), true, false),
(Vec3::new(-1, -2, 2), true, false),
(Vec3::new(-1, -2, 3), true, false),
(Vec3::new(0, -2, 3), true, false),
(Vec3::new(1, -2, 3), true, false),
(Vec3::new(1, -2, 3), true, false),
(Vec3::new(1, -2, 2), true, false),
(Vec3::new(1, -2, 1), true, false),
// Not contained
(Vec3::new(-1, -2, 0), false, false),
(Vec3::new(0, -2, 0), false, false),
(Vec3::new(1, -2, 0), false, false),
(Vec3::new(2, -2, 0), false, false),
(Vec3::new(2, -2, 1), false, false),
(Vec3::new(2, -2, 2), false, false),
(Vec3::new(2, -2, 3), false, false),
];
for (pos, touches, contains) in test_blocks {
assert_eq!(area_touches_block(&area, pos), touches);
assert_eq!(area_contains_block(&area, pos), contains);
}
}
#[test]
fn test_area_block_overlap() {
let area = Area::new(Vec3::new(-3, -3, -3), Vec3::new(15, 15, 15));
let pairs = vec![
(Vec3::new(-1, -1, -1),
Some(Area::new(Vec3::new(13, 13, 13), Vec3::new(15, 15, 15)))),
(Vec3::new(0, 0, 0),
Some(Area::new(Vec3::new(0, 0, 0), Vec3::new(15, 15, 15)))),
(Vec3::new(1, 1, 1), None),
(Vec3::new(-1, 0, 0),
Some(Area::new(Vec3::new(13, 0, 0), Vec3::new(15, 15, 15)))),
];
for pair in pairs {
assert_eq!(area_rel_block_overlap(&area, pair.0), pair.1);
}
}
}

72
src/spatial/v3f.rs Normal file
View File

@ -0,0 +1,72 @@
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct V3f {
pub x: f32,
pub y: f32,
pub z: f32
}
impl V3f {
pub fn new(x: f32, y: f32, z: f32) -> Self {
Self {x, y, z}
}
}
impl std::ops::Add<Self> for V3f {
type Output = Self;
fn add(self, rhs: Self) -> Self {
Self {
x: self.x + rhs.x,
y: self.y + rhs.y,
z: self.z + rhs.z
}
}
}
impl std::ops::Add<f32> for V3f {
type Output = Self;
fn add(self, rhs: f32) -> Self {
Self {
x: self.x + rhs,
y: self.y + rhs,
z: self.z + rhs
}
}
}
impl std::ops::Sub<Self> for V3f {
type Output = Self;
fn sub(self, rhs: Self) -> Self {
Self {
x: self.x - rhs.x,
y: self.y - rhs.y,
z: self.z - rhs.z
}
}
}
impl std::ops::Mul<Self> for V3f {
type Output = Self;
fn mul(self, rhs: Self) -> Self {
Self {
x: self.x * rhs.x,
y: self.y * rhs.y,
z: self.z * rhs.z
}
}
}
impl std::ops::Mul<f32> for V3f {
type Output = Self;
fn mul(self, rhs: f32) -> Self {
Self {
x: self.x * rhs,
y: self.y * rhs,
z: self.z * rhs
}
}
}

193
src/spatial/vec3.rs Normal file
View File

@ -0,0 +1,193 @@
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct Vec3 {
pub x: i32,
pub y: i32,
pub z: i32
}
impl Vec3 {
#[inline]
pub fn new(x: i32, y: i32, z: i32) -> Self {
Self {x, y, z}
}
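/// Decode a database block key into a block position. X, Y, and Z are packed
/// as signed 12-bit values: key = x + y * 4096 + z * 4096 * 4096.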
pub fn from_block_key(key: i64) -> Self {
let x = (key + 2048).rem_euclid(4096) - 2048;
let rem = (key - x) / 4096;
let y = (rem + 2048).rem_euclid(4096) - 2048;
let z = (rem - y) / 4096;
Self {x: x as i32, y: y as i32, z: z as i32}
}
pub fn to_block_key(&self) -> i64 {
// Make sure values are within range.
assert!(-2048 <= self.x && self.x < 2048
&& -2048 <= self.y && self.y < 2048
&& -2048 <= self.z && self.z < 2048);
self.x as i64
+ self.y as i64 * 4096
+ self.z as i64 * 4096 * 4096
}
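/// Decode a node's index within a mapblock: 4 bits per axis, X in the lowest
/// nibble.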
pub fn from_u16_key(key: u16) -> Self {
Self {
x: (key & 0xF) as i32,
y: ((key >> 4) & 0xF) as i32,
z: ((key >> 8) & 0xF) as i32
}
}
pub fn is_valid_block_pos(&self) -> bool {
const LIMIT: i32 = 31000 / 16;
-LIMIT <= self.x && self.x <= LIMIT
&& -LIMIT <= self.y && self.y <= LIMIT
&& -LIMIT <= self.z && self.z <= LIMIT
}
pub fn is_valid_node_pos(&self) -> bool {
const LIMIT: i32 = 31000;
-LIMIT <= self.x && self.x <= LIMIT
&& -LIMIT <= self.y && self.y <= LIMIT
&& -LIMIT <= self.z && self.z <= LIMIT
}
pub fn map(&self, func: fn(i32) -> i32) -> Self {
Self {
x: func(self.x),
y: func(self.y),
z: func(self.z)
}
}
}
impl std::ops::Add<Self> for Vec3 {
type Output = Self;
fn add(self, rhs: Self) -> Self {
Self {
x: self.x + rhs.x,
y: self.y + rhs.y,
z: self.z + rhs.z
}
}
}
impl std::ops::Add<i32> for Vec3 {
type Output = Self;
fn add(self, rhs: i32) -> Self {
Self {
x: self.x + rhs,
y: self.y + rhs,
z: self.z + rhs
}
}
}
impl std::ops::Sub<Self> for Vec3 {
type Output = Self;
fn sub(self, rhs: Self) -> Self {
Self {
x: self.x - rhs.x,
y: self.y - rhs.y,
z: self.z - rhs.z
}
}
}
impl std::ops::Mul<Self> for Vec3 {
type Output = Self;
fn mul(self, rhs: Self) -> Self {
Self {
x: self.x * rhs.x,
y: self.y * rhs.y,
z: self.z * rhs.z
}
}
}
impl std::ops::Mul<i32> for Vec3 {
type Output = Self;
fn mul(self, rhs: i32) -> Self {
Self {
x: self.x * rhs,
y: self.y * rhs,
z: self.z * rhs
}
}
}
impl std::fmt::Display for Vec3 {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "({}, {}, {})", self.x, self.y, self.z)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_vec3() {
// Test `new` function
assert_eq!(Vec3::new(42, 0, -6000), Vec3 {x: 42, y: 0, z: -6000});
/* Test block key/vector conversions */
const Y_FAC: i64 = 0x1_000;
const Z_FAC: i64 = 0x1_000_000;
let bk_pairs = [
// Basics
(Vec3 {x: 0, y: 0, z: 0}, 0),
(Vec3 {x: 1, y: 0, z: 0}, 1),
(Vec3 {x: 0, y: 1, z: 0}, 1 * Y_FAC),
(Vec3 {x: 0, y: 0, z: 1}, 1 * Z_FAC),
// X/Y/Z Boundaries
(Vec3 {x: -2048, y: 0, z: 0}, -2048),
(Vec3 {x: 2047, y: 0, z: 0}, 2047),
(Vec3 {x: 0, y: -2048, z: 0}, -2048 * Y_FAC),
(Vec3 {x: 0, y: 2047, z: 0}, 2047 * Y_FAC),
(Vec3 {x: 0, y: 0, z: -2048}, -2048 * Z_FAC),
(Vec3 {x: 0, y: 0, z: 2047}, 2047 * Z_FAC),
// Extra spicy boundaries
(Vec3 {x: -42, y: 2047, z: -99},
-42 + 2047 * Y_FAC + -99 * Z_FAC),
(Vec3 {x: 64, y: -2048, z: 22},
64 + -2048 * Y_FAC + 22 * Z_FAC),
(Vec3 {x: 2047, y: 555, z: 35},
2047 + 555 * Y_FAC + 35 * Z_FAC),
(Vec3 {x: -2048, y: 600, z: -70},
-2048 + 600 * Y_FAC + -70 * Z_FAC),
// Multiple boundaries
(Vec3 {x: 2047, y: -2048, z: 16},
2047 + -2048 * Y_FAC + 16 * Z_FAC),
(Vec3 {x: -2048, y: 2047, z: 50},
-2048 + 2047 * Y_FAC + 50 * Z_FAC),
];
for pair in &bk_pairs {
assert_eq!(pair.0.to_block_key(), pair.1);
assert_eq!(pair.0, Vec3::from_block_key(pair.1));
}
/* Test u16/vector conversions */
let u16_pairs = [
(Vec3::new(0, 0, 0), 0x000),
(Vec3::new(1, 0, 0), 0x001),
(Vec3::new(0, 1, 0), 0x010),
(Vec3::new(0, 0, 1), 0x100),
(Vec3::new(15, 15, 15), 0xFFF),
(Vec3::new(5, 15, 9), 0x9F5)
];
for pair in &u16_pairs {
assert_eq!(pair.0, Vec3::from_u16_key(pair.1));
}
}
}

48
src/time_keeper.rs Normal file
View File

@ -0,0 +1,48 @@
use std::collections::HashMap;
use std::time::{Instant, Duration};
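/// A scoped timer; when dropped, the elapsed time is recorded in the parent
/// TimeKeeper under the timer's name.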
pub struct Timer<'a> {
parent: &'a mut TimeKeeper,
name: String,
start: Instant
}
impl<'a> Drop for Timer<'a> {
fn drop(&mut self) {
let elapsed = Instant::now().duration_since(self.start);
self.parent.add_time(&self.name, elapsed);
}
}
pub struct TimeKeeper {
times: HashMap<String, (Duration, u32)>
}
impl TimeKeeper {
pub fn new() -> Self {
Self {times: HashMap::new()}
}
fn add_time(&mut self, name: &str, elapsed: Duration) {
if let Some(item) = self.times.get_mut(name) {
(*item).0 += elapsed;
(*item).1 += 1;
} else {
self.times.insert(name.to_string(), (elapsed, 1));
}
}
pub fn get_timer(&mut self, name: &str) -> Timer {
Timer {parent: self, name: name.to_string(), start: Instant::now()}
}
/*pub fn print(&mut self) {
println!("");
for (name, (duration, count)) in &self.times {
println!("{}: {} x {:?} each; {:?} total",
name, count, *duration / *count, duration);
}
}*/
}

99
src/utils.rs Normal file
View File

@ -0,0 +1,99 @@
use std::time::Duration;
use memmem::{Searcher, TwoWaySearcher};
use byteorder::{WriteBytesExt, BigEndian};
use crate::instance::{InstState, StatusServer};
use crate::map_database::MapDatabase;
use crate::spatial::{Area, Vec3};
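/// Collect the keys of all mapblocks that match the optional search string
/// and/or area filters, reporting progress through the status server.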
pub fn query_keys(
db: &mut MapDatabase,
status: &StatusServer,
search_str: Option<String>,
area: Option<Area>,
invert: bool,
include_partial: bool
) -> Vec<i64> {
status.set_state(InstState::Querying);
// Prepend 16-bit search string length to reduce false positives.
// This will break if the name-ID map format changes.
let search_bytes = search_str.map(|s| {
let mut res = Vec::new();
res.write_u16::<BigEndian>(s.as_bytes().len() as u16).unwrap();
res.extend(s.as_bytes());
res
});
let data_searcher = search_bytes.as_ref().map(|b| {
TwoWaySearcher::new(b)
});
let mut keys = Vec::new();
// Area of included block positions.
// If invert == true, the function returns only blocks outside this area.
let block_area = area.map(|a| {
if invert == include_partial {
a.to_contained_block_area()
} else {
a.to_touching_block_area()
}
});
for (i, (key, data)) in db.iter_rows().enumerate() {
if let Some(a) = &block_area {
let block_pos = Vec3::from_block_key(key);
if a.contains(block_pos) == invert {
continue;
}
}
if let Some(s) = &data_searcher {
if s.search_in(&data).is_none() {
continue;
}
}
keys.push(key);
// Update total every 1024 iterations.
if i & 1023 == 0 {
status.set_total(keys.len())
}
}
status.set_total(keys.len());
status.set_state(InstState::Ignore);
keys
}
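/// Format a duration as MM:SS, or H:MM:SS once it reaches an hour;
/// e.g. 3725 seconds -> "1:02:05".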
pub fn fmt_duration(dur: Duration) -> String {
let s = dur.as_secs();
if s < 3600 {
format!("{:02}:{:02}", s / 60 % 60, s % 60)
} else {
format!("{}:{:02}:{:02}", s / 3600, s / 60 % 60, s % 60)
}
}
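/// Format a number to three significant figures with a k/M/B/T suffix;
/// e.g. 1_234_567 -> "1.23M".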
pub fn fmt_big_num(num: u64) -> String {
let f_num = num as f32;
let abbrevs = vec![
("T".to_string(), 1_000_000_000_000.),
("B".to_string(), 1_000_000_000.),
("M".to_string(), 1_000_000.),
("k".to_string(), 1_000.)
];
for (suffix, unit) in abbrevs {
if f_num >= unit {
let mantissa = f_num / unit;
let place_vals =
if mantissa >= 100. { 0 }
else if mantissa >= 10. { 1 }
else { 2 };
return format!("{:.*}{}", place_vals, mantissa, suffix)
}
}
format!("{}", f_num.round())
}

10
todo.md Normal file
View File

@ -0,0 +1,10 @@
Optimizations:
- Cache deserialized map blocks instead of deserializing each time.
- Overlay: Iterate map blocks in space-filling-curve order to improve cache usefulness.
- (DONE) Don't re-compress node data/metadata if it hasn't changed.
- MapBlock::serialize: Use a big buffer somewhere to avoid heap allocations.
Todo?
- Fold the area utility functions into the Area struct.

8
workspace.code-workspace Normal file
View File

@ -0,0 +1,8 @@
{
"folders": [
{
"path": "."
}
],
"settings": {}
}