Polishing map_block module
parent 21ee17db58
commit 98cec33c81
@@ -82,22 +82,22 @@ pub fn merge_metadata(
 	let diff = offset.x + offset.y * 16 + offset.z * 256;
 
 	// Delete any existing metadata in the destination block
-	let mut to_delete = Vec::with_capacity(dst_meta.list.len());
-	for (&idx, _) in &dst_meta.list {
+	let mut to_delete = Vec::with_capacity(dst_meta.len());
+	for (&idx, _) in dst_meta.iter() {
 		let pos = Vec3::from_u16_key(idx);
 		if dst_area.contains(pos) {
 			to_delete.push(idx);
 		}
 	}
 	for idx in &to_delete {
-		dst_meta.list.remove(idx);
+		dst_meta.remove(idx);
 	}
 
 	// Copy new metadata
-	for (&idx, meta) in &src_meta.list {
+	for (&idx, meta) in src_meta {
 		let pos = Vec3::from_u16_key(idx);
 		if src_area.contains(pos) {
-			dst_meta.list.insert((idx as i32 + diff) as u16, meta.clone());
+			dst_meta.insert((idx as i32 + diff) as u16, meta.clone());
 		}
 	}
 }
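A note on the arithmetic above: the u16 metadata keys pack a node's position within the 16x16x16 block as x + y * 16 + z * 256 (this is what the `diff` computation and `Vec3::from_u16_key` rely on), so translating every position by `offset` is a single integer addition per key. A minimal standalone sketch of that packing; `pack_key` and `unpack_key` are illustrative helpers, not the crate's API:

fn pack_key(x: u16, y: u16, z: u16) -> u16 {
	// Pack a position within a 16x16x16 block into the u16 key format
	// used by the metadata list (x + y * 16 + z * 256).
	debug_assert!(x < 16 && y < 16 && z < 16);
	x + y * 16 + z * 256
}

fn unpack_key(key: u16) -> (u16, u16, u16) {
	(key % 16, key / 16 % 16, key / 256)
}

fn main() {
	let key = pack_key(3, 5, 7);
	assert_eq!(unpack_key(key), (3, 5, 7));
	// Re-keying by a block-relative offset is one integer addition,
	// which is what merge_metadata does with `diff` above.
	let diff = 1 + 2 * 16 + 3 * 256;
	assert_eq!(unpack_key(key + diff), (4, 7, 10));
}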
@@ -117,7 +117,7 @@ pub fn clean_name_id_map(block: &mut MapBlock) {
 	// Rebuild the name-ID map.
 	let mut new_nimap = BTreeMap::<u16, Vec<u8>>::new();
 	let mut map = vec![0u16; id_count];
-	for id in 0 .. id_count {
+	for id in 0..id_count {
 		// Skip unused IDs.
 		if !used[id] {
 			continue;
@@ -5,7 +5,7 @@ use crate::spatial::{Vec3, area_rel_block_overlap,
 	area_abs_block_overlap};
 use crate::map_database::MapDatabase;
 use crate::map_block::{MapBlock, MapBlockError, is_valid_generated,
-	NodeMetadataList};
+	NodeMetadataList, NodeMetadataListExt};
 use crate::block_utils::{merge_blocks, merge_metadata, clean_name_id_map};
 use crate::instance::{ArgType, InstBundle};
 use crate::utils::{CacheMap, query_keys};
@@ -3,7 +3,7 @@ use super::Command;
 use crate::unwrap_or;
 use crate::spatial::Vec3;
 use crate::instance::{ArgType, InstBundle};
-use crate::map_block::{MapBlock, NodeMetadataList};
+use crate::map_block::{MapBlock, NodeMetadataList, NodeMetadataListExt};
 use crate::utils::{query_keys, to_bytes, to_slice, fmt_big_num};
 
 
@@ -31,9 +31,9 @@ fn delete_metadata(inst: &mut InstBundle) {
 			NodeMetadataList::deserialize(block.metadata.get_ref()), continue);
 
 		let block_corner = Vec3::from_block_key(key) * 16;
-		let mut to_delete = Vec::with_capacity(meta.list.len());
+		let mut to_delete = Vec::with_capacity(meta.len());
 
-		for (&idx, _) in &meta.list {
+		for (&idx, _) in &meta {
 			let pos = Vec3::from_u16_key(idx);
 			let abs_pos = pos + block_corner;
 
@@ -54,7 +54,7 @@ fn delete_metadata(inst: &mut InstBundle) {
 		if !to_delete.is_empty() {
 			count += to_delete.len() as u64;
 			for idx in &to_delete {
-				meta.list.remove(idx);
+				meta.remove(idx);
 			}
 			*block.metadata.get_mut() = meta.serialize(block.version);
 			inst.db.set_block(key, &block.serialize()).unwrap();
@@ -5,8 +5,8 @@ use crate::spatial::{Vec3, Area, area_rel_block_overlap,
 	area_abs_block_overlap, area_contains_block, area_touches_block};
 use crate::instance::{ArgType, InstArgs, InstBundle};
 use crate::map_database::MapDatabase;
-use crate::map_block::{MapBlock, NodeMetadataList, is_valid_generated,
-	MapBlockError};
+use crate::map_block::{MapBlock, MapBlockError, is_valid_generated,
+	NodeMetadataList, NodeMetadataListExt};
 use crate::block_utils::{merge_blocks, merge_metadata, clean_name_id_map};
 use crate::utils::{query_keys, CacheMap};
 
@@ -3,7 +3,7 @@ use super::Command;
 use crate::unwrap_or;
 use crate::spatial::Vec3;
 use crate::instance::{ArgType, InstArgs, InstBundle};
-use crate::map_block::{MapBlock, NodeMetadataList};
+use crate::map_block::{MapBlock, NodeMetadataList, NodeMetadataListExt};
 use crate::utils::{query_keys, to_bytes, fmt_big_num};
 
 const NEWLINE: u8 = b'\n';
@@ -94,7 +94,7 @@ fn replace_in_inv(inst: &mut InstBundle) {
 		let block_corner = Vec3::from_block_key(key) * 16;
 		let mut modified = false;
 
-		for (&idx, data) in &mut meta.list {
+		for (&idx, data) in &mut meta {
 			let pos = Vec3::from_u16_key(idx);
 			let abs_pos = pos + block_corner;
 			if let Some(a) = inst.args.area {
@@ -41,9 +41,9 @@ fn do_replace(
 	let mut new_node_present = false;
 
 	let nd = block.node_data.get_mut();
-	for z in 0 .. 16 {
-		for y in 0 .. 16 {
-			for x in 0 .. 16 {
+	for z in 0..16 {
+		for y in 0..16 {
+			for x in 0..16 {
 				if nd.nodes[idx] == search_id
 					&& node_area.contains(Vec3 {x, y, z}) != invert
 				{
@@ -3,7 +3,7 @@ use super::Command;
 use crate::unwrap_or;
 use crate::spatial::Vec3;
 use crate::instance::{ArgType, InstBundle};
-use crate::map_block::{MapBlock, NodeMetadataList};
+use crate::map_block::{MapBlock, NodeMetadataList, NodeMetadataListExt};
 use crate::utils::{query_keys, to_bytes, fmt_big_num};
 
 
@@ -36,7 +36,7 @@ fn set_meta_var(inst: &mut InstBundle) {
 		let block_corner = Vec3::from_block_key(block_key) * 16;
 		let mut modified = false;
 
-		for (&idx, data) in &mut meta.list {
+		for (&idx, data) in &mut meta {
 			let pos = Vec3::from_u16_key(idx);
 			let abs_pos = pos + block_corner;
 
@@ -59,10 +59,12 @@ impl<T: Compress> ZlibContainer<T> {
 		}
 	}
 
+	#[inline]
 	pub fn get_ref(&self) -> &T {
 		&self.data
 	}
 
+	#[inline]
 	pub fn get_mut(&mut self) -> &mut T {
 		self.compressed = None;
 		&mut self.data
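The `#[inline]` hints are the addition here; the `self.compressed = None` in `get_mut` is the detail worth noting: handing out a mutable reference invalidates the cached compressed bytes so the container re-compresses on the next serialize, while `get_ref` leaves the cache intact. A stripped-down sketch of that pattern; the struct layout is an assumption, only `get_ref`, `get_mut` and the `compressed` field mirror the diff:

// Illustration only: a value plus an optional cache of its compressed form.
struct CachedBlob {
	data: Vec<u8>,
	compressed: Option<Vec<u8>>, // cached compressed form, if still valid
}

impl CachedBlob {
	// Shared access cannot change `data`, so the cache stays valid.
	fn get_ref(&self) -> &Vec<u8> {
		&self.data
	}

	// Mutable access may change `data`: drop the cache so the next
	// serialize re-compresses instead of reusing stale bytes.
	fn get_mut(&mut self) -> &mut Vec<u8> {
		self.compressed = None;
		&mut self.data
	}
}

fn main() {
	let mut blob = CachedBlob {
		data: vec![1, 2, 3],
		compressed: Some(vec![0x78, 0x9c]), // pretend cached zlib bytes
	};
	assert_eq!(blob.get_ref(), &vec![1, 2, 3]); // shared access keeps the cache
	blob.get_mut().push(4);                     // mutable access drops it
	assert!(blob.compressed.is_none());
}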
@@ -6,14 +6,12 @@ const BLOCK_BUF_SIZE: usize = 2048;
 
 
 pub fn is_valid_generated(data: &[u8]) -> bool {
-	data.len() > 2
+	data.len() >= 2
 		&& MIN_BLOCK_VER <= data[0] && data[0] <= MAX_BLOCK_VER
 		&& data[1] & 0x08 == 0
 }
 
 
-// TODO: Allocation limit for everything to prevent crashes with bad data.
-// TODO: Make sure all data structures are the best choice.
 #[derive(Debug, Clone)]
 pub struct MapBlock {
 	pub version: u8,
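Regarding the `>= 2` fix above: the function only ever reads `data[0]` (the version byte) and `data[1]` (the flags byte), so a two-byte payload is already enough to judge and the old `> 2` rejected it needlessly. A self-contained sketch of the check; the version bounds below are placeholders for the crate's MIN_BLOCK_VER/MAX_BLOCK_VER constants, not their actual values:

// Placeholder bounds; block version 28 is known valid from the metadata code.
const MIN_BLOCK_VER: u8 = 25;
const MAX_BLOCK_VER: u8 = 29;

fn is_valid_generated(data: &[u8]) -> bool {
	data.len() >= 2
		&& MIN_BLOCK_VER <= data[0] && data[0] <= MAX_BLOCK_VER
		&& data[1] & 0x08 == 0
}

fn main() {
	assert!(is_valid_generated(&[28, 0x00]));  // two bytes now pass the length check
	assert!(!is_valid_generated(&[28, 0x08])); // flag bit 0x08 still rejects the block
	assert!(!is_valid_generated(&[28]));       // too short to inspect
}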
@@ -30,8 +28,8 @@ pub struct MapBlock {
 }
 
 impl MapBlock {
-	pub fn deserialize(data_slice: &[u8]) -> Result<Self, MapBlockError> {
-		let mut data = Cursor::new(data_slice);
+	pub fn deserialize(src: &[u8]) -> Result<Self, MapBlockError> {
+		let mut data = Cursor::new(src);
 
 		// Version
 		let version = data.read_u8()?;
@@ -1,10 +1,10 @@
-use std::collections::HashMap;
-use super::*;
+use std::collections::{HashMap, BTreeMap};
+use std::cmp::min;
 
 use memmem::{Searcher, TwoWaySearcher};
 
+use super::*;
+
 
 #[derive(Debug, Clone)]
 pub struct NodeMetadata {
@@ -17,8 +17,8 @@ impl NodeMetadata {
 		-> Result<Self, MapBlockError>
 	{
 		let var_count = data.read_u32::<BigEndian>()?;
-		// Avoid memory allocation errors with corrupt data.
-		let mut vars = HashMap::with_capacity(min(var_count as usize, 0xFFFF));
+		// Avoid allocating huge numbers of variables (bad data handling).
+		let mut vars = HashMap::with_capacity(min(var_count as usize, 64));
 
 		for _ in 0..var_count {
 			let name = read_string16(data)?;
@@ -38,14 +38,12 @@ impl NodeMetadata {
 		let mut inv = vec_with_len(end + END_STR.len());
 		data.read_exact(&mut inv)?;
 
-		Ok(Self {
-			vars,
-			inv
-		})
+		Ok(Self { vars, inv })
 	}
 
 	fn serialize(&self, data: &mut Cursor<Vec<u8>>, version: u8) {
 		data.write_u32::<BigEndian>(self.vars.len() as u32).unwrap();
 
 		for (name, (val, private)) in &self.vars {
 			write_string16(data, name);
 			write_string32(data, &val);
@@ -59,16 +57,18 @@ impl NodeMetadata {
 }
 
 
-#[derive(Debug)]
-pub struct NodeMetadataList {
-	// TODO: Switch to BTreeMap or something more stable
-	// TODO: This is just a wrapper struct, switch to a type alias?
-	pub list: HashMap<u16, NodeMetadata>
+pub trait NodeMetadataListExt {
+	fn deserialize(src: &[u8]) -> Result<Self, MapBlockError>
+		where Self: std::marker::Sized;
+	fn serialize(&self, block_version: u8) -> Vec<u8>;
 }
 
-impl NodeMetadataList {
-	pub fn deserialize(data_slice: &[u8]) -> Result<Self, MapBlockError> {
-		let mut data = Cursor::new(data_slice);
+pub type NodeMetadataList = BTreeMap<u16, NodeMetadata>;
+
+impl NodeMetadataListExt for NodeMetadataList {
+	fn deserialize(src: &[u8]) -> Result<Self, MapBlockError> {
+		let mut data = Cursor::new(src);
 
 		let version = data.read_u8()?;
 		if version > 2 {
@@ -80,28 +80,28 @@ impl NodeMetadataList {
 			_ => data.read_u16::<BigEndian>()?
 		};
 
-		let mut list = HashMap::with_capacity(count as usize);
+		let mut list = BTreeMap::new();
 		for _ in 0..count {
 			let pos = data.read_u16::<BigEndian>()?;
 			let meta = NodeMetadata::deserialize(&mut data, version)?;
 			list.insert(pos, meta);
 		}
 
-		Ok(Self { list })
+		Ok(list)
 	}
 
-	pub fn serialize(&self, block_version: u8) -> Vec<u8> {
+	fn serialize(&self, block_version: u8) -> Vec<u8> {
 		let buf = Vec::new();
 		let mut data = Cursor::new(buf);
 
-		if self.list.len() == 0 {
+		if self.len() == 0 {
 			data.write_u8(0).unwrap();
 		} else {
 			let version = if block_version >= 28 { 2 } else { 1 };
 			data.write_u8(version).unwrap();
-			data.write_u16::<BigEndian>(self.list.len() as u16).unwrap();
+			data.write_u16::<BigEndian>(self.len() as u16).unwrap();
 
-			for (&pos, meta) in &self.list {
+			for (&pos, meta) in self {
 				data.write_u16::<BigEndian>(pos).unwrap();
 				meta.serialize(&mut data, version);
 			}
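The two hunks above replace the old `NodeMetadataList` wrapper struct (and its `.list` field) with a plain `BTreeMap` type alias plus an extension trait, which is what lets the call sites earlier in this commit use `len`, `iter`, `remove` and `insert` directly. A self-contained miniature of that pattern; `String` stands in for `NodeMetadata` and the (de)serialization bodies are stubs:

use std::collections::BTreeMap;

// Miniature of the type-alias-plus-extension-trait pattern. Only the names
// NodeMetadataList and NodeMetadataListExt mirror the real code; the value
// type and the method bodies are placeholders.
type NodeMetadataList = BTreeMap<u16, String>;

trait NodeMetadataListExt {
	fn deserialize(src: &[u8]) -> Self where Self: Sized;
	fn serialize(&self, block_version: u8) -> Vec<u8>;
}

impl NodeMetadataListExt for NodeMetadataList {
	fn deserialize(_src: &[u8]) -> Self {
		// Real parsing omitted; the point is that the constructor-style
		// call NodeMetadataList::deserialize(..) still works.
		BTreeMap::new()
	}

	fn serialize(&self, _block_version: u8) -> Vec<u8> {
		Vec::new()
	}
}

fn main() {
	let mut meta = NodeMetadataList::deserialize(b"...");
	// Ordinary map methods are available directly, no `.list` wrapper field.
	meta.insert(42, "default:chest".to_string());
	assert_eq!(meta.len(), 1);
	meta.remove(&42);
	let _bytes = meta.serialize(28);
}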
@@ -1,5 +1,6 @@
 use std::io::prelude::*;
 use std::io::Cursor;
 
 use byteorder::{ByteOrder, BigEndian, ReadBytesExt, WriteBytesExt};
 
 mod map_block;
@@ -14,7 +15,7 @@ pub use map_block::{MapBlock, is_valid_generated};
 pub use compression::ZlibContainer;
 use compression::Compress;
 pub use node_data::NodeData;
-pub use metadata::NodeMetadataList;
+pub use metadata::{NodeMetadataList, NodeMetadataListExt};
 pub use static_object::{StaticObject, StaticObjectList, LuaEntityData};
 use static_object::{serialize_objects, deserialize_objects};
 pub use node_timer::{NodeTimer, NodeTimerList};
@@ -43,19 +44,32 @@ fn vec_with_len<T>(len: usize) -> Vec<T> {
 }
 
 
-fn read_string16(src: &mut Cursor<&[u8]>) -> Result<Vec<u8>, std::io::Error> {
-	let count = src.read_u16::<BigEndian>()?;
-	let mut bytes = vec_with_len(count as usize);
+/// Return `n` bytes of data from `src`. Will fail safely if there are not
+/// enough bytes in `src`.
+#[inline(always)]
+fn try_read_n(src: &mut Cursor<&[u8]>, n: usize)
+	-> Result<Vec<u8>, std::io::Error>
+{
+	if src.get_ref().len() - (src.position() as usize) < n {
+		Err(std::io::Error::new(std::io::ErrorKind::UnexpectedEof,
+			"not enough bytes to fill buffer"))
+	} else {
+		let mut bytes = vec_with_len(n);
+		src.read_exact(&mut bytes)?;
+		Ok(bytes)
+	}
+}
+
+
+fn read_string16(src: &mut Cursor<&[u8]>) -> Result<Vec<u8>, std::io::Error> {
+	let count = src.read_u16::<BigEndian>()?;
+	try_read_n(src, count as usize)
 }
 
 
 fn read_string32(src: &mut Cursor<&[u8]>) -> Result<Vec<u8>, std::io::Error> {
 	let count = src.read_u32::<BigEndian>()?;
-	let mut bytes = vec_with_len(count as usize);
-	src.read_exact(&mut bytes)?;
-	Ok(bytes)
+	try_read_n(src, count as usize)
 }
 
 
@@ -69,3 +83,34 @@ fn write_string32(dst: &mut Cursor<Vec<u8>>, data: &[u8]) {
 	dst.write_u32::<BigEndian>(data.len() as u32).unwrap();
 	dst.write(data).unwrap();
 }
+
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+
+	#[test]
+	fn test_string_serialization() {
+		let buf =
+			b"\x00\x00\
+			\x00\x0DHello, world!\
+			\x00\x00\x00\x10more test data..\
+			\x00\x00\x00\x00\
+			\x00\x00\x00\x11corrupted length";
+		let mut cursor = Cursor::new(&buf[..]);
+
+		fn contains<E>(res: Result<Vec<u8>, E>, val: &[u8]) -> bool {
+			if let Ok(inner) = res {
+				inner == val
+			} else {
+				false
+			}
+		}
+
+		assert!(contains(read_string16(&mut cursor), b""));
+		assert!(contains(read_string16(&mut cursor), b"Hello, world!"));
+		assert!(contains(read_string32(&mut cursor), b"more test data.."));
+		assert!(contains(read_string32(&mut cursor), b""));
+		assert!(read_string32(&mut cursor).is_err());
+	}
+}
@@ -1,6 +1,5 @@
-use std::collections::BTreeMap;
-
 use super::*;
+use std::collections::BTreeMap;
 
 
 /// Maps 16-bit node IDs to actual node names.
@@ -21,7 +20,7 @@ impl NameIdMap {
 		let count = data.read_u16::<BigEndian>()? as usize;
 		let mut map = BTreeMap::new();
 
-		for _ in 0 .. count {
+		for _ in 0..count {
 			let id = data.read_u16::<BigEndian>()?;
 			let name = read_string16(data)?;
 			map.insert(id, name);
@@ -52,15 +51,13 @@ impl NameIdMap {
 		self.0.iter().next_back().map(|(&k, _)| k)
 	}
 
-	/// Remove the name at a given ID and shift down values above it.
+	/// Remove the name at a given ID and shift down any values above it.
 	pub fn remove_shift(&mut self, id: u16) {
 		self.0.remove(&id);
-		let mut next_id = id + 1;
-
-		while self.0.contains_key(&next_id) {
-			let name = self.0.remove(&next_id).unwrap();
-			self.0.insert(next_id - 1, name);
-			next_id += 1;
+		for k in id + 1 ..= self.get_max_id().unwrap_or(0) {
+			if let Some(name) = self.0.remove(&k) {
+				self.0.insert(k - 1, name);
+			}
 		}
 	}
 }
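The rewritten `remove_shift` walks every ID above the removed one (up to `get_max_id`), rather than stopping at the first gap like the old `while` loop, so higher IDs are shifted down even if the map already had holes. A standalone miniature on a bare `BTreeMap`; the real method lives on the `NameIdMap` newtype and the node names below are just illustrative:

use std::collections::BTreeMap;

// Remove one ID and shift every higher ID down by one so IDs stay contiguous.
fn remove_shift(map: &mut BTreeMap<u16, Vec<u8>>, id: u16) {
	map.remove(&id);
	let max_id = map.keys().next_back().copied().unwrap_or(0);
	for k in id + 1..=max_id {
		if let Some(name) = map.remove(&k) {
			map.insert(k - 1, name);
		}
	}
}

fn main() {
	let mut map: BTreeMap<u16, Vec<u8>> = BTreeMap::new();
	map.insert(0, b"air".to_vec());
	map.insert(1, b"default:stone".to_vec());
	map.insert(2, b"default:dirt".to_vec());

	remove_shift(&mut map, 1);

	// "default:dirt" moved from ID 2 down to ID 1.
	assert_eq!(map.get(&1), Some(&b"default:dirt".to_vec()));
	assert_eq!(map.len(), 2);
}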
@@ -1,9 +1,9 @@
-use super::*;
-
 use flate2::write::ZlibEncoder;
 use flate2::read::ZlibDecoder;
 use flate2::Compression;
 
+use super::*;
+
 
 const BLOCK_SIZE: usize = 16;
 const NODE_COUNT: usize = BLOCK_SIZE * BLOCK_SIZE * BLOCK_SIZE;
@@ -20,12 +20,11 @@ impl Compress for NodeData {
 	fn compress(&self, dst: &mut Cursor<Vec<u8>>) {
 		let mut encoder = ZlibEncoder::new(dst, Compression::default());
 
-		let mut node_data = Vec::with_capacity(NODE_COUNT * 2);
-		unsafe { node_data.set_len(NODE_COUNT * 2) }
+		let mut node_bytes = vec_with_len(NODE_COUNT * 2);
 		BigEndian::write_u16_into(&self.nodes,
-			&mut node_data[.. NODE_COUNT * 2]);
+			&mut node_bytes[..NODE_COUNT * 2]);
 
-		encoder.write_all(&node_data).unwrap();
+		encoder.write_all(&node_bytes).unwrap();
 		encoder.write_all(&self.param1).unwrap();
 		encoder.write_all(&self.param2).unwrap();
 		encoder.finish().unwrap();
@@ -1,4 +1,5 @@
 use super::*;
+use std::cmp::min;
 
 
 #[derive(Debug, Clone)]
@@ -21,9 +22,10 @@ pub fn deserialize_timers(src: &mut Cursor<&[u8]>)
 	}
 
 	let count = src.read_u16::<BigEndian>()?;
-	let mut timers = Vec::with_capacity(count as usize);
+	// Limit allocation to number of nodes (bad data handling).
+	let mut timers = Vec::with_capacity(min(count, 4096) as usize);
 
-	for _ in 0 .. count {
+	for _ in 0..count {
 		let pos = src.read_u16::<BigEndian>()?;
 		let timeout = src.read_u32::<BigEndian>()?;
 		let elapsed = src.read_u32::<BigEndian>()?;
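The 4096 cap here matches the node count of a mapblock (16 * 16 * 16), the most node timers a well-formed block can contain; it only bounds the up-front allocation, while the loop still reads `count` entries and errors out naturally if a corrupt count overruns the data. A small check of that arithmetic, reusing the constants shown in the node_data hunk above:

// 4096 is the per-block node count, hence a safe ceiling for the capacity hint.
const BLOCK_SIZE: usize = 16;
const NODE_COUNT: usize = BLOCK_SIZE * BLOCK_SIZE * BLOCK_SIZE;

fn main() {
	assert_eq!(NODE_COUNT, 4096);
	// A corrupt 16-bit count can claim up to 65535 timers; the capacity
	// hint is clamped, while parsing still validates the actual data.
	let corrupt_count: u16 = u16::MAX;
	assert_eq!(corrupt_count.min(4096) as usize, NODE_COUNT);
}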
@@ -1,5 +1,6 @@
 use super::*;
 use crate::spatial::Vec3;
+use std::cmp::min;
 
 
 #[derive(Clone, Debug)]
@@ -43,8 +44,9 @@ pub fn deserialize_objects(src: &mut Cursor<&[u8]>)
 	}
 
 	let count = src.read_u16::<BigEndian>()?;
-	let mut list = Vec::with_capacity(count as usize);
-	for _ in 0 .. count {
+	// Limit allocation to MT's default max object count (bad data handling).
+	let mut list = Vec::with_capacity(min(count, 64) as usize);
+	for _ in 0..count {
 		list.push(StaticObject::deserialize(src)?);
 	}
 