amulet-core 1.9.19__py3-none-any.whl → 1.9.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of amulet-core might be problematic. Click here for more details.
- amulet/__init__.py +27 -27
- amulet/__pyinstaller/__init__.py +2 -2
- amulet/__pyinstaller/hook-amulet.py +4 -4
- amulet/_version.py +21 -21
- amulet/api/__init__.py +2 -2
- amulet/api/abstract_base_entity.py +128 -128
- amulet/api/block.py +630 -630
- amulet/api/block_entity.py +71 -71
- amulet/api/cache.py +107 -107
- amulet/api/chunk/__init__.py +6 -6
- amulet/api/chunk/biomes.py +207 -207
- amulet/api/chunk/block_entity_dict.py +175 -175
- amulet/api/chunk/blocks.py +46 -46
- amulet/api/chunk/chunk.py +389 -389
- amulet/api/chunk/entity_list.py +75 -75
- amulet/api/chunk/status.py +167 -167
- amulet/api/data_types/__init__.py +4 -4
- amulet/api/data_types/generic_types.py +4 -4
- amulet/api/data_types/operation_types.py +16 -16
- amulet/api/data_types/world_types.py +49 -49
- amulet/api/data_types/wrapper_types.py +71 -71
- amulet/api/entity.py +74 -74
- amulet/api/errors.py +119 -119
- amulet/api/history/__init__.py +36 -36
- amulet/api/history/base/__init__.py +3 -3
- amulet/api/history/base/base_history.py +26 -26
- amulet/api/history/base/history_manager.py +63 -63
- amulet/api/history/base/revision_manager.py +73 -73
- amulet/api/history/changeable.py +15 -15
- amulet/api/history/data_types.py +7 -7
- amulet/api/history/history_manager/__init__.py +3 -3
- amulet/api/history/history_manager/container.py +102 -102
- amulet/api/history/history_manager/database.py +279 -279
- amulet/api/history/history_manager/meta.py +93 -93
- amulet/api/history/history_manager/object.py +116 -116
- amulet/api/history/revision_manager/__init__.py +2 -2
- amulet/api/history/revision_manager/disk.py +33 -33
- amulet/api/history/revision_manager/ram.py +12 -12
- amulet/api/item.py +75 -75
- amulet/api/level/__init__.py +4 -4
- amulet/api/level/base_level/__init__.py +1 -1
- amulet/api/level/base_level/base_level.py +1035 -1026
- amulet/api/level/base_level/chunk_manager.py +227 -227
- amulet/api/level/base_level/clone.py +389 -389
- amulet/api/level/base_level/player_manager.py +101 -101
- amulet/api/level/immutable_structure/__init__.py +1 -1
- amulet/api/level/immutable_structure/immutable_structure.py +94 -94
- amulet/api/level/immutable_structure/void_format_wrapper.py +117 -117
- amulet/api/level/structure.py +22 -22
- amulet/api/level/world.py +19 -19
- amulet/api/partial_3d_array/__init__.py +2 -2
- amulet/api/partial_3d_array/base_partial_3d_array.py +263 -263
- amulet/api/partial_3d_array/bounded_partial_3d_array.py +528 -528
- amulet/api/partial_3d_array/data_types.py +15 -15
- amulet/api/partial_3d_array/unbounded_partial_3d_array.py +229 -229
- amulet/api/partial_3d_array/util.py +152 -152
- amulet/api/player.py +65 -65
- amulet/api/registry/__init__.py +2 -2
- amulet/api/registry/base_registry.py +34 -34
- amulet/api/registry/biome_manager.py +153 -153
- amulet/api/registry/block_manager.py +156 -156
- amulet/api/selection/__init__.py +2 -2
- amulet/api/selection/abstract_selection.py +315 -315
- amulet/api/selection/box.py +805 -805
- amulet/api/selection/group.py +488 -488
- amulet/api/structure.py +37 -37
- amulet/api/wrapper/__init__.py +8 -8
- amulet/api/wrapper/chunk/interface.py +441 -441
- amulet/api/wrapper/chunk/translator.py +567 -567
- amulet/api/wrapper/format_wrapper.py +772 -772
- amulet/api/wrapper/structure_format_wrapper.py +116 -116
- amulet/api/wrapper/world_format_wrapper.py +63 -63
- amulet/level/__init__.py +1 -1
- amulet/level/formats/anvil_forge_world.py +40 -40
- amulet/level/formats/anvil_world/__init__.py +3 -3
- amulet/level/formats/anvil_world/_sector_manager.py +291 -384
- amulet/level/formats/anvil_world/data_pack/__init__.py +2 -2
- amulet/level/formats/anvil_world/data_pack/data_pack.py +224 -224
- amulet/level/formats/anvil_world/data_pack/data_pack_manager.py +77 -77
- amulet/level/formats/anvil_world/dimension.py +177 -177
- amulet/level/formats/anvil_world/format.py +769 -769
- amulet/level/formats/anvil_world/region.py +384 -384
- amulet/level/formats/construction/__init__.py +3 -3
- amulet/level/formats/construction/format_wrapper.py +515 -515
- amulet/level/formats/construction/interface.py +134 -134
- amulet/level/formats/construction/section.py +60 -60
- amulet/level/formats/construction/util.py +165 -165
- amulet/level/formats/leveldb_world/__init__.py +3 -3
- amulet/level/formats/leveldb_world/chunk.py +33 -33
- amulet/level/formats/leveldb_world/dimension.py +385 -419
- amulet/level/formats/leveldb_world/format.py +659 -641
- amulet/level/formats/leveldb_world/interface/chunk/__init__.py +36 -36
- amulet/level/formats/leveldb_world/interface/chunk/base_leveldb_interface.py +836 -836
- amulet/level/formats/leveldb_world/interface/chunk/generate_interface.py +31 -31
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_0.py +30 -30
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_1.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_10.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_11.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_12.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_13.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_14.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_15.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_16.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_17.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_18.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_19.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_2.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_20.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_21.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_22.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_23.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_24.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_25.py +24 -24
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_26.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_27.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_28.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_29.py +33 -33
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_3.py +57 -57
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_30.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_31.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_32.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_33.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_34.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_35.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_36.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_37.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_38.py +10 -10
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_39.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_4.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_40.py +16 -16
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_5.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_6.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_7.py +12 -12
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_8.py +180 -180
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_9.py +18 -18
- amulet/level/formats/leveldb_world/interface/chunk/leveldb_chunk_versions.py +79 -79
- amulet/level/formats/mcstructure/__init__.py +3 -3
- amulet/level/formats/mcstructure/chunk.py +50 -50
- amulet/level/formats/mcstructure/format_wrapper.py +408 -408
- amulet/level/formats/mcstructure/interface.py +175 -175
- amulet/level/formats/schematic/__init__.py +3 -3
- amulet/level/formats/schematic/chunk.py +55 -55
- amulet/level/formats/schematic/data_types.py +4 -4
- amulet/level/formats/schematic/format_wrapper.py +373 -373
- amulet/level/formats/schematic/interface.py +142 -142
- amulet/level/formats/sponge_schem/__init__.py +4 -4
- amulet/level/formats/sponge_schem/chunk.py +62 -62
- amulet/level/formats/sponge_schem/format_wrapper.py +463 -463
- amulet/level/formats/sponge_schem/interface.py +118 -118
- amulet/level/formats/sponge_schem/varint/__init__.py +1 -1
- amulet/level/formats/sponge_schem/varint/varint.py +87 -87
- amulet/level/interfaces/chunk/anvil/anvil_0.py +72 -72
- amulet/level/interfaces/chunk/anvil/anvil_1444.py +336 -336
- amulet/level/interfaces/chunk/anvil/anvil_1466.py +94 -94
- amulet/level/interfaces/chunk/anvil/anvil_1467.py +37 -37
- amulet/level/interfaces/chunk/anvil/anvil_1484.py +20 -20
- amulet/level/interfaces/chunk/anvil/anvil_1503.py +20 -20
- amulet/level/interfaces/chunk/anvil/anvil_1519.py +34 -34
- amulet/level/interfaces/chunk/anvil/anvil_1901.py +20 -20
- amulet/level/interfaces/chunk/anvil/anvil_1908.py +20 -20
- amulet/level/interfaces/chunk/anvil/anvil_1912.py +21 -21
- amulet/level/interfaces/chunk/anvil/anvil_1934.py +20 -20
- amulet/level/interfaces/chunk/anvil/anvil_2203.py +69 -69
- amulet/level/interfaces/chunk/anvil/anvil_2529.py +19 -19
- amulet/level/interfaces/chunk/anvil/anvil_2681.py +76 -76
- amulet/level/interfaces/chunk/anvil/anvil_2709.py +19 -19
- amulet/level/interfaces/chunk/anvil/anvil_2844.py +267 -267
- amulet/level/interfaces/chunk/anvil/anvil_3463.py +19 -19
- amulet/level/interfaces/chunk/anvil/anvil_na.py +607 -607
- amulet/level/interfaces/chunk/anvil/base_anvil_interface.py +326 -326
- amulet/level/load.py +59 -59
- amulet/level/loader.py +95 -95
- amulet/level/translators/chunk/bedrock/__init__.py +267 -267
- amulet/level/translators/chunk/bedrock/bedrock_nbt_blockstate_translator.py +46 -46
- amulet/level/translators/chunk/bedrock/bedrock_numerical_translator.py +39 -39
- amulet/level/translators/chunk/bedrock/bedrock_psudo_numerical_translator.py +37 -37
- amulet/level/translators/chunk/java/java_1_18_translator.py +40 -40
- amulet/level/translators/chunk/java/java_blockstate_translator.py +94 -94
- amulet/level/translators/chunk/java/java_numerical_translator.py +62 -62
- amulet/libs/leveldb/__init__.py +7 -7
- amulet/operations/__init__.py +5 -5
- amulet/operations/clone.py +18 -18
- amulet/operations/delete_chunk.py +32 -32
- amulet/operations/fill.py +30 -30
- amulet/operations/paste.py +65 -65
- amulet/operations/replace.py +58 -58
- amulet/utils/__init__.py +14 -14
- amulet/utils/format_utils.py +41 -41
- amulet/utils/generator.py +15 -15
- amulet/utils/matrix.py +243 -243
- amulet/utils/numpy_helpers.py +46 -46
- amulet/utils/world_utils.py +349 -349
- {amulet_core-1.9.19.dist-info → amulet_core-1.9.20.dist-info}/METADATA +97 -97
- amulet_core-1.9.20.dist-info/RECORD +208 -0
- amulet_core-1.9.19.dist-info/RECORD +0 -208
- {amulet_core-1.9.19.dist-info → amulet_core-1.9.20.dist-info}/WHEEL +0 -0
- {amulet_core-1.9.19.dist-info → amulet_core-1.9.20.dist-info}/entry_points.txt +0 -0
- {amulet_core-1.9.19.dist-info → amulet_core-1.9.20.dist-info}/top_level.txt +0 -0
|
@@ -1,836 +1,836 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
from typing import Tuple, Dict, List, Union, Iterable, Optional, TYPE_CHECKING, Any
|
|
4
|
-
import struct
|
|
5
|
-
import logging
|
|
6
|
-
|
|
7
|
-
import numpy
|
|
8
|
-
from amulet_nbt import (
|
|
9
|
-
ShortTag,
|
|
10
|
-
IntTag,
|
|
11
|
-
StringTag,
|
|
12
|
-
CompoundTag,
|
|
13
|
-
NamedTag,
|
|
14
|
-
load as load_nbt,
|
|
15
|
-
load_many,
|
|
16
|
-
ReadContext,
|
|
17
|
-
utf8_escape_decoder,
|
|
18
|
-
utf8_escape_encoder,
|
|
19
|
-
)
|
|
20
|
-
|
|
21
|
-
import amulet
|
|
22
|
-
from amulet.api.block import Block
|
|
23
|
-
from amulet.api.chunk import Chunk, StatusFormats
|
|
24
|
-
|
|
25
|
-
from amulet.utils.numpy_helpers import brute_sort_objects
|
|
26
|
-
from amulet.utils.world_utils import fast_unique, from_nibble_array
|
|
27
|
-
from amulet.api.wrapper import Interface
|
|
28
|
-
from amulet.api.data_types import (
|
|
29
|
-
AnyNDArray,
|
|
30
|
-
SubChunkNDArray,
|
|
31
|
-
PlatformType,
|
|
32
|
-
VersionNumberTuple,
|
|
33
|
-
VersionIdentifierTuple,
|
|
34
|
-
)
|
|
35
|
-
from amulet.level import loader
|
|
36
|
-
from amulet.api.wrapper import EntityIDType, EntityCoordType
|
|
37
|
-
from .leveldb_chunk_versions import chunk_to_game_version
|
|
38
|
-
from amulet.api.chunk.entity_list import EntityList
|
|
39
|
-
from amulet.level.formats.leveldb_world.chunk import ChunkData
|
|
40
|
-
|
|
41
|
-
if TYPE_CHECKING:
|
|
42
|
-
from amulet.api.block_entity import BlockEntity
|
|
43
|
-
from amulet.api.entity import Entity
|
|
44
|
-
from amulet.api.chunk.blocks import Blocks
|
|
45
|
-
from amulet.api.wrapper import Translator
|
|
46
|
-
|
|
47
|
-
log = logging.getLogger(__name__)
|
|
48
|
-
|
|
49
|
-
# This is here to scale a 4x array to a 16x array. This can be removed when we natively support 16x array
|
|
50
|
-
_scale_grid = tuple(numpy.meshgrid(*[numpy.arange(16) // 4] * 3, indexing="ij"))
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
class BaseLevelDBInterface(Interface):
|
|
54
|
-
chunk_version: int = None
|
|
55
|
-
|
|
56
|
-
def __init__(self):
|
|
57
|
-
self._feature_options = {
|
|
58
|
-
"finalised_state": ["int0-2"],
|
|
59
|
-
"data_2d": ["height512|biome256", "height512|biome4096"],
|
|
60
|
-
"block_entities": ["31list"],
|
|
61
|
-
"block_entity_format": [EntityIDType.namespace_str_id, EntityIDType.str_id],
|
|
62
|
-
"block_entity_coord_format": [EntityCoordType.xyz_int],
|
|
63
|
-
"entities": ["32list", "actor"],
|
|
64
|
-
"entity_format": [
|
|
65
|
-
EntityIDType.namespace_str_identifier,
|
|
66
|
-
EntityIDType.int_id,
|
|
67
|
-
],
|
|
68
|
-
"entity_coord_format": [EntityCoordType.Pos_list_float],
|
|
69
|
-
"terrain": ["30array", "2farray", "2f1palette", "2fnpalette"],
|
|
70
|
-
}
|
|
71
|
-
self._features = {key: None for key in self._feature_options.keys()}
|
|
72
|
-
|
|
73
|
-
def _set_feature(self, feature: str, option: Any):
|
|
74
|
-
assert feature in self._feature_options, f"{feature} is not a valid feature."
|
|
75
|
-
assert (
|
|
76
|
-
option in self._feature_options[feature]
|
|
77
|
-
), f'Invalid option {option} for feature "{feature}"'
|
|
78
|
-
self._features[feature] = option
|
|
79
|
-
|
|
80
|
-
def is_valid(self, key: Tuple) -> bool:
|
|
81
|
-
return key[0] == "bedrock" and key[1] == self.chunk_version
|
|
82
|
-
|
|
83
|
-
def get_translator(
|
|
84
|
-
self,
|
|
85
|
-
max_world_version: Tuple[PlatformType, VersionNumberTuple],
|
|
86
|
-
data: ChunkData = None,
|
|
87
|
-
) -> Tuple["Translator", VersionNumberTuple]:
|
|
88
|
-
"""
|
|
89
|
-
Get the Translator class for the requested version.
|
|
90
|
-
:param max_world_version: The game version the world was last opened in. Version number tuple or data version number.
|
|
91
|
-
:param data: Optional data to get translator based on chunk version rather than world version
|
|
92
|
-
:return: Tuple[Translator, version number for PyMCTranslate to use]
|
|
93
|
-
"""
|
|
94
|
-
if data:
|
|
95
|
-
if b"," in data:
|
|
96
|
-
chunk_version = data[b","][0]
|
|
97
|
-
else:
|
|
98
|
-
chunk_version = data[b"v"][0]
|
|
99
|
-
game_version = chunk_to_game_version(max_world_version[1], chunk_version)
|
|
100
|
-
else:
|
|
101
|
-
game_version = max_world_version[1]
|
|
102
|
-
return loader.Translators.get(("bedrock", game_version)), game_version
|
|
103
|
-
|
|
104
|
-
@staticmethod
|
|
105
|
-
def _chunk_key_to_sub_chunk(cy: int, min_y: int) -> int:
|
|
106
|
-
"""Convert the database sub-chunk key to the sub-chunk index."""
|
|
107
|
-
return cy
|
|
108
|
-
|
|
109
|
-
@staticmethod
|
|
110
|
-
def _get_sub_chunk_storage_byte(cy: int, min_y: int) -> bytes:
|
|
111
|
-
return struct.pack("b", cy)
|
|
112
|
-
|
|
113
|
-
def decode(
|
|
114
|
-
self, cx: int, cz: int, chunk_data: ChunkData, bounds: Tuple[int, int]
|
|
115
|
-
) -> Tuple[Chunk, AnyNDArray]:
|
|
116
|
-
"""
|
|
117
|
-
Create an amulet.api.chunk.Chunk object from raw data given by the format
|
|
118
|
-
:param cx: chunk x coordinate
|
|
119
|
-
:param cz: chunk z coordinate
|
|
120
|
-
:param chunk_data: Raw chunk data provided by the format.
|
|
121
|
-
:param bounds: The minimum and maximum height of the chunk.
|
|
122
|
-
:return: Chunk object in version-specific format, along with the block_palette for that chunk.
|
|
123
|
-
"""
|
|
124
|
-
chunk = Chunk(cx, cz)
|
|
125
|
-
chunk_palette = numpy.empty(0, dtype=object)
|
|
126
|
-
chunk.misc = {"bedrock_chunk_data": chunk_data}
|
|
127
|
-
|
|
128
|
-
chunk_data.pop(b"v", None)
|
|
129
|
-
chunk_data.pop(b",", None)
|
|
130
|
-
|
|
131
|
-
if self._features["terrain"].startswith(
|
|
132
|
-
"2f"
|
|
133
|
-
): # ["2farray", "2f1palette", "2fnpalette"]
|
|
134
|
-
subchunks = {}
|
|
135
|
-
for key in chunk_data.copy().keys():
|
|
136
|
-
if len(key) == 2 and key[0:1] == b"\x2F":
|
|
137
|
-
cy = struct.unpack("b", key[1:2])[0]
|
|
138
|
-
subchunks[
|
|
139
|
-
self._chunk_key_to_sub_chunk(cy, bounds[0] >> 4)
|
|
140
|
-
] = chunk_data.pop(key)
|
|
141
|
-
chunk.blocks, chunk_palette = self._load_subchunks(subchunks)
|
|
142
|
-
elif self._features["terrain"] == "30array":
|
|
143
|
-
section_data = chunk_data.pop(b"\x30", None)
|
|
144
|
-
if section_data is not None:
|
|
145
|
-
block_ids = numpy.frombuffer(
|
|
146
|
-
section_data[: 2**15], dtype=numpy.uint8
|
|
147
|
-
).astype(numpy.uint16)
|
|
148
|
-
block_data = from_nibble_array(
|
|
149
|
-
numpy.frombuffer(
|
|
150
|
-
section_data[2**15 : 2**15 + 2**14], dtype=numpy.uint8
|
|
151
|
-
)
|
|
152
|
-
)
|
|
153
|
-
|
|
154
|
-
# there is other data here but we are going to skip over it
|
|
155
|
-
combined_palette, block_array = fast_unique(
|
|
156
|
-
numpy.transpose(
|
|
157
|
-
((block_ids << 4) + block_data).reshape(16, 16, 128), (0, 2, 1)
|
|
158
|
-
)
|
|
159
|
-
)
|
|
160
|
-
chunk.blocks = {
|
|
161
|
-
i: block_array[:, i * 16 : (i + 1) * 16, :] for i in range(8)
|
|
162
|
-
}
|
|
163
|
-
palette: AnyNDArray = numpy.array(
|
|
164
|
-
[combined_palette >> 4, combined_palette & 15]
|
|
165
|
-
).T
|
|
166
|
-
chunk_palette = numpy.empty(len(palette), dtype=object)
|
|
167
|
-
for i, b in enumerate(palette):
|
|
168
|
-
chunk_palette[i] = ((None, tuple(b)),)
|
|
169
|
-
|
|
170
|
-
else:
|
|
171
|
-
raise Exception
|
|
172
|
-
|
|
173
|
-
if self._features["finalised_state"] == "int0-2":
|
|
174
|
-
state = chunk_data.pop(b"\x36", None)
|
|
175
|
-
val = 2
|
|
176
|
-
if isinstance(state, bytes):
|
|
177
|
-
if len(state) == 1:
|
|
178
|
-
# old versions of the game store this as a byte
|
|
179
|
-
val = struct.unpack("b", state)[0]
|
|
180
|
-
elif len(state) == 4:
|
|
181
|
-
# newer versions store it as an int
|
|
182
|
-
val = struct.unpack("<i", state)[0]
|
|
183
|
-
chunk.status = val
|
|
184
|
-
|
|
185
|
-
if b"+" in chunk_data:
|
|
186
|
-
height, biome = self._decode_height_3d_biomes(
|
|
187
|
-
chunk_data[b"+"], bounds[0] >> 4
|
|
188
|
-
)
|
|
189
|
-
chunk.misc["height"] = height
|
|
190
|
-
chunk.biomes = biome
|
|
191
|
-
elif b"\x2D" in chunk_data:
|
|
192
|
-
d2d = chunk_data[b"\x2D"]
|
|
193
|
-
height, biome = (
|
|
194
|
-
numpy.frombuffer(d2d[:512], "<i2").reshape((16, 16)),
|
|
195
|
-
d2d[512:],
|
|
196
|
-
)
|
|
197
|
-
chunk.misc["height"] = height
|
|
198
|
-
chunk.biomes = numpy.frombuffer(biome, dtype="uint8").reshape(16, 16).T
|
|
199
|
-
|
|
200
|
-
# TODO: implement key support
|
|
201
|
-
# \x2D heightmap and biomes
|
|
202
|
-
# \x31 block entity
|
|
203
|
-
# \x32 entity
|
|
204
|
-
# \x33 ticks
|
|
205
|
-
# \x34 block extra data
|
|
206
|
-
# \x35 biome state
|
|
207
|
-
# \x39 7 ints and an end (03)? Honestly don't know what this is
|
|
208
|
-
# \x3A fire tick?
|
|
209
|
-
|
|
210
|
-
# \x2E 2d legacy
|
|
211
|
-
# \x30 legacy terrain
|
|
212
|
-
|
|
213
|
-
# unpack block entities and entities
|
|
214
|
-
if self._features["block_entities"] == "31list":
|
|
215
|
-
block_entities = self._unpack_nbt_list(chunk_data.pop(b"\x31", b""))
|
|
216
|
-
chunk.block_entities = self._decode_block_entity_list(block_entities)
|
|
217
|
-
|
|
218
|
-
if self._features["entities"] in ("32list", "actor"):
|
|
219
|
-
entities = self._unpack_nbt_list(chunk_data.pop(b"\x32", b""))
|
|
220
|
-
if amulet.entity_support:
|
|
221
|
-
chunk.entities = self._decode_entity_list(entities)
|
|
222
|
-
else:
|
|
223
|
-
chunk._native_entities = EntityList(self._decode_entity_list(entities))
|
|
224
|
-
chunk._native_version = (
|
|
225
|
-
"bedrock",
|
|
226
|
-
(0, 0, 0),
|
|
227
|
-
) # TODO: find a way to determine entity version
|
|
228
|
-
|
|
229
|
-
if self._features["entities"] == "actor":
|
|
230
|
-
if chunk_data.entity_actor:
|
|
231
|
-
if amulet.entity_support:
|
|
232
|
-
chunk.entities += self._decode_entity_list(chunk_data.entity_actor)
|
|
233
|
-
else:
|
|
234
|
-
chunk._native_entities += self._decode_entity_list(
|
|
235
|
-
chunk_data.entity_actor
|
|
236
|
-
)
|
|
237
|
-
chunk._native_version = ("bedrock", (0, 0, 0))
|
|
238
|
-
chunk_data.entity_actor.clear()
|
|
239
|
-
|
|
240
|
-
return chunk, chunk_palette
|
|
241
|
-
|
|
242
|
-
def encode(
|
|
243
|
-
self,
|
|
244
|
-
chunk: Chunk,
|
|
245
|
-
palette: AnyNDArray,
|
|
246
|
-
max_world_version: VersionIdentifierTuple,
|
|
247
|
-
bounds: Tuple[int, int],
|
|
248
|
-
) -> Dict[bytes, Optional[bytes]]:
|
|
249
|
-
chunk_data = chunk.misc.get("bedrock_chunk_data", {})
|
|
250
|
-
if isinstance(chunk_data, ChunkData):
|
|
251
|
-
pass
|
|
252
|
-
elif isinstance(chunk_data, dict):
|
|
253
|
-
chunk_data = ChunkData(
|
|
254
|
-
{
|
|
255
|
-
k: v
|
|
256
|
-
for k, v in chunk_data.items()
|
|
257
|
-
if isinstance(k, bytes) and isinstance(v, bytes)
|
|
258
|
-
}
|
|
259
|
-
)
|
|
260
|
-
else:
|
|
261
|
-
chunk_data = ChunkData()
|
|
262
|
-
|
|
263
|
-
chunk_data: ChunkData
|
|
264
|
-
|
|
265
|
-
# chunk version
|
|
266
|
-
if self.chunk_version is not None:
|
|
267
|
-
chunk_data[b"v" if self.chunk_version <= 20 else b","] = bytes(
|
|
268
|
-
[self.chunk_version]
|
|
269
|
-
)
|
|
270
|
-
|
|
271
|
-
# terrain data
|
|
272
|
-
terrain = self._encode_subchunks(
|
|
273
|
-
chunk.blocks, palette, bounds, max_world_version
|
|
274
|
-
)
|
|
275
|
-
min_y = bounds[0] // 16
|
|
276
|
-
for cy, sub_chunk in terrain.items():
|
|
277
|
-
chunk_data[
|
|
278
|
-
b"\x2F" + self._get_sub_chunk_storage_byte(cy, min_y)
|
|
279
|
-
] = sub_chunk
|
|
280
|
-
|
|
281
|
-
# chunk status
|
|
282
|
-
if self._features["finalised_state"] == "int0-2":
|
|
283
|
-
chunk_data[b"\x36"] = struct.pack(
|
|
284
|
-
"<i", chunk.status.as_type(StatusFormats.Bedrock)
|
|
285
|
-
)
|
|
286
|
-
|
|
287
|
-
# biome and height data
|
|
288
|
-
if self._features["data_2d"] == "height512|biome256":
|
|
289
|
-
d2d: List[bytes] = [self._encode_height(chunk)]
|
|
290
|
-
chunk.biomes.convert_to_2d()
|
|
291
|
-
d2d.append(chunk.biomes.astype("uint8").T.tobytes())
|
|
292
|
-
chunk_data[b"\x2D"] = b"".join(d2d)
|
|
293
|
-
if b"+" in chunk_data:
|
|
294
|
-
chunk_data[b"+"] = None
|
|
295
|
-
elif self._features["data_2d"] == "height512|biome4096":
|
|
296
|
-
chunk_data[b"+"] = self._encode_height_3d_biomes(
|
|
297
|
-
chunk, bounds[0] >> 4, bounds[1] >> 4
|
|
298
|
-
)
|
|
299
|
-
if b"\x2D" in chunk_data:
|
|
300
|
-
chunk_data[b"\x2D"] = None
|
|
301
|
-
|
|
302
|
-
# pack block entities and entities
|
|
303
|
-
if self._features["block_entities"] == "31list":
|
|
304
|
-
block_entities_out = self._encode_block_entity_list(chunk.block_entities)
|
|
305
|
-
|
|
306
|
-
if block_entities_out:
|
|
307
|
-
chunk_data[b"\x31"] = self._pack_nbt_list(block_entities_out)
|
|
308
|
-
else:
|
|
309
|
-
chunk_data[b"\x31"] = None
|
|
310
|
-
|
|
311
|
-
if self._features["entities"] == "32list":
|
|
312
|
-
|
|
313
|
-
def save_entities(entities_out):
|
|
314
|
-
if entities_out:
|
|
315
|
-
chunk_data[b"\x32"] = self._pack_nbt_list(entities_out)
|
|
316
|
-
else:
|
|
317
|
-
chunk_data[b"\x32"] = None
|
|
318
|
-
|
|
319
|
-
if amulet.entity_support:
|
|
320
|
-
save_entities(self._encode_entity_list(chunk.entities))
|
|
321
|
-
else:
|
|
322
|
-
try:
|
|
323
|
-
if chunk._native_version[0] == "bedrock":
|
|
324
|
-
save_entities(self._encode_entity_list(chunk._native_entities))
|
|
325
|
-
except:
|
|
326
|
-
pass
|
|
327
|
-
|
|
328
|
-
elif self._features["entities"] == "actor":
|
|
329
|
-
chunk_data.entity_actor.clear()
|
|
330
|
-
if amulet.entity_support:
|
|
331
|
-
chunk_data.entity_actor.extend(self._encode_entity_list(chunk.entities))
|
|
332
|
-
else:
|
|
333
|
-
try:
|
|
334
|
-
if chunk._native_version[0] == "bedrock":
|
|
335
|
-
chunk_data.entity_actor.extend(
|
|
336
|
-
self._encode_entity_list(chunk._native_entities)
|
|
337
|
-
)
|
|
338
|
-
except:
|
|
339
|
-
pass
|
|
340
|
-
|
|
341
|
-
return chunk_data
|
|
342
|
-
|
|
343
|
-
def _load_subchunks(
|
|
344
|
-
self, subchunks: Dict[int, Optional[bytes]]
|
|
345
|
-
) -> Tuple[Dict[int, SubChunkNDArray], AnyNDArray]:
|
|
346
|
-
"""
|
|
347
|
-
Load a list of bytes objects which contain chunk data
|
|
348
|
-
This function should be able to load all sub-chunk formats (technically before it)
|
|
349
|
-
All sub-chunks will almost certainly all have the same sub-chunk version but
|
|
350
|
-
it should be able to handle a case where that is not true.
|
|
351
|
-
|
|
352
|
-
As such this function will return a Chunk and a rather complicated block_palette
|
|
353
|
-
The newer formats allow multiple blocks to occupy the same space and the
|
|
354
|
-
newer versions also include a version ber block. So this will also need
|
|
355
|
-
returning for the translator to handle.
|
|
356
|
-
|
|
357
|
-
The block_palette will be a numpy array containing tuple objects
|
|
358
|
-
The tuple represents the "block" however can contain more than one Block object.
|
|
359
|
-
Inside the tuple are one or more tuples.
|
|
360
|
-
These include the block version number and the block itself
|
|
361
|
-
The block version number will be either None if no block version is given
|
|
362
|
-
or a tuple containing 4 ints.
|
|
363
|
-
|
|
364
|
-
The block will be either a Block class for the newer formats or a tuple of two ints for the older formats
|
|
365
|
-
"""
|
|
366
|
-
blocks: Dict[int, SubChunkNDArray] = {}
|
|
367
|
-
palette: List[
|
|
368
|
-
Tuple[Tuple[Optional[int], Union[Tuple[int, int], Block]], ...]
|
|
369
|
-
] = [
|
|
370
|
-
(
|
|
371
|
-
(
|
|
372
|
-
17563649,
|
|
373
|
-
Block(
|
|
374
|
-
namespace="minecraft",
|
|
375
|
-
base_name="air",
|
|
376
|
-
properties={"block_data": IntTag(0)},
|
|
377
|
-
),
|
|
378
|
-
),
|
|
379
|
-
)
|
|
380
|
-
]
|
|
381
|
-
for cy, data in subchunks.items():
|
|
382
|
-
if data is None:
|
|
383
|
-
continue
|
|
384
|
-
if data[0] in {0, 2, 3, 4, 5, 6, 7}:
|
|
385
|
-
block_ids = numpy.frombuffer(
|
|
386
|
-
data[1 : 1 + 2**12], dtype=numpy.uint8
|
|
387
|
-
).astype(numpy.uint16)
|
|
388
|
-
block_data = from_nibble_array(
|
|
389
|
-
numpy.frombuffer(
|
|
390
|
-
data[1 + 2**12 : 1 + 2**12 + 2**11], dtype=numpy.uint8
|
|
391
|
-
)
|
|
392
|
-
)
|
|
393
|
-
combined_palette, block_array = fast_unique(
|
|
394
|
-
numpy.transpose(
|
|
395
|
-
((block_ids << 4) + block_data).reshape(16, 16, 16), (0, 2, 1)
|
|
396
|
-
)
|
|
397
|
-
)
|
|
398
|
-
blocks[cy] = block_array + len(palette)
|
|
399
|
-
for b in numpy.array([combined_palette >> 4, combined_palette & 15]).T:
|
|
400
|
-
palette.append(((None, tuple(b)),))
|
|
401
|
-
|
|
402
|
-
else:
|
|
403
|
-
if data[0] == 1:
|
|
404
|
-
storage_count = 1
|
|
405
|
-
data = data[1:]
|
|
406
|
-
elif data[0] == 8:
|
|
407
|
-
storage_count, data = data[1], data[2:]
|
|
408
|
-
elif data[0] == 9:
|
|
409
|
-
# There is an extra byte in this format storing the cy value
|
|
410
|
-
storage_count, cy, data = (
|
|
411
|
-
data[1],
|
|
412
|
-
struct.unpack("b", data[2:3])[0],
|
|
413
|
-
data[3:],
|
|
414
|
-
)
|
|
415
|
-
else:
|
|
416
|
-
raise Exception(f"sub-chunk version {data[0]} is not known.")
|
|
417
|
-
|
|
418
|
-
sub_chunk_blocks = numpy.zeros(
|
|
419
|
-
(16, 16, 16, storage_count), dtype=numpy.uint32
|
|
420
|
-
)
|
|
421
|
-
sub_chunk_palette: List[List[Tuple[Optional[int], Block]]] = []
|
|
422
|
-
for storage_index in range(storage_count):
|
|
423
|
-
(
|
|
424
|
-
sub_chunk_blocks[:, :, :, storage_index],
|
|
425
|
-
palette_data,
|
|
426
|
-
data,
|
|
427
|
-
) = self._load_palette_blocks(data)
|
|
428
|
-
palette_data_out: List[Tuple[Optional[int], Block]] = []
|
|
429
|
-
for block in palette_data:
|
|
430
|
-
block = block.compound
|
|
431
|
-
*namespace_, base_name = block["name"].py_str.split(":", 1)
|
|
432
|
-
namespace = namespace_[0] if namespace_ else "minecraft"
|
|
433
|
-
if "version" in block:
|
|
434
|
-
version: Optional[int] = block.get_int("version").py_int
|
|
435
|
-
else:
|
|
436
|
-
version = None
|
|
437
|
-
|
|
438
|
-
if "states" in block or "val" not in block: # 1.13 format
|
|
439
|
-
properties = block.get_compound(
|
|
440
|
-
"states", CompoundTag()
|
|
441
|
-
).py_dict
|
|
442
|
-
if version is None:
|
|
443
|
-
version = 17694720 # 1, 14, 0, 0
|
|
444
|
-
else:
|
|
445
|
-
properties = {"block_data": IntTag(block["val"].py_int)}
|
|
446
|
-
palette_data_out.append(
|
|
447
|
-
(
|
|
448
|
-
version,
|
|
449
|
-
Block(
|
|
450
|
-
namespace=namespace,
|
|
451
|
-
base_name=base_name,
|
|
452
|
-
properties=properties,
|
|
453
|
-
),
|
|
454
|
-
)
|
|
455
|
-
)
|
|
456
|
-
sub_chunk_palette.append(palette_data_out)
|
|
457
|
-
|
|
458
|
-
if storage_count == 1:
|
|
459
|
-
blocks[cy] = sub_chunk_blocks[:, :, :, 0] + len(palette)
|
|
460
|
-
palette += [(val,) for val in sub_chunk_palette[0]]
|
|
461
|
-
elif storage_count > 1:
|
|
462
|
-
# we have two or more storages so need to find the unique block combinations and merge them together
|
|
463
|
-
sub_chunk_palette_, sub_chunk_blocks = numpy.unique(
|
|
464
|
-
sub_chunk_blocks.reshape(-1, storage_count),
|
|
465
|
-
return_inverse=True,
|
|
466
|
-
axis=0,
|
|
467
|
-
)
|
|
468
|
-
blocks[cy] = sub_chunk_blocks.reshape(16, 16, 16).astype(
|
|
469
|
-
numpy.uint32
|
|
470
|
-
) + len(palette)
|
|
471
|
-
palette += [
|
|
472
|
-
tuple(
|
|
473
|
-
sub_chunk_palette[storage_index][index]
|
|
474
|
-
for storage_index, index in enumerate(palette_indexes)
|
|
475
|
-
if not (
|
|
476
|
-
storage_index > 0
|
|
477
|
-
and sub_chunk_palette[storage_index][index][
|
|
478
|
-
1
|
|
479
|
-
].namespaced_name
|
|
480
|
-
== "minecraft:air"
|
|
481
|
-
)
|
|
482
|
-
)
|
|
483
|
-
for palette_indexes in sub_chunk_palette_
|
|
484
|
-
]
|
|
485
|
-
else:
|
|
486
|
-
continue
|
|
487
|
-
|
|
488
|
-
# block_palette should now look like this
|
|
489
|
-
# List[
|
|
490
|
-
# Tuple[
|
|
491
|
-
# Tuple[version, Block], ...
|
|
492
|
-
# ]
|
|
493
|
-
# ]
|
|
494
|
-
|
|
495
|
-
numpy_palette, lut = brute_sort_objects(palette)
|
|
496
|
-
for cy in blocks.keys():
|
|
497
|
-
blocks[cy] = lut[blocks[cy]]
|
|
498
|
-
|
|
499
|
-
return blocks, numpy_palette
|
|
500
|
-
|
|
501
|
-
def _encode_subchunks(
|
|
502
|
-
self,
|
|
503
|
-
blocks: "Blocks",
|
|
504
|
-
palette: AnyNDArray,
|
|
505
|
-
bounds: Tuple[int, int],
|
|
506
|
-
max_world_version: VersionIdentifierTuple,
|
|
507
|
-
) -> Dict[int, Optional[bytes]]:
|
|
508
|
-
raise NotImplementedError
|
|
509
|
-
|
|
510
|
-
def _save_subchunks_1(
|
|
511
|
-
self, blocks: "Blocks", palette: AnyNDArray
|
|
512
|
-
) -> Dict[int, Optional[bytes]]:
|
|
513
|
-
for index, block in enumerate(palette):
|
|
514
|
-
block: Tuple[Tuple[None, Block], ...]
|
|
515
|
-
block_data_tag = block[0][1].properties.get("block_data", IntTag(0))
|
|
516
|
-
if isinstance(block_data_tag, IntTag):
|
|
517
|
-
block_data = block_data_tag.py_int
|
|
518
|
-
# if block_data >= 16:
|
|
519
|
-
# block_data = 0
|
|
520
|
-
else:
|
|
521
|
-
block_data = 0
|
|
522
|
-
|
|
523
|
-
palette[index] = NamedTag(
|
|
524
|
-
CompoundTag(
|
|
525
|
-
{
|
|
526
|
-
"name": StringTag(block[0][1].namespaced_name),
|
|
527
|
-
"val": ShortTag(block_data),
|
|
528
|
-
}
|
|
529
|
-
)
|
|
530
|
-
)
|
|
531
|
-
chunk = {}
|
|
532
|
-
for cy in range(16):
|
|
533
|
-
if cy in blocks:
|
|
534
|
-
palette_index, sub_chunk = fast_unique(blocks.get_sub_chunk(cy))
|
|
535
|
-
sub_chunk_palette = list(palette[palette_index])
|
|
536
|
-
chunk[cy] = b"\x01" + self._save_palette_subchunk(
|
|
537
|
-
sub_chunk.ravel(), sub_chunk_palette
|
|
538
|
-
)
|
|
539
|
-
else:
|
|
540
|
-
chunk[cy] = None
|
|
541
|
-
return chunk
|
|
542
|
-
|
|
543
|
-
# These arent actual blocks, just ids pointing to the block_palette.
|
|
544
|
-
|
|
545
|
-
@staticmethod
|
|
546
|
-
def _decode_packed_array(data: bytes) -> Tuple[bytes, int, Optional[numpy.ndarray]]:
|
|
547
|
-
"""
|
|
548
|
-
Parse a packed array as documented here
|
|
549
|
-
https://gist.github.com/Tomcc/a96af509e275b1af483b25c543cfbf37
|
|
550
|
-
|
|
551
|
-
:param data: The data to parse
|
|
552
|
-
:return:
|
|
553
|
-
"""
|
|
554
|
-
# Ignore LSB of data (its a flag) and get compacting level
|
|
555
|
-
bits_per_value, data = struct.unpack("b", data[0:1])[0] >> 1, data[1:]
|
|
556
|
-
if bits_per_value > 0:
|
|
557
|
-
values_per_word = (
|
|
558
|
-
32 // bits_per_value
|
|
559
|
-
) # Word = 4 bytes, basis of compacting.
|
|
560
|
-
word_count = -(
|
|
561
|
-
-4096 // values_per_word
|
|
562
|
-
) # Ceiling divide is inverted floor divide
|
|
563
|
-
|
|
564
|
-
arr = numpy.packbits(
|
|
565
|
-
numpy.pad(
|
|
566
|
-
numpy.unpackbits(
|
|
567
|
-
numpy.frombuffer(
|
|
568
|
-
bytes(reversed(data[: 4 * word_count])), dtype="uint8"
|
|
569
|
-
)
|
|
570
|
-
)
|
|
571
|
-
.reshape(-1, 32)[:, -values_per_word * bits_per_value :]
|
|
572
|
-
.reshape(-1, bits_per_value)[-4096:, :],
|
|
573
|
-
[(0, 0), (16 - bits_per_value, 0)],
|
|
574
|
-
"constant",
|
|
575
|
-
)
|
|
576
|
-
).view(dtype=">i2")[::-1]
|
|
577
|
-
arr = arr.reshape((16, 16, 16)).swapaxes(1, 2)
|
|
578
|
-
data = data[4 * word_count :]
|
|
579
|
-
else:
|
|
580
|
-
arr = None
|
|
581
|
-
return data, bits_per_value, arr
|
|
582
|
-
|
|
583
|
-
def _decode_height_3d_biomes(
|
|
584
|
-
self, data: bytes, floor_cy: int
|
|
585
|
-
) -> Tuple[numpy.ndarray, Dict[int, numpy.ndarray]]:
|
|
586
|
-
# The 3D biome format consists of 25 16x arrays with the first array corresponding to the lowest sub-chunk in the world
|
|
587
|
-
# This is -64 in the overworld and 0 in the nether and end
|
|
588
|
-
# TODO: make this support the full 16x
|
|
589
|
-
heightmap, data = (
|
|
590
|
-
numpy.frombuffer(data[:512], "<i2").reshape((16, 16)),
|
|
591
|
-
data[512:],
|
|
592
|
-
)
|
|
593
|
-
biomes = {}
|
|
594
|
-
cy = floor_cy
|
|
595
|
-
while data:
|
|
596
|
-
data, bits_per_value, arr = self._decode_packed_array(data)
|
|
597
|
-
if bits_per_value == 0:
|
|
598
|
-
value, data = struct.unpack(f"<I", data[:4])[0], data[4:]
|
|
599
|
-
# TODO: when the new biome system supports ints just return the value
|
|
600
|
-
biomes[cy] = numpy.full((4, 4, 4), value, dtype=numpy.uint32)
|
|
601
|
-
elif bits_per_value > 0:
|
|
602
|
-
arr = arr[::4, ::4, ::4]
|
|
603
|
-
palette_len, data = struct.unpack("<I", data[:4])[0], data[4:]
|
|
604
|
-
biomes[cy] = numpy.frombuffer(data, "<i4", palette_len)[arr].astype(
|
|
605
|
-
numpy.uint32
|
|
606
|
-
)
|
|
607
|
-
data = data[4 * palette_len :]
|
|
608
|
-
cy += 1
|
|
609
|
-
return heightmap, biomes
|
|
610
|
-
|
|
611
|
-
def _encode_height(self, chunk) -> bytes:
|
|
612
|
-
height = chunk.misc.get("height")
|
|
613
|
-
if isinstance(height, numpy.ndarray) and height.size == 256:
|
|
614
|
-
return height.ravel().astype("<i2").tobytes()
|
|
615
|
-
else:
|
|
616
|
-
return b"\x00" * 512
|
|
617
|
-
|
|
618
|
-
def _encode_height_3d_biomes(
|
|
619
|
-
self, chunk: Chunk, floor_cy: int, ceil_cy: int
|
|
620
|
-
) -> bytes:
|
|
621
|
-
d2d: List[bytes] = [self._encode_height(chunk)]
|
|
622
|
-
chunk.biomes.convert_to_3d()
|
|
623
|
-
# at least one biome array needs to be defined
|
|
624
|
-
# all biome arrays below the highest biome array must be populated.
|
|
625
|
-
highest = next(
|
|
626
|
-
(cy for cy in range(ceil_cy, floor_cy - 1, -1) if cy in chunk.biomes), None
|
|
627
|
-
)
|
|
628
|
-
if highest is None:
|
|
629
|
-
# populate lowest array
|
|
630
|
-
chunk.biomes.create_section(floor_cy)
|
|
631
|
-
else:
|
|
632
|
-
for cy in range(highest - 1, floor_cy - 1, -1):
|
|
633
|
-
if cy not in chunk.biomes:
|
|
634
|
-
chunk.biomes.add_section(
|
|
635
|
-
cy,
|
|
636
|
-
numpy.repeat(
|
|
637
|
-
# get the array for the sub-chunk above and get the lowest slice
|
|
638
|
-
chunk.biomes.get_section(cy + 1)[:, :1, :],
|
|
639
|
-
4, # Repeat this slice 4 times in the first axis
|
|
640
|
-
1, # TODO: When biome editing supports 16x this will need to be changed.
|
|
641
|
-
),
|
|
642
|
-
)
|
|
643
|
-
|
|
644
|
-
for cy in range(floor_cy, floor_cy + 25):
|
|
645
|
-
if cy in chunk.biomes:
|
|
646
|
-
arr = chunk.biomes.get_section(cy)
|
|
647
|
-
palette, arr_uniq = numpy.unique(arr, return_inverse=True)
|
|
648
|
-
if len(palette) == 1:
|
|
649
|
-
d2d.append(b"\x01")
|
|
650
|
-
else:
|
|
651
|
-
d2d.append(
|
|
652
|
-
self._encode_packed_array(
|
|
653
|
-
arr_uniq.reshape(arr.shape)[_scale_grid]
|
|
654
|
-
)
|
|
655
|
-
)
|
|
656
|
-
d2d.append(struct.pack("<I", len(palette)))
|
|
657
|
-
d2d.append(palette.astype("<i4").tobytes())
|
|
658
|
-
else:
|
|
659
|
-
d2d.append(b"\xFF")
|
|
660
|
-
|
|
661
|
-
return b"".join(d2d)
|
|
662
|
-
|
|
663
|
-
def _load_palette_blocks(
|
|
664
|
-
self,
|
|
665
|
-
data: bytes,
|
|
666
|
-
) -> Tuple[numpy.ndarray, List[NamedTag], bytes]:
|
|
667
|
-
data, _, blocks = self._decode_packed_array(data)
|
|
668
|
-
if blocks is None:
|
|
669
|
-
blocks = numpy.zeros((16, 16, 16), dtype=numpy.int16)
|
|
670
|
-
palette_len = 1
|
|
671
|
-
else:
|
|
672
|
-
palette_len, data = struct.unpack("<I", data[:4])[0], data[4:]
|
|
673
|
-
|
|
674
|
-
if palette_len:
|
|
675
|
-
read_context = ReadContext()
|
|
676
|
-
palette = load_many(
|
|
677
|
-
data,
|
|
678
|
-
compressed=False,
|
|
679
|
-
count=palette_len,
|
|
680
|
-
little_endian=True,
|
|
681
|
-
read_context=read_context,
|
|
682
|
-
)
|
|
683
|
-
data = data[read_context.offset :]
|
|
684
|
-
else:
|
|
685
|
-
palette = [
|
|
686
|
-
NamedTag(
|
|
687
|
-
CompoundTag(
|
|
688
|
-
{
|
|
689
|
-
"name": StringTag("minecraft:air"),
|
|
690
|
-
"states": CompoundTag(),
|
|
691
|
-
"version": IntTag(17694723),
|
|
692
|
-
}
|
|
693
|
-
)
|
|
694
|
-
)
|
|
695
|
-
]
|
|
696
|
-
|
|
697
|
-
return blocks, palette, data
|
|
698
|
-
|
|
699
|
-
@staticmethod
|
|
700
|
-
def _encode_packed_array(arr: numpy.ndarray, min_bit_size=1) -> bytes:
|
|
701
|
-
bits_per_value = max(int(numpy.amax(arr)).bit_length(), min_bit_size)
|
|
702
|
-
if bits_per_value == 7:
|
|
703
|
-
bits_per_value = 8
|
|
704
|
-
elif 9 <= bits_per_value <= 15:
|
|
705
|
-
bits_per_value = 16
|
|
706
|
-
header = bytes([bits_per_value << 1])
|
|
707
|
-
|
|
708
|
-
values_per_word = 32 // bits_per_value # Word = 4 bytes, basis of compacting.
|
|
709
|
-
word_count = -(
|
|
710
|
-
-4096 // values_per_word
|
|
711
|
-
) # Ceiling divide is inverted floor divide
|
|
712
|
-
|
|
713
|
-
arr = arr.swapaxes(1, 2).ravel()
|
|
714
|
-
packed_arr = bytes(
|
|
715
|
-
reversed(
|
|
716
|
-
numpy.packbits(
|
|
717
|
-
numpy.pad(
|
|
718
|
-
numpy.pad(
|
|
719
|
-
numpy.unpackbits(
|
|
720
|
-
numpy.ascontiguousarray(arr[::-1], dtype=">i").view(
|
|
721
|
-
dtype="uint8"
|
|
722
|
-
)
|
|
723
|
-
).reshape(4096, -1)[:, -bits_per_value:],
|
|
724
|
-
[(word_count * values_per_word - 4096, 0), (0, 0)],
|
|
725
|
-
"constant",
|
|
726
|
-
).reshape(-1, values_per_word * bits_per_value),
|
|
727
|
-
[(0, 0), (32 - values_per_word * bits_per_value, 0)],
|
|
728
|
-
"constant",
|
|
729
|
-
)
|
|
730
|
-
)
|
|
731
|
-
.view(dtype=">i4")
|
|
732
|
-
.tobytes()
|
|
733
|
-
)
|
|
734
|
-
)
|
|
735
|
-
return header + packed_arr
|
|
736
|
-
|
|
737
|
-
def _save_palette_subchunk(
|
|
738
|
-
self, blocks: numpy.ndarray, palette: List[NamedTag]
|
|
739
|
-
) -> bytes:
|
|
740
|
-
"""Save a single layer of blocks in the block_palette format"""
|
|
741
|
-
return b"".join(
|
|
742
|
-
[self._encode_packed_array(blocks), struct.pack("<I", len(palette))]
|
|
743
|
-
+ [
|
|
744
|
-
block.save_to(
|
|
745
|
-
compressed=False,
|
|
746
|
-
little_endian=True,
|
|
747
|
-
string_encoder=utf8_escape_encoder,
|
|
748
|
-
)
|
|
749
|
-
for block in palette
|
|
750
|
-
]
|
|
751
|
-
)
|
|
752
|
-
|
|
753
|
-
@staticmethod
|
|
754
|
-
def _unpack_nbt_list(raw_nbt: bytes) -> List[NamedTag]:
|
|
755
|
-
nbt_list = []
|
|
756
|
-
while raw_nbt:
|
|
757
|
-
read_context = ReadContext()
|
|
758
|
-
nbt = load_nbt(
|
|
759
|
-
raw_nbt,
|
|
760
|
-
little_endian=True,
|
|
761
|
-
read_context=read_context,
|
|
762
|
-
string_decoder=utf8_escape_decoder,
|
|
763
|
-
)
|
|
764
|
-
raw_nbt = raw_nbt[read_context.offset :]
|
|
765
|
-
nbt_list.append(nbt)
|
|
766
|
-
return nbt_list
|
|
767
|
-
|
|
768
|
-
@staticmethod
|
|
769
|
-
def _pack_nbt_list(nbt_list: List[NamedTag]):
|
|
770
|
-
return b"".join(
|
|
771
|
-
[
|
|
772
|
-
nbt.save_to(
|
|
773
|
-
compressed=False,
|
|
774
|
-
little_endian=True,
|
|
775
|
-
string_encoder=utf8_escape_encoder,
|
|
776
|
-
)
|
|
777
|
-
for nbt in nbt_list
|
|
778
|
-
if isinstance(nbt, NamedTag)
|
|
779
|
-
]
|
|
780
|
-
)
|
|
781
|
-
|
|
782
|
-
def _decode_entity_list(self, entities: List[NamedTag]) -> List["Entity"]:
|
|
783
|
-
entities_out = []
|
|
784
|
-
for nbt in entities:
|
|
785
|
-
entity = self._decode_entity(
|
|
786
|
-
nbt,
|
|
787
|
-
self._features["entity_format"],
|
|
788
|
-
self._features["entity_coord_format"],
|
|
789
|
-
)
|
|
790
|
-
if entity is not None:
|
|
791
|
-
entities_out.append(entity)
|
|
792
|
-
|
|
793
|
-
return entities_out
|
|
794
|
-
|
|
795
|
-
def _encode_entity_list(self, entities: "EntityList") -> List[NamedTag]:
|
|
796
|
-
entities_out = []
|
|
797
|
-
for entity in entities:
|
|
798
|
-
nbt = self._encode_entity(
|
|
799
|
-
entity,
|
|
800
|
-
self._features["entity_format"],
|
|
801
|
-
self._features["entity_coord_format"],
|
|
802
|
-
)
|
|
803
|
-
if nbt is not None:
|
|
804
|
-
entities_out.append(nbt)
|
|
805
|
-
|
|
806
|
-
return entities_out
|
|
807
|
-
|
|
808
|
-
def _decode_block_entity_list(
|
|
809
|
-
self, block_entities: List[NamedTag]
|
|
810
|
-
) -> List["BlockEntity"]:
|
|
811
|
-
entities_out = []
|
|
812
|
-
for nbt in block_entities:
|
|
813
|
-
entity = self._decode_block_entity(
|
|
814
|
-
nbt,
|
|
815
|
-
self._features["block_entity_format"],
|
|
816
|
-
self._features["block_entity_coord_format"],
|
|
817
|
-
)
|
|
818
|
-
if entity is not None:
|
|
819
|
-
entities_out.append(entity)
|
|
820
|
-
|
|
821
|
-
return entities_out
|
|
822
|
-
|
|
823
|
-
def _encode_block_entity_list(
|
|
824
|
-
self, block_entities: Iterable["BlockEntity"]
|
|
825
|
-
) -> List[NamedTag]:
|
|
826
|
-
entities_out = []
|
|
827
|
-
for entity in block_entities:
|
|
828
|
-
nbt = self._encode_block_entity(
|
|
829
|
-
entity,
|
|
830
|
-
self._features["block_entity_format"],
|
|
831
|
-
self._features["block_entity_coord_format"],
|
|
832
|
-
)
|
|
833
|
-
if nbt is not None:
|
|
834
|
-
entities_out.append(nbt)
|
|
835
|
-
|
|
836
|
-
return entities_out
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Tuple, Dict, List, Union, Iterable, Optional, TYPE_CHECKING, Any
|
|
4
|
+
import struct
|
|
5
|
+
import logging
|
|
6
|
+
|
|
7
|
+
import numpy
|
|
8
|
+
from amulet_nbt import (
|
|
9
|
+
ShortTag,
|
|
10
|
+
IntTag,
|
|
11
|
+
StringTag,
|
|
12
|
+
CompoundTag,
|
|
13
|
+
NamedTag,
|
|
14
|
+
load as load_nbt,
|
|
15
|
+
load_many,
|
|
16
|
+
ReadContext,
|
|
17
|
+
utf8_escape_decoder,
|
|
18
|
+
utf8_escape_encoder,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
import amulet
|
|
22
|
+
from amulet.api.block import Block
|
|
23
|
+
from amulet.api.chunk import Chunk, StatusFormats
|
|
24
|
+
|
|
25
|
+
from amulet.utils.numpy_helpers import brute_sort_objects
|
|
26
|
+
from amulet.utils.world_utils import fast_unique, from_nibble_array
|
|
27
|
+
from amulet.api.wrapper import Interface
|
|
28
|
+
from amulet.api.data_types import (
|
|
29
|
+
AnyNDArray,
|
|
30
|
+
SubChunkNDArray,
|
|
31
|
+
PlatformType,
|
|
32
|
+
VersionNumberTuple,
|
|
33
|
+
VersionIdentifierTuple,
|
|
34
|
+
)
|
|
35
|
+
from amulet.level import loader
|
|
36
|
+
from amulet.api.wrapper import EntityIDType, EntityCoordType
|
|
37
|
+
from .leveldb_chunk_versions import chunk_to_game_version
|
|
38
|
+
from amulet.api.chunk.entity_list import EntityList
|
|
39
|
+
from amulet.level.formats.leveldb_world.chunk import ChunkData
|
|
40
|
+
|
|
41
|
+
if TYPE_CHECKING:
|
|
42
|
+
from amulet.api.block_entity import BlockEntity
|
|
43
|
+
from amulet.api.entity import Entity
|
|
44
|
+
from amulet.api.chunk.blocks import Blocks
|
|
45
|
+
from amulet.api.wrapper import Translator
|
|
46
|
+
|
|
47
|
+
log = logging.getLogger(__name__)
|
|
48
|
+
|
|
49
|
+
# This is here to scale a 4x array to a 16x array. This can be removed when we natively support 16x array
|
|
50
|
+
_scale_grid = tuple(numpy.meshgrid(*[numpy.arange(16) // 4] * 3, indexing="ij"))
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class BaseLevelDBInterface(Interface):
|
|
54
|
+
chunk_version: int = None
|
|
55
|
+
|
|
56
|
+
def __init__(self):
|
|
57
|
+
self._feature_options = {
|
|
58
|
+
"finalised_state": ["int0-2"],
|
|
59
|
+
"data_2d": ["height512|biome256", "height512|biome4096"],
|
|
60
|
+
"block_entities": ["31list"],
|
|
61
|
+
"block_entity_format": [EntityIDType.namespace_str_id, EntityIDType.str_id],
|
|
62
|
+
"block_entity_coord_format": [EntityCoordType.xyz_int],
|
|
63
|
+
"entities": ["32list", "actor"],
|
|
64
|
+
"entity_format": [
|
|
65
|
+
EntityIDType.namespace_str_identifier,
|
|
66
|
+
EntityIDType.int_id,
|
|
67
|
+
],
|
|
68
|
+
"entity_coord_format": [EntityCoordType.Pos_list_float],
|
|
69
|
+
"terrain": ["30array", "2farray", "2f1palette", "2fnpalette"],
|
|
70
|
+
}
|
|
71
|
+
self._features = {key: None for key in self._feature_options.keys()}
|
|
72
|
+
|
|
73
|
+
def _set_feature(self, feature: str, option: Any):
|
|
74
|
+
assert feature in self._feature_options, f"{feature} is not a valid feature."
|
|
75
|
+
assert (
|
|
76
|
+
option in self._feature_options[feature]
|
|
77
|
+
), f'Invalid option {option} for feature "{feature}"'
|
|
78
|
+
self._features[feature] = option
|
|
79
|
+
|
|
80
|
+
def is_valid(self, key: Tuple) -> bool:
|
|
81
|
+
return key[0] == "bedrock" and key[1] == self.chunk_version
|
|
82
|
+
|
|
83
|
+
def get_translator(
|
|
84
|
+
self,
|
|
85
|
+
max_world_version: Tuple[PlatformType, VersionNumberTuple],
|
|
86
|
+
data: ChunkData = None,
|
|
87
|
+
) -> Tuple["Translator", VersionNumberTuple]:
|
|
88
|
+
"""
|
|
89
|
+
Get the Translator class for the requested version.
|
|
90
|
+
:param max_world_version: The game version the world was last opened in. Version number tuple or data version number.
|
|
91
|
+
:param data: Optional data to get translator based on chunk version rather than world version
|
|
92
|
+
:return: Tuple[Translator, version number for PyMCTranslate to use]
|
|
93
|
+
"""
|
|
94
|
+
if data:
|
|
95
|
+
if b"," in data:
|
|
96
|
+
chunk_version = data[b","][0]
|
|
97
|
+
else:
|
|
98
|
+
chunk_version = data[b"v"][0]
|
|
99
|
+
game_version = chunk_to_game_version(max_world_version[1], chunk_version)
|
|
100
|
+
else:
|
|
101
|
+
game_version = max_world_version[1]
|
|
102
|
+
return loader.Translators.get(("bedrock", game_version)), game_version
|
|
103
|
+
|
|
104
|
+
@staticmethod
|
|
105
|
+
def _chunk_key_to_sub_chunk(cy: int, min_y: int) -> int:
|
|
106
|
+
"""Convert the database sub-chunk key to the sub-chunk index."""
|
|
107
|
+
return cy
|
|
108
|
+
|
|
109
|
+
@staticmethod
|
|
110
|
+
def _get_sub_chunk_storage_byte(cy: int, min_y: int) -> bytes:
|
|
111
|
+
return struct.pack("b", cy)
|
|
112
|
+
|
|
113
|
+
def decode(
|
|
114
|
+
self, cx: int, cz: int, chunk_data: ChunkData, bounds: Tuple[int, int]
|
|
115
|
+
) -> Tuple[Chunk, AnyNDArray]:
|
|
116
|
+
"""
|
|
117
|
+
Create an amulet.api.chunk.Chunk object from raw data given by the format
|
|
118
|
+
:param cx: chunk x coordinate
|
|
119
|
+
:param cz: chunk z coordinate
|
|
120
|
+
:param chunk_data: Raw chunk data provided by the format.
|
|
121
|
+
:param bounds: The minimum and maximum height of the chunk.
|
|
122
|
+
:return: Chunk object in version-specific format, along with the block_palette for that chunk.
|
|
123
|
+
"""
|
|
124
|
+
chunk = Chunk(cx, cz)
|
|
125
|
+
chunk_palette = numpy.empty(0, dtype=object)
|
|
126
|
+
chunk.misc = {"bedrock_chunk_data": chunk_data}
|
|
127
|
+
|
|
128
|
+
chunk_data.pop(b"v", None)
|
|
129
|
+
chunk_data.pop(b",", None)
|
|
130
|
+
|
|
131
|
+
if self._features["terrain"].startswith(
|
|
132
|
+
"2f"
|
|
133
|
+
): # ["2farray", "2f1palette", "2fnpalette"]
|
|
134
|
+
subchunks = {}
|
|
135
|
+
for key in chunk_data.copy().keys():
|
|
136
|
+
if len(key) == 2 and key[0:1] == b"\x2F":
|
|
137
|
+
cy = struct.unpack("b", key[1:2])[0]
|
|
138
|
+
subchunks[
|
|
139
|
+
self._chunk_key_to_sub_chunk(cy, bounds[0] >> 4)
|
|
140
|
+
] = chunk_data.pop(key)
|
|
141
|
+
chunk.blocks, chunk_palette = self._load_subchunks(subchunks)
|
|
142
|
+
elif self._features["terrain"] == "30array":
|
|
143
|
+
section_data = chunk_data.pop(b"\x30", None)
|
|
144
|
+
if section_data is not None:
|
|
145
|
+
block_ids = numpy.frombuffer(
|
|
146
|
+
section_data[: 2**15], dtype=numpy.uint8
|
|
147
|
+
).astype(numpy.uint16)
|
|
148
|
+
block_data = from_nibble_array(
|
|
149
|
+
numpy.frombuffer(
|
|
150
|
+
section_data[2**15 : 2**15 + 2**14], dtype=numpy.uint8
|
|
151
|
+
)
|
|
152
|
+
)
|
|
153
|
+
|
|
154
|
+
# there is other data here but we are going to skip over it
|
|
155
|
+
combined_palette, block_array = fast_unique(
|
|
156
|
+
numpy.transpose(
|
|
157
|
+
((block_ids << 4) + block_data).reshape(16, 16, 128), (0, 2, 1)
|
|
158
|
+
)
|
|
159
|
+
)
|
|
160
|
+
chunk.blocks = {
|
|
161
|
+
i: block_array[:, i * 16 : (i + 1) * 16, :] for i in range(8)
|
|
162
|
+
}
|
|
163
|
+
palette: AnyNDArray = numpy.array(
|
|
164
|
+
[combined_palette >> 4, combined_palette & 15]
|
|
165
|
+
).T
|
|
166
|
+
chunk_palette = numpy.empty(len(palette), dtype=object)
|
|
167
|
+
for i, b in enumerate(palette):
|
|
168
|
+
chunk_palette[i] = ((None, tuple(b)),)
|
|
169
|
+
|
|
170
|
+
else:
|
|
171
|
+
raise Exception
|
|
172
|
+
|
|
173
|
+
if self._features["finalised_state"] == "int0-2":
|
|
174
|
+
state = chunk_data.pop(b"\x36", None)
|
|
175
|
+
val = 2
|
|
176
|
+
if isinstance(state, bytes):
|
|
177
|
+
if len(state) == 1:
|
|
178
|
+
# old versions of the game store this as a byte
|
|
179
|
+
val = struct.unpack("b", state)[0]
|
|
180
|
+
elif len(state) == 4:
|
|
181
|
+
# newer versions store it as an int
|
|
182
|
+
val = struct.unpack("<i", state)[0]
|
|
183
|
+
chunk.status = val
|
|
184
|
+
|
|
185
|
+
if b"+" in chunk_data:
|
|
186
|
+
height, biome = self._decode_height_3d_biomes(
|
|
187
|
+
chunk_data[b"+"], bounds[0] >> 4
|
|
188
|
+
)
|
|
189
|
+
chunk.misc["height"] = height
|
|
190
|
+
chunk.biomes = biome
|
|
191
|
+
elif b"\x2D" in chunk_data:
|
|
192
|
+
d2d = chunk_data[b"\x2D"]
|
|
193
|
+
height, biome = (
|
|
194
|
+
numpy.frombuffer(d2d[:512], "<i2").reshape((16, 16)),
|
|
195
|
+
d2d[512:],
|
|
196
|
+
)
|
|
197
|
+
chunk.misc["height"] = height
|
|
198
|
+
chunk.biomes = numpy.frombuffer(biome, dtype="uint8").reshape(16, 16).T
|
|
199
|
+
|
|
200
|
+
# TODO: implement key support
|
|
201
|
+
# \x2D heightmap and biomes
|
|
202
|
+
# \x31 block entity
|
|
203
|
+
# \x32 entity
|
|
204
|
+
# \x33 ticks
|
|
205
|
+
# \x34 block extra data
|
|
206
|
+
# \x35 biome state
|
|
207
|
+
# \x39 7 ints and an end (03)? Honestly don't know what this is
|
|
208
|
+
# \x3A fire tick?
|
|
209
|
+
|
|
210
|
+
# \x2E 2d legacy
|
|
211
|
+
# \x30 legacy terrain
|
|
212
|
+
|
|
213
|
+
# unpack block entities and entities
|
|
214
|
+
if self._features["block_entities"] == "31list":
|
|
215
|
+
block_entities = self._unpack_nbt_list(chunk_data.pop(b"\x31", b""))
|
|
216
|
+
chunk.block_entities = self._decode_block_entity_list(block_entities)
|
|
217
|
+
|
|
218
|
+
if self._features["entities"] in ("32list", "actor"):
|
|
219
|
+
entities = self._unpack_nbt_list(chunk_data.pop(b"\x32", b""))
|
|
220
|
+
if amulet.entity_support:
|
|
221
|
+
chunk.entities = self._decode_entity_list(entities)
|
|
222
|
+
else:
|
|
223
|
+
chunk._native_entities = EntityList(self._decode_entity_list(entities))
|
|
224
|
+
chunk._native_version = (
|
|
225
|
+
"bedrock",
|
|
226
|
+
(0, 0, 0),
|
|
227
|
+
) # TODO: find a way to determine entity version
|
|
228
|
+
|
|
229
|
+
if self._features["entities"] == "actor":
|
|
230
|
+
if chunk_data.entity_actor:
|
|
231
|
+
if amulet.entity_support:
|
|
232
|
+
chunk.entities += self._decode_entity_list(chunk_data.entity_actor)
|
|
233
|
+
else:
|
|
234
|
+
chunk._native_entities += self._decode_entity_list(
|
|
235
|
+
chunk_data.entity_actor
|
|
236
|
+
)
|
|
237
|
+
chunk._native_version = ("bedrock", (0, 0, 0))
|
|
238
|
+
chunk_data.entity_actor.clear()
|
|
239
|
+
|
|
240
|
+
return chunk, chunk_palette
|
|
241
|
+
|
|
242
|
+
def encode(
|
|
243
|
+
self,
|
|
244
|
+
chunk: Chunk,
|
|
245
|
+
palette: AnyNDArray,
|
|
246
|
+
max_world_version: VersionIdentifierTuple,
|
|
247
|
+
bounds: Tuple[int, int],
|
|
248
|
+
) -> Dict[bytes, Optional[bytes]]:
|
|
249
|
+
chunk_data = chunk.misc.get("bedrock_chunk_data", {})
|
|
250
|
+
if isinstance(chunk_data, ChunkData):
|
|
251
|
+
pass
|
|
252
|
+
elif isinstance(chunk_data, dict):
|
|
253
|
+
chunk_data = ChunkData(
|
|
254
|
+
{
|
|
255
|
+
k: v
|
|
256
|
+
for k, v in chunk_data.items()
|
|
257
|
+
if isinstance(k, bytes) and isinstance(v, bytes)
|
|
258
|
+
}
|
|
259
|
+
)
|
|
260
|
+
else:
|
|
261
|
+
chunk_data = ChunkData()
|
|
262
|
+
|
|
263
|
+
chunk_data: ChunkData
|
|
264
|
+
|
|
265
|
+
# chunk version
|
|
266
|
+
if self.chunk_version is not None:
|
|
267
|
+
chunk_data[b"v" if self.chunk_version <= 20 else b","] = bytes(
|
|
268
|
+
[self.chunk_version]
|
|
269
|
+
)
|
|
270
|
+
|
|
271
|
+
# terrain data
|
|
272
|
+
terrain = self._encode_subchunks(
|
|
273
|
+
chunk.blocks, palette, bounds, max_world_version
|
|
274
|
+
)
|
|
275
|
+
min_y = bounds[0] // 16
|
|
276
|
+
for cy, sub_chunk in terrain.items():
|
|
277
|
+
chunk_data[
|
|
278
|
+
b"\x2F" + self._get_sub_chunk_storage_byte(cy, min_y)
|
|
279
|
+
] = sub_chunk
|
|
280
|
+
|
|
281
|
+
# chunk status
|
|
282
|
+
if self._features["finalised_state"] == "int0-2":
|
|
283
|
+
chunk_data[b"\x36"] = struct.pack(
|
|
284
|
+
"<i", chunk.status.as_type(StatusFormats.Bedrock)
|
|
285
|
+
)
|
|
286
|
+
|
|
287
|
+
# biome and height data
|
|
288
|
+
if self._features["data_2d"] == "height512|biome256":
|
|
289
|
+
d2d: List[bytes] = [self._encode_height(chunk)]
|
|
290
|
+
chunk.biomes.convert_to_2d()
|
|
291
|
+
d2d.append(chunk.biomes.astype("uint8").T.tobytes())
|
|
292
|
+
chunk_data[b"\x2D"] = b"".join(d2d)
|
|
293
|
+
if b"+" in chunk_data:
|
|
294
|
+
chunk_data[b"+"] = None
|
|
295
|
+
elif self._features["data_2d"] == "height512|biome4096":
|
|
296
|
+
chunk_data[b"+"] = self._encode_height_3d_biomes(
|
|
297
|
+
chunk, bounds[0] >> 4, bounds[1] >> 4
|
|
298
|
+
)
|
|
299
|
+
if b"\x2D" in chunk_data:
|
|
300
|
+
chunk_data[b"\x2D"] = None
|
|
301
|
+
|
|
302
|
+
# pack block entities and entities
|
|
303
|
+
if self._features["block_entities"] == "31list":
|
|
304
|
+
block_entities_out = self._encode_block_entity_list(chunk.block_entities)
|
|
305
|
+
|
|
306
|
+
if block_entities_out:
|
|
307
|
+
chunk_data[b"\x31"] = self._pack_nbt_list(block_entities_out)
|
|
308
|
+
else:
|
|
309
|
+
chunk_data[b"\x31"] = None
|
|
310
|
+
|
|
311
|
+
if self._features["entities"] == "32list":
|
|
312
|
+
|
|
313
|
+
def save_entities(entities_out):
|
|
314
|
+
if entities_out:
|
|
315
|
+
chunk_data[b"\x32"] = self._pack_nbt_list(entities_out)
|
|
316
|
+
else:
|
|
317
|
+
chunk_data[b"\x32"] = None
|
|
318
|
+
|
|
319
|
+
if amulet.entity_support:
|
|
320
|
+
save_entities(self._encode_entity_list(chunk.entities))
|
|
321
|
+
else:
|
|
322
|
+
try:
|
|
323
|
+
if chunk._native_version[0] == "bedrock":
|
|
324
|
+
save_entities(self._encode_entity_list(chunk._native_entities))
|
|
325
|
+
except:
|
|
326
|
+
pass
|
|
327
|
+
|
|
328
|
+
elif self._features["entities"] == "actor":
|
|
329
|
+
chunk_data.entity_actor.clear()
|
|
330
|
+
if amulet.entity_support:
|
|
331
|
+
chunk_data.entity_actor.extend(self._encode_entity_list(chunk.entities))
|
|
332
|
+
else:
|
|
333
|
+
try:
|
|
334
|
+
if chunk._native_version[0] == "bedrock":
|
|
335
|
+
chunk_data.entity_actor.extend(
|
|
336
|
+
self._encode_entity_list(chunk._native_entities)
|
|
337
|
+
)
|
|
338
|
+
except:
|
|
339
|
+
pass
|
|
340
|
+
|
|
341
|
+
return chunk_data
|
|
342
|
+
|
|
343
|
+
def _load_subchunks(
|
|
344
|
+
self, subchunks: Dict[int, Optional[bytes]]
|
|
345
|
+
) -> Tuple[Dict[int, SubChunkNDArray], AnyNDArray]:
|
|
346
|
+
"""
|
|
347
|
+
Load a list of bytes objects which contain chunk data
|
|
348
|
+
This function should be able to load all sub-chunk formats (technically before it)
|
|
349
|
+
All sub-chunks will almost certainly all have the same sub-chunk version but
|
|
350
|
+
it should be able to handle a case where that is not true.
|
|
351
|
+
|
|
352
|
+
As such this function will return a Chunk and a rather complicated block_palette
|
|
353
|
+
The newer formats allow multiple blocks to occupy the same space and the
|
|
354
|
+
newer versions also include a version ber block. So this will also need
|
|
355
|
+
returning for the translator to handle.
|
|
356
|
+
|
|
357
|
+
The block_palette will be a numpy array containing tuple objects
|
|
358
|
+
The tuple represents the "block" however can contain more than one Block object.
|
|
359
|
+
Inside the tuple are one or more tuples.
|
|
360
|
+
These include the block version number and the block itself
|
|
361
|
+
The block version number will be either None if no block version is given
|
|
362
|
+
or a tuple containing 4 ints.
|
|
363
|
+
|
|
364
|
+
The block will be either a Block class for the newer formats or a tuple of two ints for the older formats
|
|
365
|
+
"""
|
|
366
|
+
blocks: Dict[int, SubChunkNDArray] = {}
|
|
367
|
+
palette: List[
|
|
368
|
+
Tuple[Tuple[Optional[int], Union[Tuple[int, int], Block]], ...]
|
|
369
|
+
] = [
|
|
370
|
+
(
|
|
371
|
+
(
|
|
372
|
+
17563649,
|
|
373
|
+
Block(
|
|
374
|
+
namespace="minecraft",
|
|
375
|
+
base_name="air",
|
|
376
|
+
properties={"block_data": IntTag(0)},
|
|
377
|
+
),
|
|
378
|
+
),
|
|
379
|
+
)
|
|
380
|
+
]
|
|
381
|
+
for cy, data in subchunks.items():
|
|
382
|
+
if data is None:
|
|
383
|
+
continue
|
|
384
|
+
if data[0] in {0, 2, 3, 4, 5, 6, 7}:
|
|
385
|
+
block_ids = numpy.frombuffer(
|
|
386
|
+
data[1 : 1 + 2**12], dtype=numpy.uint8
|
|
387
|
+
).astype(numpy.uint16)
|
|
388
|
+
block_data = from_nibble_array(
|
|
389
|
+
numpy.frombuffer(
|
|
390
|
+
data[1 + 2**12 : 1 + 2**12 + 2**11], dtype=numpy.uint8
|
|
391
|
+
)
|
|
392
|
+
)
|
|
393
|
+
combined_palette, block_array = fast_unique(
|
|
394
|
+
numpy.transpose(
|
|
395
|
+
((block_ids << 4) + block_data).reshape(16, 16, 16), (0, 2, 1)
|
|
396
|
+
)
|
|
397
|
+
)
|
|
398
|
+
blocks[cy] = block_array + len(palette)
|
|
399
|
+
for b in numpy.array([combined_palette >> 4, combined_palette & 15]).T:
|
|
400
|
+
palette.append(((None, tuple(b)),))
|
|
401
|
+
|
|
402
|
+
else:
|
|
403
|
+
if data[0] == 1:
|
|
404
|
+
storage_count = 1
|
|
405
|
+
data = data[1:]
|
|
406
|
+
elif data[0] == 8:
|
|
407
|
+
storage_count, data = data[1], data[2:]
|
|
408
|
+
elif data[0] == 9:
|
|
409
|
+
# There is an extra byte in this format storing the cy value
|
|
410
|
+
storage_count, cy, data = (
|
|
411
|
+
data[1],
|
|
412
|
+
struct.unpack("b", data[2:3])[0],
|
|
413
|
+
data[3:],
|
|
414
|
+
)
|
|
415
|
+
else:
|
|
416
|
+
raise Exception(f"sub-chunk version {data[0]} is not known.")
|
|
417
|
+
|
|
418
|
+
sub_chunk_blocks = numpy.zeros(
|
|
419
|
+
(16, 16, 16, storage_count), dtype=numpy.uint32
|
|
420
|
+
)
|
|
421
|
+
sub_chunk_palette: List[List[Tuple[Optional[int], Block]]] = []
|
|
422
|
+
for storage_index in range(storage_count):
|
|
423
|
+
(
|
|
424
|
+
sub_chunk_blocks[:, :, :, storage_index],
|
|
425
|
+
palette_data,
|
|
426
|
+
data,
|
|
427
|
+
) = self._load_palette_blocks(data)
|
|
428
|
+
palette_data_out: List[Tuple[Optional[int], Block]] = []
|
|
429
|
+
for block in palette_data:
|
|
430
|
+
block = block.compound
|
|
431
|
+
*namespace_, base_name = block["name"].py_str.split(":", 1)
|
|
432
|
+
namespace = namespace_[0] if namespace_ else "minecraft"
|
|
433
|
+
if "version" in block:
|
|
434
|
+
version: Optional[int] = block.get_int("version").py_int
|
|
435
|
+
else:
|
|
436
|
+
version = None
|
|
437
|
+
|
|
438
|
+
if "states" in block or "val" not in block: # 1.13 format
|
|
439
|
+
properties = block.get_compound(
|
|
440
|
+
"states", CompoundTag()
|
|
441
|
+
).py_dict
|
|
442
|
+
if version is None:
|
|
443
|
+
version = 17694720 # 1, 14, 0, 0
|
|
444
|
+
else:
|
|
445
|
+
properties = {"block_data": IntTag(block["val"].py_int)}
|
|
446
|
+
palette_data_out.append(
|
|
447
|
+
(
|
|
448
|
+
version,
|
|
449
|
+
Block(
|
|
450
|
+
namespace=namespace,
|
|
451
|
+
base_name=base_name,
|
|
452
|
+
properties=properties,
|
|
453
|
+
),
|
|
454
|
+
)
|
|
455
|
+
)
|
|
456
|
+
sub_chunk_palette.append(palette_data_out)
|
|
457
|
+
|
|
458
|
+
if storage_count == 1:
|
|
459
|
+
blocks[cy] = sub_chunk_blocks[:, :, :, 0] + len(palette)
|
|
460
|
+
palette += [(val,) for val in sub_chunk_palette[0]]
|
|
461
|
+
elif storage_count > 1:
|
|
462
|
+
# we have two or more storages so need to find the unique block combinations and merge them together
|
|
463
|
+
sub_chunk_palette_, sub_chunk_blocks = numpy.unique(
|
|
464
|
+
sub_chunk_blocks.reshape(-1, storage_count),
|
|
465
|
+
return_inverse=True,
|
|
466
|
+
axis=0,
|
|
467
|
+
)
|
|
468
|
+
blocks[cy] = sub_chunk_blocks.reshape(16, 16, 16).astype(
|
|
469
|
+
numpy.uint32
|
|
470
|
+
) + len(palette)
|
|
471
|
+
palette += [
|
|
472
|
+
tuple(
|
|
473
|
+
sub_chunk_palette[storage_index][index]
|
|
474
|
+
for storage_index, index in enumerate(palette_indexes)
|
|
475
|
+
if not (
|
|
476
|
+
storage_index > 0
|
|
477
|
+
and sub_chunk_palette[storage_index][index][
|
|
478
|
+
1
|
|
479
|
+
].namespaced_name
|
|
480
|
+
== "minecraft:air"
|
|
481
|
+
)
|
|
482
|
+
)
|
|
483
|
+
for palette_indexes in sub_chunk_palette_
|
|
484
|
+
]
|
|
485
|
+
else:
|
|
486
|
+
continue
|
|
487
|
+
|
|
488
|
+
# block_palette should now look like this
|
|
489
|
+
# List[
|
|
490
|
+
# Tuple[
|
|
491
|
+
# Tuple[version, Block], ...
|
|
492
|
+
# ]
|
|
493
|
+
# ]
|
|
494
|
+
|
|
495
|
+
numpy_palette, lut = brute_sort_objects(palette)
|
|
496
|
+
for cy in blocks.keys():
|
|
497
|
+
blocks[cy] = lut[blocks[cy]]
|
|
498
|
+
|
|
499
|
+
return blocks, numpy_palette
|
|
500
|
+
|
|
501
|
+
def _encode_subchunks(
    self,
    blocks: "Blocks",
    palette: AnyNDArray,
    bounds: Tuple[int, int],
    max_world_version: VersionIdentifierTuple,
) -> Dict[int, Optional[bytes]]:
    """
    Serialise the chunk's blocks into per-sub-chunk binary data.

    Abstract hook: concrete chunk-version interfaces override this with the
    encoding appropriate to their format version.

    :param blocks: The chunk's block array container.
    :param palette: The block palette that the indexes in ``blocks`` refer to.
    :param bounds: The (min, max) build bounds for the dimension.
    :param max_world_version: The maximum game version the data is saved as.
    :return: A mapping from sub-chunk index to its encoded bytes (or None for empty).
    :raises NotImplementedError: Always; subclasses must override this method.
    """
    raise NotImplementedError
|
|
509
|
+
|
|
510
|
+
def _save_subchunks_1(
    self, blocks: "Blocks", palette: AnyNDArray
) -> Dict[int, Optional[bytes]]:
    """
    Encode the blocks in the version-1 sub-chunk format (name + data value).

    Note: rewrites ``palette`` entries in place to their NBT representation.

    :param blocks: The chunk's block array container.
    :param palette: Array of palette entries; each is a tuple of (version, Block) pairs.
    :return: A mapping from sub-chunk index to its encoded bytes (None when absent).
    """
    # Convert every palette entry to the numbered NBT form used on disk.
    for i, entry in enumerate(palette):
        entry: Tuple[Tuple[None, Block], ...]
        data_tag = entry[0][1].properties.get("block_data", IntTag(0))
        data_value = data_tag.py_int if isinstance(data_tag, IntTag) else 0
        palette[i] = NamedTag(
            CompoundTag(
                {
                    "name": StringTag(entry[0][1].namespaced_name),
                    "val": ShortTag(data_value),
                }
            )
        )

    encoded: Dict[int, Optional[bytes]] = {}
    for cy in range(16):
        if cy not in blocks:
            encoded[cy] = None
            continue
        palette_index, sub_chunk = fast_unique(blocks.get_sub_chunk(cy))
        # \x01 marks sub-chunk storage version 1 (single block storage).
        encoded[cy] = b"\x01" + self._save_palette_subchunk(
            sub_chunk.ravel(), list(palette[palette_index])
        )
    return encoded
|
|
542
|
+
|
|
543
|
+
# These aren't actual blocks, just ids pointing to the block_palette.
|
|
544
|
+
|
|
545
|
+
@staticmethod
|
|
546
|
+
def _decode_packed_array(data: bytes) -> Tuple[bytes, int, Optional[numpy.ndarray]]:
|
|
547
|
+
"""
|
|
548
|
+
Parse a packed array as documented here
|
|
549
|
+
https://gist.github.com/Tomcc/a96af509e275b1af483b25c543cfbf37
|
|
550
|
+
|
|
551
|
+
:param data: The data to parse
|
|
552
|
+
:return:
|
|
553
|
+
"""
|
|
554
|
+
# Ignore LSB of data (its a flag) and get compacting level
|
|
555
|
+
bits_per_value, data = struct.unpack("b", data[0:1])[0] >> 1, data[1:]
|
|
556
|
+
if bits_per_value > 0:
|
|
557
|
+
values_per_word = (
|
|
558
|
+
32 // bits_per_value
|
|
559
|
+
) # Word = 4 bytes, basis of compacting.
|
|
560
|
+
word_count = -(
|
|
561
|
+
-4096 // values_per_word
|
|
562
|
+
) # Ceiling divide is inverted floor divide
|
|
563
|
+
|
|
564
|
+
arr = numpy.packbits(
|
|
565
|
+
numpy.pad(
|
|
566
|
+
numpy.unpackbits(
|
|
567
|
+
numpy.frombuffer(
|
|
568
|
+
bytes(reversed(data[: 4 * word_count])), dtype="uint8"
|
|
569
|
+
)
|
|
570
|
+
)
|
|
571
|
+
.reshape(-1, 32)[:, -values_per_word * bits_per_value :]
|
|
572
|
+
.reshape(-1, bits_per_value)[-4096:, :],
|
|
573
|
+
[(0, 0), (16 - bits_per_value, 0)],
|
|
574
|
+
"constant",
|
|
575
|
+
)
|
|
576
|
+
).view(dtype=">i2")[::-1]
|
|
577
|
+
arr = arr.reshape((16, 16, 16)).swapaxes(1, 2)
|
|
578
|
+
data = data[4 * word_count :]
|
|
579
|
+
else:
|
|
580
|
+
arr = None
|
|
581
|
+
return data, bits_per_value, arr
|
|
582
|
+
|
|
583
|
+
def _decode_height_3d_biomes(
|
|
584
|
+
self, data: bytes, floor_cy: int
|
|
585
|
+
) -> Tuple[numpy.ndarray, Dict[int, numpy.ndarray]]:
|
|
586
|
+
# The 3D biome format consists of 25 16x arrays with the first array corresponding to the lowest sub-chunk in the world
|
|
587
|
+
# This is -64 in the overworld and 0 in the nether and end
|
|
588
|
+
# TODO: make this support the full 16x
|
|
589
|
+
heightmap, data = (
|
|
590
|
+
numpy.frombuffer(data[:512], "<i2").reshape((16, 16)),
|
|
591
|
+
data[512:],
|
|
592
|
+
)
|
|
593
|
+
biomes = {}
|
|
594
|
+
cy = floor_cy
|
|
595
|
+
while data:
|
|
596
|
+
data, bits_per_value, arr = self._decode_packed_array(data)
|
|
597
|
+
if bits_per_value == 0:
|
|
598
|
+
value, data = struct.unpack(f"<I", data[:4])[0], data[4:]
|
|
599
|
+
# TODO: when the new biome system supports ints just return the value
|
|
600
|
+
biomes[cy] = numpy.full((4, 4, 4), value, dtype=numpy.uint32)
|
|
601
|
+
elif bits_per_value > 0:
|
|
602
|
+
arr = arr[::4, ::4, ::4]
|
|
603
|
+
palette_len, data = struct.unpack("<I", data[:4])[0], data[4:]
|
|
604
|
+
biomes[cy] = numpy.frombuffer(data, "<i4", palette_len)[arr].astype(
|
|
605
|
+
numpy.uint32
|
|
606
|
+
)
|
|
607
|
+
data = data[4 * palette_len :]
|
|
608
|
+
cy += 1
|
|
609
|
+
return heightmap, biomes
|
|
610
|
+
|
|
611
|
+
def _encode_height(self, chunk) -> bytes:
|
|
612
|
+
height = chunk.misc.get("height")
|
|
613
|
+
if isinstance(height, numpy.ndarray) and height.size == 256:
|
|
614
|
+
return height.ravel().astype("<i2").tobytes()
|
|
615
|
+
else:
|
|
616
|
+
return b"\x00" * 512
|
|
617
|
+
|
|
618
|
+
def _encode_height_3d_biomes(
    self, chunk: Chunk, floor_cy: int, ceil_cy: int
) -> bytes:
    """
    Encode the height map and 3D biome arrays into the Data3D payload.

    :param chunk: The chunk whose height and biome data should be encoded.
    :param floor_cy: The lowest sub-chunk index of the dimension.
    :param ceil_cy: The highest sub-chunk index of the dimension.
    :return: The packed height map followed by 25 encoded biome sections.
    """
    d2d: List[bytes] = [self._encode_height(chunk)]
    chunk.biomes.convert_to_3d()
    # at least one biome array needs to be defined
    # all biome arrays below the highest biome array must be populated.
    highest = next(
        (cy for cy in range(ceil_cy, floor_cy - 1, -1) if cy in chunk.biomes), None
    )
    if highest is None:
        # populate lowest array
        chunk.biomes.create_section(floor_cy)
    else:
        # Fill any gaps below the highest defined section by extending the
        # section above downwards.
        for cy in range(highest - 1, floor_cy - 1, -1):
            if cy not in chunk.biomes:
                chunk.biomes.add_section(
                    cy,
                    numpy.repeat(
                        # get the array for the sub-chunk above and get the lowest slice
                        chunk.biomes.get_section(cy + 1)[:, :1, :],
                        4,  # Repeat this slice 4 times along axis 1 (the vertical axis)
                        1,  # TODO: When biome editing supports 16x this will need to be changed.
                    ),
                )

    for cy in range(floor_cy, floor_cy + 25):
        if cy in chunk.biomes:
            arr = chunk.biomes.get_section(cy)
            palette, arr_uniq = numpy.unique(arr, return_inverse=True)
            if len(palette) == 1:
                # Uniform section: header byte only, no packed index array.
                d2d.append(b"\x01")
            else:
                d2d.append(
                    self._encode_packed_array(
                        arr_uniq.reshape(arr.shape)[_scale_grid]
                    )
                )
            d2d.append(struct.pack("<I", len(palette)))
            d2d.append(palette.astype("<i4").tobytes())
        else:
            # \xFF marks a section with no biome data.
            d2d.append(b"\xFF")

    return b"".join(d2d)
|
|
662
|
+
|
|
663
|
+
def _load_palette_blocks(
    self,
    data: bytes,
) -> Tuple[numpy.ndarray, List[NamedTag], bytes]:
    """
    Decode one block storage: a packed index array followed by its NBT palette.

    :param data: The raw sub-chunk storage data.
    :return: The 16x16x16 index array, the palette NBT list and the remaining data.
    """
    data, _, blocks = self._decode_packed_array(data)
    if blocks is None:
        # Zero bits per value: every cell refers to the single palette entry.
        blocks = numpy.zeros((16, 16, 16), dtype=numpy.int16)
        palette_len = 1
    else:
        palette_len, data = struct.unpack("<I", data[:4])[0], data[4:]

    if not palette_len:
        # An empty palette is invalid on disk; substitute air so the chunk loads.
        air = NamedTag(
            CompoundTag(
                {
                    "name": StringTag("minecraft:air"),
                    "states": CompoundTag(),
                    "version": IntTag(17694723),
                }
            )
        )
        return blocks, [air], data

    read_context = ReadContext()
    palette = load_many(
        data,
        compressed=False,
        count=palette_len,
        little_endian=True,
        read_context=read_context,
    )
    return blocks, palette, data[read_context.offset :]
|
|
698
|
+
|
|
699
|
+
@staticmethod
|
|
700
|
+
def _encode_packed_array(arr: numpy.ndarray, min_bit_size=1) -> bytes:
|
|
701
|
+
bits_per_value = max(int(numpy.amax(arr)).bit_length(), min_bit_size)
|
|
702
|
+
if bits_per_value == 7:
|
|
703
|
+
bits_per_value = 8
|
|
704
|
+
elif 9 <= bits_per_value <= 15:
|
|
705
|
+
bits_per_value = 16
|
|
706
|
+
header = bytes([bits_per_value << 1])
|
|
707
|
+
|
|
708
|
+
values_per_word = 32 // bits_per_value # Word = 4 bytes, basis of compacting.
|
|
709
|
+
word_count = -(
|
|
710
|
+
-4096 // values_per_word
|
|
711
|
+
) # Ceiling divide is inverted floor divide
|
|
712
|
+
|
|
713
|
+
arr = arr.swapaxes(1, 2).ravel()
|
|
714
|
+
packed_arr = bytes(
|
|
715
|
+
reversed(
|
|
716
|
+
numpy.packbits(
|
|
717
|
+
numpy.pad(
|
|
718
|
+
numpy.pad(
|
|
719
|
+
numpy.unpackbits(
|
|
720
|
+
numpy.ascontiguousarray(arr[::-1], dtype=">i").view(
|
|
721
|
+
dtype="uint8"
|
|
722
|
+
)
|
|
723
|
+
).reshape(4096, -1)[:, -bits_per_value:],
|
|
724
|
+
[(word_count * values_per_word - 4096, 0), (0, 0)],
|
|
725
|
+
"constant",
|
|
726
|
+
).reshape(-1, values_per_word * bits_per_value),
|
|
727
|
+
[(0, 0), (32 - values_per_word * bits_per_value, 0)],
|
|
728
|
+
"constant",
|
|
729
|
+
)
|
|
730
|
+
)
|
|
731
|
+
.view(dtype=">i4")
|
|
732
|
+
.tobytes()
|
|
733
|
+
)
|
|
734
|
+
)
|
|
735
|
+
return header + packed_arr
|
|
736
|
+
|
|
737
|
+
def _save_palette_subchunk(
    self, blocks: numpy.ndarray, palette: List[NamedTag]
) -> bytes:
    """Save a single layer of blocks in the block_palette format."""
    # Packed index array, palette length, then each palette entry as raw NBT.
    parts = [self._encode_packed_array(blocks), struct.pack("<I", len(palette))]
    for block in palette:
        parts.append(
            block.save_to(
                compressed=False,
                little_endian=True,
                string_encoder=utf8_escape_encoder,
            )
        )
    return b"".join(parts)
|
|
752
|
+
|
|
753
|
+
@staticmethod
def _unpack_nbt_list(raw_nbt: bytes) -> List[NamedTag]:
    """
    Decode a back-to-back sequence of little-endian NBT objects.

    :param raw_nbt: Raw binary data containing zero or more NBT objects.
    :return: The decoded NamedTag objects in order.
    """
    nbt_list = []
    while raw_nbt:
        read_context = ReadContext()
        nbt_list.append(
            load_nbt(
                raw_nbt,
                little_endian=True,
                read_context=read_context,
                string_decoder=utf8_escape_decoder,
            )
        )
        # Advance past the object that was just consumed.
        raw_nbt = raw_nbt[read_context.offset :]
    return nbt_list
|
|
767
|
+
|
|
768
|
+
@staticmethod
def _pack_nbt_list(nbt_list: List[NamedTag]):
    """
    Serialise NamedTag objects back-to-back as little-endian binary NBT.

    Entries that are not NamedTag instances are silently skipped.
    """
    parts = []
    for nbt in nbt_list:
        if isinstance(nbt, NamedTag):
            parts.append(
                nbt.save_to(
                    compressed=False,
                    little_endian=True,
                    string_encoder=utf8_escape_encoder,
                )
            )
    return b"".join(parts)
|
|
781
|
+
|
|
782
|
+
def _decode_entity_list(self, entities: List[NamedTag]) -> List["Entity"]:
|
|
783
|
+
entities_out = []
|
|
784
|
+
for nbt in entities:
|
|
785
|
+
entity = self._decode_entity(
|
|
786
|
+
nbt,
|
|
787
|
+
self._features["entity_format"],
|
|
788
|
+
self._features["entity_coord_format"],
|
|
789
|
+
)
|
|
790
|
+
if entity is not None:
|
|
791
|
+
entities_out.append(entity)
|
|
792
|
+
|
|
793
|
+
return entities_out
|
|
794
|
+
|
|
795
|
+
def _encode_entity_list(self, entities: "EntityList") -> List[NamedTag]:
|
|
796
|
+
entities_out = []
|
|
797
|
+
for entity in entities:
|
|
798
|
+
nbt = self._encode_entity(
|
|
799
|
+
entity,
|
|
800
|
+
self._features["entity_format"],
|
|
801
|
+
self._features["entity_coord_format"],
|
|
802
|
+
)
|
|
803
|
+
if nbt is not None:
|
|
804
|
+
entities_out.append(nbt)
|
|
805
|
+
|
|
806
|
+
return entities_out
|
|
807
|
+
|
|
808
|
+
def _decode_block_entity_list(
|
|
809
|
+
self, block_entities: List[NamedTag]
|
|
810
|
+
) -> List["BlockEntity"]:
|
|
811
|
+
entities_out = []
|
|
812
|
+
for nbt in block_entities:
|
|
813
|
+
entity = self._decode_block_entity(
|
|
814
|
+
nbt,
|
|
815
|
+
self._features["block_entity_format"],
|
|
816
|
+
self._features["block_entity_coord_format"],
|
|
817
|
+
)
|
|
818
|
+
if entity is not None:
|
|
819
|
+
entities_out.append(entity)
|
|
820
|
+
|
|
821
|
+
return entities_out
|
|
822
|
+
|
|
823
|
+
def _encode_block_entity_list(
|
|
824
|
+
self, block_entities: Iterable["BlockEntity"]
|
|
825
|
+
) -> List[NamedTag]:
|
|
826
|
+
entities_out = []
|
|
827
|
+
for entity in block_entities:
|
|
828
|
+
nbt = self._encode_block_entity(
|
|
829
|
+
entity,
|
|
830
|
+
self._features["block_entity_format"],
|
|
831
|
+
self._features["block_entity_coord_format"],
|
|
832
|
+
)
|
|
833
|
+
if nbt is not None:
|
|
834
|
+
entities_out.append(nbt)
|
|
835
|
+
|
|
836
|
+
return entities_out
|