arkparser 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arkparser/__init__.py +117 -0
- arkparser/common/__init__.py +72 -0
- arkparser/common/binary_reader.py +402 -0
- arkparser/common/exceptions.py +99 -0
- arkparser/common/map_config.py +166 -0
- arkparser/common/types.py +249 -0
- arkparser/common/version_detection.py +195 -0
- arkparser/data_models.py +801 -0
- arkparser/export.py +485 -0
- arkparser/files/__init__.py +25 -0
- arkparser/files/base.py +309 -0
- arkparser/files/cloud_inventory.py +259 -0
- arkparser/files/profile.py +205 -0
- arkparser/files/tribe.py +155 -0
- arkparser/files/world_save.py +699 -0
- arkparser/game_objects/__init__.py +32 -0
- arkparser/game_objects/container.py +180 -0
- arkparser/game_objects/game_object.py +273 -0
- arkparser/game_objects/location.py +87 -0
- arkparser/models/__init__.py +29 -0
- arkparser/models/character.py +227 -0
- arkparser/models/creature.py +642 -0
- arkparser/models/item.py +207 -0
- arkparser/models/player.py +263 -0
- arkparser/models/stats.py +226 -0
- arkparser/models/structure.py +176 -0
- arkparser/models/tribe.py +291 -0
- arkparser/properties/__init__.py +77 -0
- arkparser/properties/base.py +329 -0
- arkparser/properties/byte_property.py +230 -0
- arkparser/properties/compound.py +1125 -0
- arkparser/properties/primitives.py +803 -0
- arkparser/properties/registry.py +236 -0
- arkparser/py.typed +0 -0
- arkparser/structs/__init__.py +60 -0
- arkparser/structs/base.py +63 -0
- arkparser/structs/colors.py +108 -0
- arkparser/structs/misc.py +133 -0
- arkparser/structs/property_list.py +101 -0
- arkparser/structs/registry.py +140 -0
- arkparser/structs/vectors.py +221 -0
- arkparser-0.1.0.dist-info/METADATA +833 -0
- arkparser-0.1.0.dist-info/RECORD +46 -0
- arkparser-0.1.0.dist-info/WHEEL +5 -0
- arkparser-0.1.0.dist-info/licenses/LICENSE +21 -0
- arkparser-0.1.0.dist-info/top_level.txt +1 -0
arkparser/__init__.py
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
"""
ARK Save Parser - Parse ARK: Survival Evolved/Ascended save files.

This package provides tools to parse various ARK save file formats:

- Profile: Player profile data (.arkprofile)
- Tribe: Tribe data (.arktribe)
- CloudInventory: Obelisk/cloud inventory data (no extension)
- WorldSave: World save data (.ark) — auto-detects ASE binary and ASA SQLite

Supports both ASE (ARK: Survival Evolved) and ASA (ARK: Survival Ascended)
formats with automatic detection.

Example usage:
    >>> from arkparser import Profile, Tribe, CloudInventory, WorldSave
    >>>
    >>> # Load a player profile
    >>> profile = Profile.load("path/to/profile.arkprofile")
    >>> print(f"Player: {profile.player_name}")
    >>>
    >>> # Load tribe data
    >>> tribe = Tribe.load("path/to/tribe.arktribe")
    >>> print(f"Tribe: {tribe.name}, Members: {tribe.member_count}")
    >>>
    >>> # Load cloud inventory (obelisk data)
    >>> inv = CloudInventory.load("path/to/obelisk_file")
    >>> print(f"Creatures: {inv.creature_count}, Items: {inv.item_count}")
    >>>
    >>> # Load any world save — ASE or ASA, auto-detected
    >>> save = WorldSave.load("path/to/TheIsland.ark")  # ASE
    >>> save = WorldSave.load("path/to/Extinction_WP.ark")  # ASA
"""

from arkparser.common.exceptions import ArkParseError
from arkparser.common.map_config import MapConfig, get_map_config, get_map_config_by_name
from arkparser.common.version_detection import ArkFileFormat, ArkFileType, detect_file_type, detect_format
from arkparser.data_models import DinoStats, UploadedCreature, UploadedItem
from arkparser.export import (
    export_all,
    export_players,
    export_structures,
    export_tamed,
    export_to_files,
    export_tribe_logs,
    export_tribes,
    export_wild,
)
from arkparser.files import CloudInventory, Profile, Tribe, WorldSave
from arkparser.game_objects import GameObject, GameObjectContainer, LocationData
from arkparser.models import (
    Character,
    Creature,
    CreatureStats,
    Item,
    Location,
    Player,
    Structure,
    TamedCreature,
    TribeLogEntry,
    TribeMember,
    WildCreature,
)
from arkparser.models import Tribe as TribeModel

# Convenience alias - users may know this as "Obelisk" from the game.
# Kept after the import block so all imports stay grouped at the top (PEP 8).
Obelisk = CloudInventory

__all__ = [
    # File parsers
    "Profile",
    "Tribe",
    "CloudInventory",
    "Obelisk",
    "WorldSave",
    # Data models (legacy)
    "UploadedCreature",
    "UploadedItem",
    "DinoStats",
    # Game objects
    "GameObject",
    "GameObjectContainer",
    "LocationData",
    # Models
    "Creature",
    "TamedCreature",
    "WildCreature",
    "Item",
    "Player",
    "TribeModel",
    "TribeMember",
    "TribeLogEntry",
    "Structure",
    "Character",
    "CreatureStats",
    "Location",
    # Map config
    "MapConfig",
    "get_map_config",
    "get_map_config_by_name",
    # Export
    "export_all",
    "export_tamed",
    "export_wild",
    "export_players",
    "export_tribes",
    "export_structures",
    "export_tribe_logs",
    "export_to_files",
    # Utilities
    "detect_format",
    "detect_file_type",
    "ArkFileFormat",
    "ArkFileType",
    "ArkParseError",
]

__version__ = "0.1.0"
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ARK Parser - Common Utilities.
|
|
3
|
+
|
|
4
|
+
This module provides the foundational building blocks for parsing ARK save files:
|
|
5
|
+
|
|
6
|
+
- BinaryReader: Low-level binary reading with position tracking
|
|
7
|
+
- ArkName: Unreal Engine FName type (name + instance)
|
|
8
|
+
- ObjectReference: References to game objects
|
|
9
|
+
- ArkFileFormat: Enum for ASE vs ASA format detection
|
|
10
|
+
- Exceptions: Custom error types for parse failures
|
|
11
|
+
|
|
12
|
+
Example:
|
|
13
|
+
>>> from arkparser.common import BinaryReader, detect_format
|
|
14
|
+
>>> format = detect_format("save.ark")
|
|
15
|
+
>>> with BinaryReader.from_file("save.ark") as reader:
|
|
16
|
+
... version = reader.read_int32()
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from .binary_reader import BinaryReader
|
|
20
|
+
from .exceptions import (
|
|
21
|
+
ArkParseError,
|
|
22
|
+
CorruptDataError,
|
|
23
|
+
EndOfDataError,
|
|
24
|
+
UnexpectedDataError,
|
|
25
|
+
UnknownPropertyError,
|
|
26
|
+
UnknownStructError,
|
|
27
|
+
)
|
|
28
|
+
from .map_config import (
|
|
29
|
+
DEFAULT_MAP_CONFIG,
|
|
30
|
+
MapConfig,
|
|
31
|
+
get_map_config,
|
|
32
|
+
get_map_config_by_name,
|
|
33
|
+
list_maps,
|
|
34
|
+
)
|
|
35
|
+
from .types import (
|
|
36
|
+
NAME_NONE,
|
|
37
|
+
ArkName,
|
|
38
|
+
ObjectReference,
|
|
39
|
+
PropertyValue,
|
|
40
|
+
)
|
|
41
|
+
from .version_detection import (
|
|
42
|
+
ArkFileFormat,
|
|
43
|
+
detect_format,
|
|
44
|
+
get_save_version,
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
__all__ = [
|
|
48
|
+
# Binary reading
|
|
49
|
+
"BinaryReader",
|
|
50
|
+
# Types
|
|
51
|
+
"ArkName",
|
|
52
|
+
"ObjectReference",
|
|
53
|
+
"PropertyValue",
|
|
54
|
+
"NAME_NONE",
|
|
55
|
+
# Format detection
|
|
56
|
+
"ArkFileFormat",
|
|
57
|
+
"detect_format",
|
|
58
|
+
"get_save_version",
|
|
59
|
+
# Map config
|
|
60
|
+
"MapConfig",
|
|
61
|
+
"DEFAULT_MAP_CONFIG",
|
|
62
|
+
"get_map_config",
|
|
63
|
+
"get_map_config_by_name",
|
|
64
|
+
"list_maps",
|
|
65
|
+
# Exceptions
|
|
66
|
+
"ArkParseError",
|
|
67
|
+
"CorruptDataError",
|
|
68
|
+
"EndOfDataError",
|
|
69
|
+
"UnexpectedDataError",
|
|
70
|
+
"UnknownPropertyError",
|
|
71
|
+
"UnknownStructError",
|
|
72
|
+
]
|
|
@@ -0,0 +1,402 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Binary Reader - Low-level binary reading utilities.
|
|
3
|
+
|
|
4
|
+
ARK save files are binary and use little-endian byte order throughout.
|
|
5
|
+
This module provides a simple interface for reading all the primitive
|
|
6
|
+
types found in ARK files.
|
|
7
|
+
|
|
8
|
+
Binary Format Basics:
|
|
9
|
+
- All integers are little-endian
|
|
10
|
+
- Strings are length-prefixed (negative length = UTF-16)
|
|
11
|
+
- GUIDs are 16 bytes, read as little-endian
|
|
12
|
+
|
|
13
|
+
Example:
|
|
14
|
+
>>> reader = BinaryReader.from_file("save.ark")
|
|
15
|
+
>>> version = reader.read_int32()
|
|
16
|
+
>>> name = reader.read_string()
|
|
17
|
+
>>> reader.close()
|
|
18
|
+
|
|
19
|
+
# Or use as context manager:
|
|
20
|
+
>>> with BinaryReader.from_file("save.ark") as reader:
|
|
21
|
+
... version = reader.read_int32()
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
from __future__ import annotations
|
|
25
|
+
|
|
26
|
+
import struct
|
|
27
|
+
import typing as t
|
|
28
|
+
from io import BytesIO
|
|
29
|
+
from pathlib import Path
|
|
30
|
+
from uuid import UUID
|
|
31
|
+
|
|
32
|
+
from .exceptions import EndOfDataError
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class BinaryReader:
    """
    Binary data reader for ARK save files.

    All ARK files use little-endian byte order. This class provides
    methods for reading all primitive types used in the format.

    The reader tracks its position and supports slicing to create
    sub-readers for parsing nested structures.

    Attributes:
        position: Current read position in bytes.
        size: Total size of the data in bytes.
        remaining: Number of bytes left to read.
    """

    # Struct format characters (little-endian)
    _FMT_INT8 = "<b"
    _FMT_UINT8 = "<B"
    _FMT_INT16 = "<h"
    _FMT_UINT16 = "<H"
    _FMT_INT32 = "<i"
    _FMT_UINT32 = "<I"
    _FMT_INT64 = "<q"
    _FMT_UINT64 = "<Q"
    _FMT_FLOAT = "<f"
    _FMT_DOUBLE = "<d"

    def __init__(self, stream: t.BinaryIO) -> None:
        """
        Initialize with a binary stream.

        Args:
            stream: A binary file-like object supporting read/seek/tell.
        """
        self._stream = stream
        # Determine size by seeking to end (SEEK_END), then rewind.
        self._stream.seek(0, 2)
        self._size = self._stream.tell()
        self._stream.seek(0)

    def __enter__(self) -> BinaryReader:
        """Context manager entry."""
        return self

    def __exit__(self, *args: object) -> None:
        """Context manager exit - closes the stream."""
        self.close()

    def close(self) -> None:
        """Close the underlying stream."""
        self._stream.close()

    # =========================================================================
    # Factory Methods
    # =========================================================================

    @classmethod
    def from_file(cls, path: str | Path) -> BinaryReader:
        """
        Create a reader from a file path.

        The entire file is read into memory for faster access.

        Args:
            path: Path to the binary file.

        Returns:
            A new BinaryReader instance.
        """
        data = Path(path).read_bytes()
        return cls(BytesIO(data))

    @classmethod
    def from_bytes(cls, data: bytes) -> BinaryReader:
        """
        Create a reader from raw bytes.

        Args:
            data: Raw bytes to read from.

        Returns:
            A new BinaryReader instance.
        """
        return cls(BytesIO(data))

    # =========================================================================
    # Position Management
    # =========================================================================

    @property
    def position(self) -> int:
        """Current read position in the stream (0-indexed)."""
        return self._stream.tell()

    @position.setter
    def position(self, value: int) -> None:
        """
        Set the read position.

        Args:
            value: New position (0-indexed byte offset).
        """
        self._stream.seek(value)

    @property
    def size(self) -> int:
        """Total size of the data in bytes."""
        return self._size

    @property
    def remaining(self) -> int:
        """Number of bytes remaining to read."""
        return self._size - self.position

    def skip(self, count: int) -> None:
        """
        Skip forward by the specified number of bytes.

        Args:
            count: Number of bytes to skip (can be negative to go back).
        """
        self._stream.seek(count, 1)  # SEEK_CUR

    def slice(self, size: int) -> BinaryReader:
        """
        Create a sub-reader for the next `size` bytes.

        This reads `size` bytes from the current position and returns
        a new BinaryReader for just those bytes. The original reader's
        position advances past the sliced region.

        Useful for parsing nested structures with known sizes.

        Args:
            size: Number of bytes to slice.

        Returns:
            A new BinaryReader for the sliced bytes.

        Raises:
            EndOfDataError: If not enough bytes remain.
        """
        if size > self.remaining:
            raise EndOfDataError(size, self.remaining)
        data = self._stream.read(size)
        return BinaryReader.from_bytes(data)

    # =========================================================================
    # Raw Bytes
    # =========================================================================

    def read_bytes(self, count: int) -> bytes:
        """
        Read raw bytes from the stream.

        Args:
            count: Number of bytes to read.

        Returns:
            The raw bytes.

        Raises:
            EndOfDataError: If not enough bytes remain.
        """
        if count > self.remaining:
            raise EndOfDataError(count, self.remaining)
        return self._stream.read(count)

    def _unpack(self, fmt: str, size: int) -> t.Any:
        """
        Read `size` bytes and unpack a single value with struct format `fmt`.

        Routing all primitive reads through read_bytes() means a truncated
        stream consistently raises EndOfDataError instead of leaking a raw
        struct.error from an undersized buffer.

        Raises:
            EndOfDataError: If fewer than `size` bytes remain.
        """
        return struct.unpack(fmt, self.read_bytes(size))[0]

    # =========================================================================
    # Integer Types
    # =========================================================================

    def read_int8(self) -> int:
        """Read a signed 8-bit integer (-128 to 127)."""
        return self._unpack(self._FMT_INT8, 1)

    def read_uint8(self) -> int:
        """Read an unsigned 8-bit integer (0 to 255)."""
        return self._unpack(self._FMT_UINT8, 1)

    def read_int16(self) -> int:
        """Read a signed 16-bit integer."""
        return self._unpack(self._FMT_INT16, 2)

    def read_uint16(self) -> int:
        """Read an unsigned 16-bit integer."""
        return self._unpack(self._FMT_UINT16, 2)

    def read_int32(self) -> int:
        """Read a signed 32-bit integer."""
        return self._unpack(self._FMT_INT32, 4)

    def read_uint32(self) -> int:
        """Read an unsigned 32-bit integer."""
        return self._unpack(self._FMT_UINT32, 4)

    def read_int64(self) -> int:
        """Read a signed 64-bit integer."""
        return self._unpack(self._FMT_INT64, 8)

    def read_uint64(self) -> int:
        """Read an unsigned 64-bit integer."""
        return self._unpack(self._FMT_UINT64, 8)

    # =========================================================================
    # Floating Point Types
    # =========================================================================

    def read_float(self) -> float:
        """Read a 32-bit IEEE 754 float."""
        return self._unpack(self._FMT_FLOAT, 4)

    def read_double(self) -> float:
        """Read a 64-bit IEEE 754 double."""
        return self._unpack(self._FMT_DOUBLE, 8)

    # =========================================================================
    # Boolean
    # =========================================================================

    def read_bool32(self) -> bool:
        """
        Read a boolean stored as a 32-bit integer.

        This is how booleans are stored outside of BoolProperty in ASE.
        Returns True if the value is non-zero.
        """
        return self.read_uint32() != 0

    def read_bool16(self) -> bool:
        """
        Read a boolean stored as a 16-bit integer.

        This is how BoolProperty values are stored in ASA.
        Returns True if the value is non-zero.
        """
        return self.read_int16() != 0

    def read_bool8(self) -> bool:
        """
        Read a boolean stored as an 8-bit integer.

        This is how BoolProperty values are stored in ASE.
        Returns True if the value is non-zero.
        """
        return self.read_uint8() != 0

    # =========================================================================
    # Strings
    # =========================================================================

    def read_string(self) -> str:
        """
        Read a length-prefixed string.

        ARK string format:
        - Int32 length (negative = UTF-16, positive = Latin-1/ASCII)
        - String bytes including null terminator

        Special cases:
        - length = 0: Returns empty string
        - length = 1: Single null byte, returns empty string
        - length = -1: UTF-16 null (2 bytes), returns empty string

        Returns:
            The decoded string (without null terminator).
        """
        length = self.read_int32()

        # Handle special cases
        if length == 0:
            return ""
        if length == 1:
            self.skip(1)  # Skip single null byte
            return ""
        if length == -1:
            self.skip(2)  # Skip UTF-16 null (2 bytes)
            return ""

        # Determine encoding based on sign
        if length < 0:
            # UTF-16 (multibyte) - common for non-ASCII characters
            byte_count = abs(length) * 2
            data = self.read_bytes(byte_count)
            # Exclude null terminator (2 bytes for UTF-16)
            return data[:-2].decode("utf-16-le")
        else:
            # Latin-1 (single byte) - most common
            data = self.read_bytes(length)
            # Exclude null terminator (1 byte)
            return data[:-1].decode("latin-1")

    # =========================================================================
    # GUID (ASA)
    # =========================================================================

    def read_guid(self) -> UUID:
        """
        Read a 16-byte GUID (UUID).

        GUIDs in ARK are stored in little-endian format.
        Used primarily in ASA for object identification.

        Returns:
            A UUID object.
        """
        guid_bytes = self.read_bytes(16)
        return UUID(bytes_le=guid_bytes)

    def read_guid_bytes(self) -> bytes:
        """
        Read a 16-byte GUID as raw bytes.

        Useful when you just need to check if a GUID is all zeros.

        Returns:
            The raw 16 GUID bytes.
        """
        return self.read_bytes(16)

    # =========================================================================
    # Debugging
    # =========================================================================

    def peek_bytes(self, count: int) -> bytes:
        """
        Peek at the next bytes without advancing position.

        Useful for debugging or format detection.

        Args:
            count: Number of bytes to peek.

        Returns:
            The bytes (position unchanged).
        """
        pos = self.position
        data = self.read_bytes(min(count, self.remaining))
        self.position = pos
        return data

    def debug_context(self, before: int = 16, after: int = 16) -> str:
        """
        Get a hex dump of bytes around the current position.

        Useful for debugging parse errors. The reader's position is
        left unchanged.

        Args:
            before: Bytes to show before current position.
            after: Bytes to show after current position.

        Returns:
            Formatted hex dump string.
        """
        # Snapshot the position FIRST: `self.position` is a live tell()
        # property, so reading it after the raw read below would report the
        # post-read offset and the old `seek(self.position)` "restore" was
        # a no-op that left the reader moved.
        pos = self.position
        start = max(0, pos - before)
        end = min(self.size, pos + after)

        self._stream.seek(start)
        data = self._stream.read(end - start)
        self._stream.seek(pos)  # Restore original position

        # Format as hex with a caret under the byte at the saved position.
        hex_str = data.hex(" ")
        marker_pos = (pos - start) * 3  # 2 hex chars + 1 space per byte
        marker = " " * marker_pos + "^"

        return f"Position 0x{pos:X}:\n{hex_str}\n{marker}"
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ARK Parser Exceptions.
|
|
3
|
+
|
|
4
|
+
Custom exception hierarchy for ARK save file parsing errors.
|
|
5
|
+
All exceptions inherit from ArkParseError for easy catching.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ArkParseError(Exception):
    """
    Root of the ARK parsing exception hierarchy.

    Catching this single type handles every error raised while
    parsing save data:

        try:
            data = Obelisk.load("file")
        except ArkParseError as err:
            print(f"Failed to parse: {err}")
    """
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class CorruptDataError(ArkParseError):
    """
    Signals that the file contents look corrupted or invalid.

    Typical triggers:
    - a malformed file header
    - an unreasonable string length
    - data missing where the format requires it
    """
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class UnknownPropertyError(ArkParseError):
    """
    An unrecognized property type was encountered.

    ARK files contain typed properties (IntProperty, FloatProperty, etc.);
    this error reports a type name the parser has no handler for.
    """

    def __init__(self, property_type: str, position: int | None = None) -> None:
        self.property_type = property_type
        self.position = position
        # Append the byte offset only when one was supplied.
        suffix = "" if position is None else f" at position 0x{position:X}"
        super().__init__(f"Unknown property type: {property_type!r}{suffix}")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class UnknownStructError(ArkParseError):
    """
    An unrecognized struct type was encountered.

    Structs are nested data structures within properties; this error
    reports a struct type the parser has no handler for.
    """

    def __init__(self, struct_type: str, position: int | None = None) -> None:
        self.struct_type = struct_type
        self.position = position
        # Append the byte offset only when one was supplied.
        suffix = "" if position is None else f" at position 0x{position:X}"
        super().__init__(f"Unknown struct type: {struct_type!r}{suffix}")
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class UnexpectedDataError(ArkParseError):
    """
    Parsed data did not match what the format requires.

    For example, a field that should always be zero is not, or an
    expected marker byte is absent.
    """

    def __init__(self, message: str, expected: object = None, actual: object = None) -> None:
        self.expected = expected
        self.actual = actual
        # Decorate the message only when both sides were supplied.
        detail = (
            f"{message} (expected {expected!r}, got {actual!r})"
            if expected is not None and actual is not None
            else message
        )
        super().__init__(detail)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class EndOfDataError(ArkParseError):
    """
    A read was attempted past the end of the available data.

    Usually indicates a parsing error earlier in the file, or a
    truncated file.
    """

    def __init__(self, requested: int, available: int) -> None:
        self.requested = requested
        self.available = available
        super().__init__(
            f"Attempted to read {requested} bytes, but only {available} bytes available"
        )
|