turbine-lib 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- turbine_lib/__init__.py +0 -0
- turbine_lib/binarydata.py +133 -0
- turbine_lib/database.py +148 -0
- turbine_lib/datexportapi.py +307 -0
- turbine_lib/datfile.py +338 -0
- turbine_lib/ddssubfile.py +305 -0
- turbine_lib/font_convert.py +353 -0
- turbine_lib/fontsubfile.py +34 -0
- turbine_lib/libs/datexport.dll +0 -0
- turbine_lib/libs/msvcp71.dll +0 -0
- turbine_lib/libs/msvcp90.dll +0 -0
- turbine_lib/libs/msvcr71.dll +0 -0
- turbine_lib/libs/zlib1T.dll +0 -0
- turbine_lib/subfile.py +88 -0
- turbine_lib/subfiledata.py +30 -0
- turbine_lib/textsubfile.py +382 -0
- turbine_lib/textutils.py +29 -0
- turbine_lib/utils.py +117 -0
- turbine_lib-0.1.0.dist-info/METADATA +10 -0
- turbine_lib-0.1.0.dist-info/RECORD +22 -0
- turbine_lib-0.1.0.dist-info/WHEEL +5 -0
- turbine_lib-0.1.0.dist-info/top_level.txt +1 -0
turbine_lib/__init__.py
ADDED
|
File without changes
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
class BinaryData:
    """Mutable byte buffer with helpers for integer (de)serialization.

    Wraps a ``bytearray`` (``data_``) plus an explicit logical size
    (``size_``) and provides little-endian ("LE") and big-endian ("raw")
    integer packing/unpacking, file round-tripping, and slicing.
    """

    def __init__(self, data=None, size=None):
        """Create a buffer.

        Accepted forms:
          * ``BinaryData()``            — empty buffer.
          * ``BinaryData(n: int)``      — zero-filled buffer of ``n`` bytes.
          * ``BinaryData(b: bytes)``    — copy of ``b`` (optional ``size`` override).
          * ``BinaryData(b: bytearray)``— adopts ``b`` without copying, so caller
            mutations remain visible (optional ``size`` override).

        Raises:
            ValueError: for any other argument combination.
        """
        if data is None and size is None:
            self.data_ = bytearray()
            self.size_ = 0
        elif isinstance(data, int):
            # Construct with size: zero-filled buffer of `data` bytes.
            self.data_ = bytearray(data)
            self.size_ = data
        elif isinstance(data, bytes):
            # Construct from immutable bytes (copied into a bytearray).
            self.data_ = bytearray(data)
            self.size_ = len(data) if size is None else size
        elif isinstance(data, bytearray):
            # NOTE: the bytearray is adopted, not copied.
            self.data_ = data
            self.size_ = len(data) if size is None else size
        else:
            raise ValueError("Invalid arguments for BinaryData constructor")

    def __getitem__(self, pos):
        """Return the byte at ``pos``; raises IndexError past the logical size."""
        if pos >= self.size_:
            raise IndexError(f"Position {pos} is out of range in BinaryData with size {self.size_}")
        return self.data_[pos]

    def __setitem__(self, pos, value):
        """Set the byte at ``pos``; raises IndexError past the logical size."""
        if pos >= self.size_:
            raise IndexError(f"Position {pos} is out of range in BinaryData with size {self.size_}")
        self.data_[pos] = value

    def __add__(self, other):
        """Return a new BinaryData that is the concatenation of both buffers."""
        if not isinstance(other, BinaryData):
            raise TypeError("Can only add BinaryData to BinaryData")
        return BinaryData(self.data_ + other.data_)

    def __eq__(self, other):
        # Equality compares raw bytes only, not the logical size.
        if not isinstance(other, BinaryData):
            return False
        return self.data_ == other.data_

    def __ne__(self, other):
        return not self.__eq__(other)

    def __len__(self):
        return self.size_

    def empty(self):
        """Return True when the logical size is zero."""
        return self.size_ == 0

    def append(self, other, offset=0):
        """Overwrite ``other.size_`` bytes of this buffer starting at ``offset``.

        Despite the name this does not grow the buffer; it writes in place.

        Raises:
            ValueError: if the write would run past this buffer's logical size.
        """
        if offset + other.size_ > self.size_:
            raise ValueError("Data for appending has more bytes than BinaryData size!")
        self.data_[offset:offset + other.size_] = other.data_

    def to_number(self, t, pos):
        """Decode ``t`` bytes at ``pos`` as a little-endian unsigned integer.

        (The previous docstring claimed "UTF-16LE encoding"; the operation is
        plain little-endian integer decoding.)
        """
        if pos + t > self.size_:
            raise IndexError(
                f"Reading {t} bytes from {pos} offset with BinaryData size {self.size_} Reached end of BinaryData!")
        return int.from_bytes(self.data_[pos:pos + t], "little")

    def to_number_raw(self, t, pos):
        """Decode ``t`` bytes at ``pos`` as a big-endian ("raw") unsigned integer."""
        if pos + t > self.size_:
            raise IndexError(
                f"Reading {t} bytes from {pos} offset with BinaryData size {self.size_} Reached end of BinaryData!")
        return int.from_bytes(self.data_[pos:pos + t], "big")

    @staticmethod
    def from_number(t, number):
        """Encode the low ``t`` bytes of ``number`` as little-endian BinaryData.

        Values wider than ``t`` bytes are truncated and negative values are
        encoded two's-complement, matching the original shift-and-mask loop.

        Raises:
            ValueError: if ``t`` is not positive.
        """
        if t <= 0:
            raise ValueError("Trying to make data from amount of bytes < 0")
        mask = (1 << (8 * t)) - 1
        return BinaryData(bytearray((number & mask).to_bytes(t, "little")))

    @staticmethod
    def from_number_raw(t, number):
        """Encode the low ``t`` bytes of ``number`` as big-endian BinaryData.

        Raises:
            ValueError: if ``t`` is not positive.
        """
        if t <= 0:
            raise ValueError("Trying to make data from amount of bytes < 0")
        mask = (1 << (8 * t)) - 1
        return BinaryData(bytearray((number & mask).to_bytes(t, "big")))

    def size(self):
        """Return the logical size in bytes."""
        return self.size_

    def data(self):
        """Return the underlying bytearray (not a copy)."""
        return self.data_

    def write_to_file(self, filename):
        """Write the buffer to ``filename``; return True on success, False on error."""
        try:
            with open(filename, 'wb') as f:
                f.write(self.data_)
            return True
        except Exception as e:
            # Best-effort: report and signal failure instead of raising.
            print(f"Error writing to file {filename}: {e}")
            return False

    def read_from_file(self, filename):
        """Replace the buffer contents with the bytes of ``filename``.

        On failure the buffer is reset to empty (best-effort, no exception).
        """
        try:
            with open(filename, 'rb') as f:
                file_data = f.read()
            self.data_ = bytearray(file_data)
            self.size_ = len(file_data)
        except Exception as e:
            print(f"Error reading from file {filename}: {e}")
            self.size_ = 0
            self.data_ = bytearray()

    def cut_data(self, first=0, last=None):
        """Return a new BinaryData holding bytes ``[first, last)``.

        ``last`` defaults to the logical size.

        Raises:
            IndexError: if ``last`` exceeds the logical size.
        """
        if last is None:
            last = self.size()
        if last > self.size():
            raise IndexError("Unable to cut data - parameter last is out of range")
        return BinaryData(self.data_[first:last])
|
turbine_lib/database.py
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import sqlite3
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
import yaml
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Database:
    """SQLite-backed store for patch subfile rows (``patch_data`` table).

    Each row carries a file id, optional binary blob, optional text payload,
    and a YAML-serialized options mapping. Rows are fetched sequentially via
    a cached cursor (``get_next_file``).
    """

    def __init__(self):
        self.db_ = None                       # sqlite3.Connection, None while closed
        self.insert_request_ = None           # cached cursors, created lazily
        self.fetch_one_request_ = None
        self.get_rows_number_request_ = None

        self.create_table_command = """
            CREATE TABLE IF NOT EXISTS patch_data (
                fid INTEGER NOT NULL,
                binary_data BLOB,
                text_data TEXT,
                options TEXT NOT NULL
            );
        """

        self.insert_file_command = """
            INSERT INTO patch_data (fid,binary_data, text_data, options)
            VALUES (?,?, ?, ?);
        """

        self.fetch_one_command = "SELECT * FROM patch_data"
        self.clear_table_command = "DELETE FROM patch_data"
        self.get_rows_number_command = "SELECT Count(*) as count FROM patch_data"

    def close_database(self):
        """Commit and close the connection plus any cached cursors.

        Returns True on success (or when already closed), False on error.
        """
        if self.db_ is None:
            return True

        try:
            self.db_.commit()
            if self.insert_request_:
                self.insert_request_.close()
            if self.fetch_one_request_:
                self.fetch_one_request_.close()
            if self.get_rows_number_request_:
                self.get_rows_number_request_.close()
            # Reset cached cursors so a later init_database() starts clean
            # instead of reusing cursors bound to the closed connection.
            self.insert_request_ = None
            self.fetch_one_request_ = None
            self.get_rows_number_request_ = None
            self.db_.close()
            self.db_ = None
            return True
        except Exception as e:
            print(f"Database error when closing: {e}")
            return False

    def __del__(self):
        # Best-effort cleanup if the caller forgot to close explicitly.
        self.close_database()

    def init_database(self, filename):
        """Open (or create) the database at ``filename`` and ensure the table exists.

        Returns True on success, False on error.
        """
        # Only reject when the filename names a directory that does not exist.
        # (os.path.dirname() of a bare filename is "", and os.path.exists("")
        # is always False — the previous check wrongly rejected plain names.)
        parent_dir = os.path.dirname(filename)
        if parent_dir and not os.path.exists(parent_dir):
            print(f"Cannot init database: directory of {filename} does not exist!")
            return False

        self.close_database()

        try:
            self.db_ = sqlite3.connect(filename)
            # Speed-oriented pragmas: this DB is a rebuildable cache, so
            # durability is traded away deliberately.
            self.db_.execute("PRAGMA synchronous = OFF")
            self.db_.execute("PRAGMA count_changes = OFF")
            self.db_.execute("PRAGMA journal_mode = MEMORY")
            self.db_.execute("PRAGMA temp_store = MEMORY")
            self.db_.execute('PRAGMA encoding = "UTF-8"')

            self.db_.execute(self.create_table_command)
            self.db_.commit()

            return True
        except Exception as e:
            print(f"Error initializing database {filename}: {e}")
            if self.db_:
                self.db_.close()
            self.db_ = None
            return False

    def push_file(self, data):
        """Insert one SubfileData row; returns True on success, False on error.

        ``data`` must expose ``options`` (mapping with a "fid" key),
        ``binary_data`` (bytes-like) and ``text_data`` (str).
        """
        if self.db_ is None:
            print("Trying to push file to db, which hasn't been opened yet.")
            return False

        try:
            # Options are persisted as YAML text in the `options` column.
            options_str = yaml.dump(data.options) if data.options else ""

            cursor = self.db_.cursor()
            cursor.execute(self.insert_file_command,
                           (data.options["fid"],
                            bytes(data.binary_data),
                            data.text_data,
                            options_str))
            self.db_.commit()
            return True
        except Exception as e:
            print(f"SQLite3 error: {e}")
            return False

    def get_next_file(self):
        """Return the next SubfileData row, or an empty SubfileData when exhausted.

        Iteration state lives in a cached cursor, so successive calls walk the
        table; errors are reported and yield an empty SubfileData.
        """
        # Lazy import to avoid a circular dependency at module load time.
        # Prefer package-relative imports (these modules live inside
        # turbine_lib); fall back to flat imports for script-style use.
        try:
            from .subfiledata import SubfileData
            from .binarydata import BinaryData
        except ImportError:
            from subfiledata import SubfileData
            from binarydata import BinaryData

        if self.db_ is None:
            print("Trying to get next file from db, which hasn't been opened yet.")
            return SubfileData()

        try:
            if self.fetch_one_request_ is None:
                self.fetch_one_request_ = self.db_.cursor()
                self.fetch_one_request_.execute(self.fetch_one_command)

            row = self.fetch_one_request_.fetchone()
            if row is None:
                # Table exhausted.
                return SubfileData()

            data = SubfileData()
            data.fid = row[0]
            data.binary_data = BinaryData(row[1])
            data.text_data = row[2] if row[2] else ""
            # safe_load: the options column is YAML written by push_file.
            data.options = yaml.safe_load(row[3]) if row[3] else {}

            return data
        except Exception as e:
            print(f"SQLite3 fetch_one request error: {e}")
            return SubfileData()

    def count_rows(self):
        """Return the number of rows in ``patch_data`` (0 when closed or on error)."""
        if self.db_ is None:
            print("Trying to execute sql query (Count rows) to db, which hasn't been opened yet.")
            return 0

        try:
            if self.get_rows_number_request_ is None:
                self.get_rows_number_request_ = self.db_.cursor()

            self.get_rows_number_request_.execute(self.get_rows_number_command)
            result = self.get_rows_number_request_.fetchone()
            return result[0] if result else 0
        except Exception as e:
            print(f"Error when counting rows: {e}")
            return 0
|
|
@@ -0,0 +1,307 @@
|
|
|
1
|
+
import ctypes
|
|
2
|
+
import ctypes.util
|
|
3
|
+
import os
|
|
4
|
+
import platform
|
|
5
|
+
from ctypes import wintypes
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class DatExportApi:
    """ctypes wrapper around Turbine's ``datexport.dll`` and ``zlib1T.dll``.

    The DLLs bundled under ``libs/`` are 32-bit Windows binaries, so the
    constructor refuses to run on anything other than 32-bit Python on
    Windows. All methods are thin pass-throughs to the C entry points with
    ctypes marshalling.
    """

    def __init__(self):
        # The bundled DLLs are 32-bit Windows binaries: validate OS and
        # interpreter bitness before attempting to load them.
        if os.name != "nt":
            raise RuntimeError("仅支持Windows系统")  # Windows only
        if platform.architecture()[0] != "32bit":
            raise RuntimeError("仅支持32位Python")  # 32-bit Python only

        # Load the 32-bit DLLs shipped next to this module.
        dll_path = Path(__file__).parent / "libs" / "datexport.dll"
        zlib_dll_path = Path(__file__).parent / "libs" / "zlib1T.dll"
        if not dll_path.exists():
            raise FileNotFoundError("缺失32位DLL文件")  # missing 32-bit DLL
        if not zlib_dll_path.exists():
            raise FileNotFoundError("缺失32位DLL文件")  # missing 32-bit DLL
        self.datexport_dll = ctypes.WinDLL(str(dll_path))
        self.zlib_dll = ctypes.WinDLL(str(zlib_dll_path))

        # Declare C function prototypes so ctypes marshals arguments and
        # return values correctly.
        try:
            # OpenDatFileEx2
            self.open_dat_file_func = self.datexport_dll.OpenDatFileEx2
            self.open_dat_file_func.argtypes = [
                wintypes.INT,                    # handle
                ctypes.c_char_p,                 # filename
                wintypes.UINT,                   # flags
                ctypes.POINTER(wintypes.INT),    # did_master_map (out)
                ctypes.POINTER(wintypes.INT),    # block_size (out)
                ctypes.POINTER(wintypes.INT),    # vnum_dat_file (out)
                ctypes.POINTER(wintypes.INT),    # vnum_game_data (out)
                ctypes.POINTER(wintypes.ULONG),  # dat_file_id (out)
                ctypes.c_void_p,                 # dat_id_stamp (out buffer)
                ctypes.c_void_p                  # first_iter_guid (out buffer)
            ]
            self.open_dat_file_func.restype = wintypes.INT

            # GetNumSubfiles
            self.get_num_subfiles_func = self.datexport_dll.GetNumSubfiles
            self.get_num_subfiles_func.argtypes = [wintypes.INT]  # handle
            self.get_num_subfiles_func.restype = wintypes.INT

            # GetSubfileSizes
            self.get_subfile_sizes_func = self.datexport_dll.GetSubfileSizes
            self.get_subfile_sizes_func.argtypes = [
                wintypes.INT,                    # handle
                ctypes.POINTER(wintypes.UINT),   # file_id list pointer
                ctypes.POINTER(wintypes.INT),    # size list pointer
                ctypes.POINTER(wintypes.INT),    # iteration list pointer
                wintypes.INT,                    # offset
                wintypes.INT                     # count
            ]
            self.get_subfile_sizes_func.restype = wintypes.INT

            # GetSubfileVersion
            self.get_subfile_version_func = self.datexport_dll.GetSubfileVersion
            self.get_subfile_version_func.argtypes = [
                wintypes.INT,                    # handle
                wintypes.INT                     # file_id
            ]
            self.get_subfile_version_func.restype = wintypes.INT

            # GetSubfileData
            self.get_subfile_data_func = self.datexport_dll.GetSubfileData
            self.get_subfile_data_func.argtypes = [
                wintypes.INT,                    # handle
                wintypes.INT,                    # file_id
                ctypes.c_void_p,                 # buffer for storing data
                wintypes.INT,                    # always 0
                ctypes.POINTER(wintypes.INT)     # version (in/out)
            ]
            self.get_subfile_data_func.restype = wintypes.INT

            # CloseDatFile
            self.close_dat_file_func = self.datexport_dll.CloseDatFile
            self.close_dat_file_func.argtypes = [wintypes.INT]  # handle
            self.close_dat_file_func.restype = wintypes.INT

            # PurgeSubfileData
            self.purge_subfile_data_func = self.datexport_dll.PurgeSubfileData
            self.purge_subfile_data_func.argtypes = [
                wintypes.INT,                    # handle
                wintypes.INT                     # file_id
            ]
            self.purge_subfile_data_func.restype = wintypes.INT

            # PutSubfileData
            self.put_subfile_data_func = self.datexport_dll.PutSubfileData
            self.put_subfile_data_func.argtypes = [
                wintypes.INT,                    # handle
                wintypes.INT,                    # file_id
                ctypes.c_void_p,                 # buffer with subfile data
                wintypes.INT,                    # offset
                wintypes.INT,                    # size of data in bytes
                wintypes.INT,                    # version
                wintypes.INT,                    # iteration
                wintypes.BOOL                    # compress
            ]
            self.put_subfile_data_func.restype = wintypes.INT

            # Flush
            self.flush_func = self.datexport_dll.Flush
            self.flush_func.argtypes = [wintypes.INT]  # handle
            self.flush_func.restype = wintypes.INT

            # GetSubfileCompressionFlag
            self.get_subfile_compression_flag_func = self.datexport_dll.GetSubfileCompressionFlag
            self.get_subfile_compression_flag_func.argtypes = [
                wintypes.INT,                    # handle
                wintypes.INT                     # file_id
            ]
            self.get_subfile_compression_flag_func.restype = wintypes.BYTE

            # uncompress (zlib)
            self.uncompress_func = self.zlib_dll.uncompress
            self.uncompress_func.argtypes = [
                ctypes.POINTER(ctypes.c_ubyte),  # dest
                ctypes.POINTER(wintypes.INT),    # destLen (in/out)
                ctypes.POINTER(ctypes.c_ubyte),  # source
                wintypes.INT                     # sourceLen
            ]
            self.uncompress_func.restype = wintypes.INT

            # compress (zlib)
            self.compress_func = self.zlib_dll.compress
            self.compress_func.argtypes = [
                ctypes.POINTER(ctypes.c_ubyte),  # dest: output buffer pointer
                ctypes.POINTER(wintypes.INT),    # destLen: output length (in/out)
                ctypes.POINTER(ctypes.c_ubyte),  # source: input buffer pointer
                wintypes.INT                     # sourceLen: input length
            ]
            self.compress_func.restype = wintypes.INT  # zlib error code

            # compress2 (zlib, with explicit compression level)
            self.compress2_func = self.zlib_dll.compress2
            self.compress2_func.argtypes = [
                ctypes.POINTER(ctypes.c_ubyte),  # dest: output buffer pointer
                ctypes.POINTER(wintypes.INT),    # destLen: output length (in/out)
                ctypes.POINTER(ctypes.c_ubyte),  # source: input buffer pointer
                wintypes.INT,                    # sourceLen: input length
                wintypes.INT                     # level: compression level (0-9)
            ]
            # BUG FIX: this previously re-assigned self.compress_func.restype,
            # leaving compress2's return type as the ctypes default (c_int on
            # 32-bit this happens to match, but the declaration was wrong).
            self.compress2_func.restype = wintypes.INT  # zlib error code
        except Exception as e:
            raise RuntimeError(f"Error while parsing runtime library function: {e}") from e

    def open_dat_file(self, handle, filename, flags):
        """Open a .dat file via OpenDatFileEx2; returns the raw result code.

        The DLL's out-parameters (map ids, block size, version numbers,
        stamps) are received locally but currently discarded.
        """
        did_master_map = wintypes.INT()
        block_size = wintypes.INT()
        vnum_dat_file = wintypes.INT()
        vnum_game_data = wintypes.INT()
        dat_file_id = wintypes.ULONG()
        dat_id_stamp = ctypes.create_string_buffer(64)
        first_iter_guid = ctypes.create_string_buffer(64)

        result = self.open_dat_file_func(
            handle,
            filename.encode('utf-8'),
            flags,
            ctypes.byref(did_master_map),
            ctypes.byref(block_size),
            ctypes.byref(vnum_dat_file),
            ctypes.byref(vnum_game_data),
            ctypes.byref(dat_file_id),
            dat_id_stamp,
            first_iter_guid
        )

        return result

    def get_num_subfiles(self, handle):
        """Return the number of subfiles in the opened .dat file."""
        return self.get_num_subfiles_func(handle)

    def get_subfile_sizes(self, handle, file_ids, sizes, iterations, offset, count):
        """Fill the three caller-supplied lists in place via GetSubfileSizes."""
        # Convert Python lists to C arrays for the call...
        file_ids_array = (wintypes.UINT * len(file_ids))(*file_ids)
        sizes_array = (wintypes.INT * len(sizes))(*sizes)
        iterations_array = (wintypes.INT * len(iterations))(*iterations)

        self.get_subfile_sizes_func(
            handle,
            file_ids_array,
            sizes_array,
            iterations_array,
            offset,
            count
        )

        # ...then copy the DLL's results back into the Python lists.
        for i in range(len(file_ids)):
            file_ids[i] = file_ids_array[i]
            sizes[i] = sizes_array[i]
            iterations[i] = iterations_array[i]

    def get_subfile_version(self, handle, file_id):
        """Return the version number of one subfile."""
        return self.get_subfile_version_func(handle, file_id)

    def get_subfile_data(self, handle, file_id, target_buf, version):
        """Read one subfile into ``target_buf`` (a bytearray), decompressing if needed.

        Returns ``(size, version)`` where ``size`` is the number of valid
        bytes written into ``target_buf``.
        """
        version_ref = wintypes.INT(version)
        buf_ptr = (ctypes.c_ubyte * len(target_buf)).from_buffer(target_buf)
        size = self.get_subfile_data_func(
            handle,
            file_id,
            buf_ptr,
            0,
            ctypes.byref(version_ref)
        )

        # If the subfile is stored compressed, inflate it in place.
        compression_flag = self.get_subfile_compression_flag(handle, file_id)
        if compression_flag:
            # Compressed layout: 4-byte little-endian decompressed length,
            # then the zlib stream.
            decompressed_length = int.from_bytes(bytes(target_buf[:4]), "little")
            compressed_length = size - 4

            dst = bytearray(decompressed_length)
            src = bytearray(target_buf[4:size])

            result = self.uncompress(dst, decompressed_length, src, compressed_length)
            if result != 0:
                raise RuntimeError(f"Decompression failed with error code: {result}")

            # Copy the decompressed bytes back element-wise: buf_ptr above
            # holds a buffer export on target_buf, so any operation that
            # could resize the bytearray (e.g. slice assignment of a
            # different length) would raise BufferError.
            for i in range(decompressed_length):
                target_buf[i] = dst[i]
            size = decompressed_length

        return size, version_ref.value

    def close_dat_file(self, handle):
        """Close the .dat file; returns the DLL result code."""
        return self.close_dat_file_func(handle)

    def purge_subfile_data(self, handle, file_id):
        """Purge cached data for one subfile; returns the DLL result code."""
        return self.purge_subfile_data_func(handle, file_id)

    def put_subfile_data(self, handle, file_id, data, offset, size, version, iteration, compress=False):
        """Write subfile bytes from ``data`` (a writable buffer) into the .dat file."""
        data_ptr = (ctypes.c_ubyte * len(data)).from_buffer(data)
        return self.put_subfile_data_func(
            handle,
            file_id,
            data_ptr,
            offset,
            size,
            version,
            iteration,
            compress
        )

    def flush(self, handle):
        """Flush pending writes; returns the DLL result code."""
        return self.flush_func(handle)

    def get_subfile_compression_flag(self, handle, file_id):
        """Return the subfile's compression flag byte (non-zero = compressed)."""
        return self.get_subfile_compression_flag_func(handle, file_id)

    def uncompress(self, dest, dest_len, source, source_len):
        """zlib uncompress: inflate ``source`` into ``dest``; returns the zlib code."""
        # Wrap the bytearrays as ctypes arrays (zero-copy views).
        dest_array = (ctypes.c_ubyte * len(dest)).from_buffer(dest)
        source_array = (ctypes.c_ubyte * len(source)).from_buffer(source)

        # destLen is an in/out parameter, so pass it by reference.
        dest_len_value = ctypes.c_int(dest_len)
        return self.uncompress_func(dest_array, ctypes.byref(dest_len_value), source_array, source_len)

    def compress(self, dest, dest_len, source, source_len):
        """zlib compress: deflate ``source`` into ``dest``.

        Returns the number of compressed bytes written; raises RuntimeError on
        a non-zero zlib error code.
        """
        dest_array = (ctypes.c_ubyte * len(dest)).from_buffer(dest)
        source_array = (ctypes.c_ubyte * len(source)).from_buffer(source)

        # destLen is in/out: holds the buffer capacity going in, the actual
        # compressed size coming out.
        dest_len_value = ctypes.c_int(dest_len)
        result = self.compress_func(dest_array, ctypes.byref(dest_len_value), source_array, source_len)
        if result != 0:
            raise RuntimeError(f"Error while compressing data: {result}")
        return dest_len_value.value

    def compress2(self, dest, dest_len, source, source_len, level):
        """zlib compress2: deflate with an explicit compression level (0-9).

        Returns the number of compressed bytes written; raises RuntimeError on
        a non-zero zlib error code.
        """
        dest_array = (ctypes.c_ubyte * len(dest)).from_buffer(dest)
        source_array = (ctypes.c_ubyte * len(source)).from_buffer(source)

        dest_len_value = ctypes.c_int(dest_len)
        result = self.compress2_func(dest_array, ctypes.byref(dest_len_value), source_array, source_len, level)
        if result != 0:
            raise RuntimeError(f"Error while compressing data: {result}")
        return dest_len_value.value

    def __del__(self):
        if hasattr(self, 'datexport_dll'):
            # ctypes keeps loaded DLL handles for the process lifetime;
            # there is nothing portable to free explicitly here.
            pass
|