rl-item-mod 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +59 -0
- package/dist/assets.js +110 -0
- package/dist/index.js +277 -0
- package/dist/scratch/ts_debug_exports.js +33 -0
- package/dist/swapper.js +81 -0
- package/dist/upk.js +376 -0
- package/package.json +40 -0
- package/python/items.json +82850 -0
- package/python/keys.txt +1049 -0
- package/python/rl_asset_swapper.py +991 -0
- package/python/rl_upk_editor.py +3859 -0
|
@@ -0,0 +1,991 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
import argparse
|
|
3
|
+
import base64
|
|
4
|
+
import importlib
|
|
5
|
+
import importlib.util
|
|
6
|
+
import io
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import queue
|
|
10
|
+
import shutil
|
|
11
|
+
import struct
|
|
12
|
+
import sys
|
|
13
|
+
import threading
|
|
14
|
+
import traceback
|
|
15
|
+
from dataclasses import dataclass
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from typing import Callable, Dict, Iterable, List, Optional, Sequence, Tuple
|
|
18
|
+
|
|
19
|
+
import tkinter as tk
|
|
20
|
+
from tkinter import filedialog, messagebox, ttk
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass(frozen=True)
class Item:
    """One catalog entry from items.json describing an in-game asset package."""

    id: int
    product: str
    quality: str
    slot: str
    asset_package: str
    asset_path: str

    @property
    def package_stem(self) -> str:
        """Package file name without its extension."""
        return Path(self.asset_package).stem

    @property
    def asset_parts(self) -> List[str]:
        """Non-empty dot-separated components of the asset path."""
        return [piece for piece in self.asset_path.split(".") if piece]

    @property
    def asset_base(self) -> str:
        """First asset-path component, or the package stem minus a trailing '_SF'."""
        parts = self.asset_parts
        if parts:
            return parts[0]
        return self.package_stem.removesuffix("_SF")

    @property
    def thumbnail_package(self) -> str:
        """File name of the matching thumbnail (_T_SF) package."""
        return f"{self.asset_base}_T_SF.upk"

    @property
    def label(self) -> str:
        """Human-readable one-line description for list views and error text."""
        extras = ""
        if self.quality:
            extras += f" / {self.quality}"
        if self.slot:
            extras += f" / {self.slot}"
        return f"[{self.id}] {self.product}{extras} ({self.asset_package})"
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@dataclass
class SwapOptions:
    """Bundle of user-selected paths and flags that drives one swap run."""

    # Path to items.json (the item catalog).
    items_path: Path
    # Optional keys.txt for decryption; None lets the editor search for one.
    keys_path: Optional[Path]
    # Directory holding the donor (visual source) packages.
    donor_dir: Path
    # Directory where swapped packages are written.
    output_dir: Path
    # Optional directory with pristine packages, used as the encryption-key
    # source and as the revert source; falls back to donor_dir when None.
    key_source_dir: Optional[Path]
    # Also process the matching *_T_SF.upk thumbnail package.
    include_thumbnails: bool
    # Rename names in place (padded) so header offsets stay untouched.
    preserve_header_offsets: bool
    # Allow overwriting existing outputs (a .bak backup is taken first).
    overwrite: bool
    # Optional sink for progress messages.
    logger: Optional[Callable[[str], None]] = None
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def script_dir() -> Path:
    """Return the directory containing this script (or the frozen executable)."""
    frozen = getattr(sys, "frozen", False)
    anchor = sys.executable if frozen else __file__
    return Path(anchor).resolve().parent
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def default_path(names: Sequence[str]) -> Path:
    """Return the first existing candidate next to the script, else the first name."""
    base = script_dir()
    for name in names:
        candidate = base / name
        if candidate.exists():
            return candidate
    # Nothing found: default to the primary name (may not exist yet).
    return base / names[0]
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def import_rl_upk_editor():
    """Import the companion rl_upk_editor module.

    Tries a normal import first, then falls back to loading the file
    directly from a few well-known locations next to this script or the
    current working directory.

    Raises:
        ImportError: when no candidate file could be loaded.
    """
    try:
        return importlib.import_module("rl_upk_editor")
    except Exception:
        # Not importable from sys.path; fall back to file-based loading.
        pass

    here = script_dir()
    candidates = [
        here / "rl_upk_editor.py",
        here / "rl_upk_editor(1).py",  # common "duplicate download" name
        Path.cwd() / "rl_upk_editor.py",
        Path.cwd() / "rl_upk_editor(1).py",
    ]
    for candidate in candidates:
        if not candidate.exists():
            continue
        spec = importlib.util.spec_from_file_location("rl_upk_editor", candidate)
        if spec is None or spec.loader is None:
            continue
        module = importlib.util.module_from_spec(spec)
        # Register before exec_module so the module can reference itself.
        sys.modules["rl_upk_editor"] = module
        spec.loader.exec_module(module)
        return module

    raise ImportError("Put this script next to rl_upk_editor.py or rl_upk_editor(1).py")
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def load_items(path: Path) -> List[Item]:
    """Parse items.json into Item records, skipping malformed rows.

    Accepts either a top-level JSON list or an object holding an "Items"
    array. Rows lacking an asset package or asset path are ignored, and a
    single bad row never aborts the whole load. Results are sorted by slot,
    then product (case-insensitive), then id.
    """
    raw = json.loads(path.read_text(encoding="utf-8-sig"))
    # FIX: the original called raw.get("Items", ...) unconditionally, which
    # raised AttributeError for a top-level list even though the fallback
    # expression shows lists were meant to be supported.
    if isinstance(raw, list):
        rows = raw
    elif isinstance(raw, dict):
        rows = raw.get("Items", [])
    else:
        rows = []
    out: List[Item] = []
    for row in rows:
        try:
            pkg = str(row.get("AssetPackage", "") or "")
            asset_path = str(row.get("AssetPath", "") or "")
            if not pkg or not asset_path:
                continue
            out.append(Item(
                id=int(row.get("ID", 0) or 0),
                product=str(row.get("Product", "") or ""),
                quality=str(row.get("Quality", "") or ""),
                slot=str(row.get("Slot", "") or ""),
                asset_package=pkg,
                asset_path=asset_path,
            ))
        except Exception:
            # Best-effort parse: drop the row and keep going.
            continue
    out.sort(key=lambda x: (x.slot.lower(), x.product.lower(), x.id))
    return out
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def find_item(items: Sequence[Item], value: str, slot: str = "") -> Item:
    """Resolve a user-supplied id or search string to exactly one Item.

    Numeric values match on id; anything else is a case-insensitive
    substring search over product, package and asset path. When several
    rows match, a single exact product/package match wins.

    Raises:
        ValueError: when nothing matches or the match is ambiguous.
    """
    value = str(value).strip()
    pool = [item for item in items if not slot or item.slot.lower() == slot.lower()]
    if value.isdigit():
        wanted_id = int(value)
        matches = [item for item in pool if item.id == wanted_id]
    else:
        needle = value.lower()
        matches = [
            item
            for item in pool
            if needle in item.product.lower()
            or needle in item.asset_package.lower()
            or needle in item.asset_path.lower()
        ]
    if not matches:
        raise ValueError(f"No item matched {value!r}" + (f" in slot {slot!r}" if slot else ""))
    if len(matches) > 1:
        lowered = value.lower()
        exact = [
            item
            for item in matches
            if item.product.lower() == lowered or item.asset_package.lower() == lowered
        ]
        if len(exact) == 1:
            return exact[0]
        raise ValueError("Ambiguous item match:\n" + "\n".join(item.label for item in matches[:20]))
    return matches[0]
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def add_pair(pairs: List[Tuple[str, str]], old: str, new: str) -> None:
    """Append (old, new) to pairs, skipping blanks, no-op renames and duplicates."""
    left = (old or "").strip()
    right = (new or "").strip()
    if not left or not right:
        return
    if left == right:
        return
    candidate = (left, right)
    if candidate not in pairs:
        pairs.append(candidate)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def infer_name_pairs(target: Item, donor: Item) -> List[Tuple[str, str]]:
    """Derive donor->target rename pairs from the two items' asset paths."""
    pairs: List[Tuple[str, str]] = []
    donor_parts = donor.asset_parts
    target_parts = target.asset_parts
    same_shape = len(donor_parts) == len(target_parts)
    if not same_shape and donor_parts and target_parts:
        # Mismatched depth: anchor the outermost and innermost names first,
        # then map whatever zips together.
        add_pair(pairs, donor_parts[0], target_parts[0])
        add_pair(pairs, donor_parts[-1], target_parts[-1])
    for old, new in zip(donor_parts, target_parts):
        add_pair(pairs, old, new)
    # Always map the package stems as well.
    add_pair(pairs, donor.package_stem, target.package_stem)
    return pairs
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def infer_thumbnail_pairs(target: Item, donor: Item) -> List[Tuple[str, str]]:
    """Rename pairs for the thumbnail package (_T and _T_SF name variants)."""
    old_base = donor.asset_base
    new_base = target.asset_base
    return [
        (old_base + "_T", new_base + "_T"),
        (old_base + "_T_SF", new_base + "_T_SF"),
    ]
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def clean_name(text: str) -> str:
    """Drop everything after the first NUL and strip surrounding whitespace."""
    head, _, _ = str(text).partition("\x00")
    return head.strip()
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def find_name_indices(package, name: str) -> Tuple[List[int], bool]:
    """Find name-table indices whose cleaned text equals `name`.

    Returns (indices, fuzzy) where fuzzy is True only when the match was
    merely case-insensitive (no exact hit existed).
    """
    exact: List[int] = []
    for entry in package.names:
        if clean_name(entry.name) == name:
            exact.append(entry.index)
    if exact:
        return exact, False
    lowered = name.lower()
    fuzzy: List[int] = []
    for entry in package.names:
        if clean_name(entry.name).lower() == lowered:
            fuzzy.append(entry.index)
    return fuzzy, bool(fuzzy)
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def name_exists(package, name: str) -> bool:
    """True when the package's name table contains `name` (any case)."""
    indices, _ = find_name_indices(package, name)
    return len(indices) > 0
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def parse_name_entry_spans(upk, package) -> List[Tuple[int, int, int, int]]:
    """Walk the raw serialized name table and return one
    (start, end, length, flags_offset) tuple per entry.

    Each entry is: an int32 length prefix, the string bytes (positive
    length = ANSI byte count incl. terminator, negative = UTF-16 character
    count), then 8 bytes of name flags. `end` points just past the flags.

    The `upk` parameter is unused here; it is kept so the signature matches
    the other table helpers.

    Raises:
        ValueError: when the table runs past the end of the file bytes.
    """
    data = package.file_bytes
    pos = package.summary.name_offset
    spans: List[Tuple[int, int, int, int]] = []
    for _ in range(package.summary.name_count):
        start = pos
        if pos + 4 > len(data):
            raise ValueError("Name table is truncated")
        length = struct.unpack_from("<i", data, pos)[0]
        pos += 4
        if length > 0:
            byte_count = length          # ANSI: one byte per character
        elif length < 0:
            byte_count = -length * 2     # UTF-16: two bytes per character
        else:
            byte_count = 0
        pos += byte_count
        flags_offset = pos
        pos += 8                         # 8 flag bytes follow the text
        # FIX: bounds check after consuming the string/flag bytes. A
        # truncated entry previously surfaced as struct.error on the next
        # iteration (or produced an out-of-range span for the last entry)
        # instead of the intended ValueError.
        if pos > len(data):
            raise ValueError("Name table is truncated")
        spans.append((start, flags_offset + 8, length, flags_offset))
    return spans
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def make_fixed_fstring(old_len: int, new_text: str) -> Optional[bytes]:
    """Serialize `new_text` as an FString padded to exactly the old entry's size.

    The original length prefix is preserved, so the serialized entry keeps
    its byte size — this is what allows a rename without shifting any
    header offsets.

    Returns:
        The packed bytes, or None when the text does not fit (or is not
        ASCII for an ANSI slot, or old_len is 0).
    """
    if old_len > 0:
        # ANSI slot: old_len bytes including the NUL terminator.
        try:
            raw = new_text.encode("ascii")
        except UnicodeEncodeError:
            return None
        if len(raw) + 1 > old_len:
            return None
        return struct.pack("<i", old_len) + raw + b"\x00" + (b"\x00" * (old_len - len(raw) - 1))
    if old_len < 0:
        # UTF-16 slot: -old_len two-byte code units including the terminator.
        char_count = -old_len
        raw = new_text.encode("utf-16-le")
        # FIX: count UTF-16 code units (len(raw) // 2), not Python characters.
        # Non-BMP characters encode as surrogate pairs, so the old
        # len(new_text)-based arithmetic produced a payload whose size no
        # longer matched the reserved slot.
        unit_count = len(raw) // 2
        if unit_count + 1 > char_count:
            return None
        pad_units = char_count - unit_count - 1
        return struct.pack("<i", old_len) + raw + b"\x00\x00" + (b"\x00\x00" * pad_units)
    return None
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def fixed_rename_name_entry(upk, package, name_index: int, new_text: str):
    """Rename one name-table entry in place without changing its byte size.

    Builds a padded FString of the same serialized length (see
    make_fixed_fstring), splices it over the old entry, and re-parses the
    package bytes so all derived tables refresh.

    Returns:
        (new_package, pad) on success, or (None, 0) when the new text does
        not fit into the old slot.
    """
    spans = parse_name_entry_spans(upk, package)
    start, end, old_len, flags_offset = spans[name_index]
    payload = make_fixed_fstring(old_len, new_text)
    if payload is None:
        return None, 0
    # Preserve the original 8 flag bytes that follow the string text.
    flags = package.file_bytes[flags_offset:flags_offset + 8]
    replacement = payload + flags
    if len(replacement) != end - start:
        raise ValueError("Fixed name replacement length mismatch")
    data = bytearray(package.file_bytes)
    data[start:end] = replacement
    result = upk.parse_decrypted_package_bytes(package.file_path, bytes(data))
    old_display = clean_name(package.names[name_index].name)
    # Padding characters appended after the new text's terminator.
    # NOTE(review): counts Python characters, which for non-BMP UTF-16 text
    # may differ from code units — display-only value here.
    pad = max(0, abs(old_len) - len(new_text) - 1)
    # Breadcrumbs for callers/logging; not read by this module's own logic.
    setattr(result, "_fixed_rename_index", name_index)
    setattr(result, "_fixed_rename_old", old_display)
    setattr(result, "_fixed_rename_new", new_text)
    setattr(result, "_fixed_rename_pad", pad)
    return result, pad
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def patch_header_object_name_refs(upk, package, old_name: str, new_name: str) -> Tuple[object, List[str]]:
    """Repoint export/import table object-name references from one existing
    name-table entry to another.

    No name-table text changes; only the 4-byte name indices inside the
    export and import tables are patched, for use when the target name
    already exists in the package.

    Returns:
        (package, log) — the original package untouched when nothing was
        patched, otherwise a re-parsed package plus log lines.
    """
    old_indices, _ = find_name_indices(package, old_name)
    new_indices, _ = find_name_indices(package, new_name)
    if not old_indices or not new_indices:
        return package, []
    old_set = set(old_indices)
    new_idx = new_indices[0]
    data = bytearray(package.file_bytes)
    log: List[str] = []

    # Export entries: the object-name index is written 12 bytes into each
    # entry; entry start offsets come from the editor helper.
    if hasattr(upk, "get_export_entry_offsets"):
        offsets = upk.get_export_entry_offsets(package)
        for exp, off in zip(package.exports, offsets):
            if exp.object_name.name_index in old_set:
                data[off + 12:off + 16] = struct.pack("<i", new_idx)
                log.append(f"PATCHED: export[{exp.table_index}] object_name {old_name!r} -> existing {new_name!r}")

    # Import entries are fixed 28-byte records; object-name index at +20.
    import_off = package.summary.import_offset
    for imp in package.imports:
        off = import_off + imp.table_index * 28
        if imp.object_name.name_index in old_set:
            data[off + 20:off + 24] = struct.pack("<i", new_idx)
            log.append(f"PATCHED: import[{imp.table_index}] object_name {old_name!r} -> existing {new_name!r}")

    if not log:
        return package, []
    return upk.parse_decrypted_package_bytes(package.file_path, bytes(data)), log
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def apply_name_pairs(upk, package, pairs: Sequence[Tuple[str, str]], preserve_header_offsets: bool) -> Tuple[object, List[str]]:
    """Apply (old, new) name replacements to the package's name table.

    For every pair: entries already holding the *new* name are first renamed
    to a dummy so the real rename cannot collide, then each entry holding
    the *old* name is rewritten — in place (size-preserving) when
    preserve_header_offsets is True, otherwise via the editor's regular
    rename, which may shift header offsets.

    Returns:
        (package, log) — the updated package and human-readable log lines.
    """
    current = package
    log: List[str] = []
    for old, new in pairs:
        indices, case_match = find_name_indices(current, old)
        if not indices:
            log.append(f"MISS: no name-table entry matching {old!r}")
            continue
        if case_match:
            log.append(f"CASE: matched {old!r} case-insensitively")

        # FIX: Instead of only patching header refs (which causes a Header/Body desync crash),
        # we free up the target name if it already exists in the file's dictionary.
        colliding_indices, _ = find_name_indices(current, new)
        for c_idx in colliding_indices:
            dummy_name = f"FREEDNAME{c_idx}"  # No underscores, engine treats as pure base name
            try:
                current = upk.rename_name_entry(current, c_idx, dummy_name)
                log.append(f"FREED: Renamed colliding name at index {c_idx} to {dummy_name}")
            except Exception as e:
                log.append(f"WARN: Could not free colliding name: {e}")

        # Now force the physical text replacement so body and header stay perfectly synced
        for idx in indices:
            old_actual = clean_name(current.names[idx].name)
            if preserve_header_offsets:
                fixed, pad = fixed_rename_name_entry(upk, current, idx, new)
                if fixed is not None:
                    current = fixed
                    log.append(f"FIXED: name[{idx}] {old_actual!r} -> {new!r} in-place; preserved header offsets; pad={pad}.")
                    continue
                # Fell through: the new text did not fit in the old slot, so
                # use the regular (offset-shifting) rename below.
            try:
                current = upk.rename_name_entry(current, idx, new)
                log.append(f"RENAMED: name[{idx}] {old_actual!r} -> {new!r}; header offsets may change.")
            except Exception as e:
                log.append(f"ERROR: could not rename {old_actual!r}: {e}")

    return current, log
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
def load_provider(upk, keys_path: Optional[Path], donor_path: Path, script_path: Path):
    """Build a DecryptionProvider, preferring an explicitly supplied keys file.

    Falls back to the editor's own key discovery when available.
    Returns (provider, keys_path_used), or (None, None) when no keys exist.
    """
    if keys_path and keys_path.exists():
        return upk.DecryptionProvider(str(keys_path)), keys_path
    if hasattr(upk, "find_keys_path"):
        located = upk.find_keys_path(script_path, donor_path)
    else:
        located = None
    if not located:
        return None, None
    return upk.DecryptionProvider(str(located)), Path(located)
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def resolve_with_optional_keys(upk, input_path: Path, temp_dir: Path, keys_path: Optional[Path]):
    """Resolve (decrypt/decompress) a package, optionally forcing a keys file.

    The editor's resolve_input_package locates keys through its module-level
    find_keys_path hook; when the caller supplied an explicit keys_path we
    temporarily replace that hook so the forced path is used, restoring the
    original afterwards.

    NOTE(review): the hook swap is module-global and therefore not
    thread-safe — confirm swaps only run on one worker thread at a time.
    """
    if not keys_path:
        return upk.resolve_input_package(input_path, temp_dir, script_dir())
    old_find = getattr(upk, "find_keys_path", None)
    if old_find is None:
        # Editor build without the hook: nothing to override.
        return upk.resolve_input_package(input_path, temp_dir, script_dir())
    def forced(_script_dir, _selected_file):
        # Ignore the search arguments and always return the forced path.
        return keys_path
    upk.find_keys_path = forced
    try:
        return upk.resolve_input_package(input_path, temp_dir, script_dir())
    finally:
        upk.find_keys_path = old_find
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
def summary_line(package) -> str:
    """Compact one-line offset summary used for before/after logging."""
    first_export = package.exports[0].serial_offset if package.exports else 0
    fields = [
        f"names={package.summary.name_count}",
        f"depends={package.summary.depends_offset}",
        f"first_export={first_export}",
    ]
    return ", ".join(fields)
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
def build_reencrypted_package_with_output_key(upk, original_encrypted_path: Path, modified_decrypted_bytes: bytes, provider, output_path: Path, output_key: bytes) -> Path:
    """Rebuild an encrypted/compressed UPK from modified decrypted bytes,
    encrypting the header region with an explicit `output_key` rather than
    the key the original file was read with.

    The original file supplies the layout (plain prefix, encrypted header
    region, garbage gap, chunk boundaries); the modified bytes supply the
    new content. Chunks are re-compressed and the summary/compression
    metadata in the plain prefix is patched to the new offsets.

    Raises:
        ValueError: when the modified package cannot be mapped back onto
        the original chunk layout or does not fit the header region.
    """
    # The donor key is only used for *reading* the original file.
    summary, meta, original_encrypted_data, donor_key = upk.find_valid_key(original_encrypted_path, provider)
    modified_summary = upk.parse_file_summary(io.BytesIO(modified_decrypted_bytes))
    original_plain = bytearray(upk.DecryptionProvider.decrypt_ecb(donor_key, original_encrypted_data))
    original_chunks = upk.parse_rl_compressed_chunks(bytes(original_plain), meta.compressed_chunks_offset)
    if not original_chunks:
        raise ValueError("No compressed chunks were found in original encrypted header")

    # Chunk table sits right after the (relocated) header tables; offsets
    # inside the encrypted region are relative to name_offset.
    new_chunk_table_offset = modified_summary.depends_offset - modified_summary.name_offset
    patch_limit = max(0, new_chunk_table_offset)
    # How far every chunk boundary moved because the header grew or shrank.
    chunk_shift = modified_summary.depends_offset - original_chunks[0].uncompressed_offset

    rebuilt_chunks = []
    rebuilt_chunk_payloads = []
    # Placeholder table of the right size so the header length can be
    # computed before the real chunk entries are known.
    chunk_table_placeholder = upk.serialize_rl_chunk_table([
        upk.FCompressedChunk(0, 0, 0, 0) for _ in original_chunks
    ])
    required_plain_len = new_chunk_table_offset + len(chunk_table_placeholder)
    # Round up to a 16-byte boundary for the ECB block cipher.
    encrypted_plain_len = (required_plain_len + 15) & ~15
    header_plain = bytearray(encrypted_plain_len)
    copy_len = min(len(original_plain), encrypted_plain_len)
    header_plain[:copy_len] = original_plain[:copy_len]

    new_total_header_size = modified_summary.name_offset + encrypted_plain_len + meta.garbage_size
    current_compressed_offset = new_total_header_size
    # Re-slice the modified bytes along the (shifted) original chunk
    # boundaries and compress each slice.
    for i, chunk in enumerate(original_chunks):
        start = chunk.uncompressed_offset + chunk_shift
        if i + 1 < len(original_chunks):
            end = original_chunks[i + 1].uncompressed_offset + chunk_shift
            if end > len(modified_decrypted_bytes):
                raise ValueError("Modified decrypted package changed size too early for the rebuilt chunk layout")
        else:
            # The last chunk absorbs any growth/shrink of the package body.
            end = len(modified_decrypted_bytes)
        if end < start:
            raise ValueError("Invalid rebuilt chunk bounds")
        payload = upk.compress_chunk_payload(modified_decrypted_bytes[start:end])
        rebuilt_chunk_payloads.append(payload)
        rebuilt_chunks.append(upk.FCompressedChunk(
            uncompressed_offset=start,
            uncompressed_size=end - start,
            compressed_offset=current_compressed_offset,
            compressed_size=len(payload),
        ))
        current_compressed_offset += len(payload)

    if patch_limit > len(header_plain):
        raise ValueError("Modified decrypted header exceeds encrypted header capacity")
    if patch_limit > 0:
        # Overlay the modified header tables over the original plaintext,
        # up to where the chunk table begins.
        header_plain[:patch_limit] = modified_decrypted_bytes[summary.name_offset:modified_summary.depends_offset]

    chunk_table = upk.serialize_rl_chunk_table(rebuilt_chunks)
    table_end = new_chunk_table_offset + len(chunk_table)
    if table_end > len(header_plain):
        raise ValueError("Rebuilt compressed chunk table does not fit inside encrypted header")
    header_plain[new_chunk_table_offset:table_end] = chunk_table
    encrypted_header = upk.DecryptionProvider.encrypt_ecb(output_key, bytes(header_plain))

    # Patch the plaintext file prefix (summary fields) to the new layout.
    original_bytes = Path(original_encrypted_path).read_bytes()
    prefix = bytearray(original_bytes[:summary.name_offset])
    summary_offsets = upk._find_summary_offsets(modified_decrypted_bytes)
    upk.patch_i32_le(prefix, summary_offsets["total_header_size_offset"], new_total_header_size)
    upk.patch_i32_le(prefix, summary_offsets["name_count_offset"], modified_summary.name_count)
    upk.patch_i32_le(prefix, summary_offsets["name_offset_offset"], modified_summary.name_offset)
    upk.patch_i32_le(prefix, summary_offsets["export_count_offset"], modified_summary.export_count)
    upk.patch_i32_le(prefix, summary_offsets["export_offset_offset"], modified_summary.export_offset)
    upk.patch_i32_le(prefix, summary_offsets["import_count_offset"], modified_summary.import_count)
    upk.patch_i32_le(prefix, summary_offsets["import_offset_offset"], modified_summary.import_offset)
    upk.patch_i32_le(prefix, summary_offsets["depends_offset_offset"], modified_summary.depends_offset)
    upk.patch_i32_le(prefix, summary_offsets["import_export_guids_offset_offset"], modified_summary.import_export_guids_offset)
    if "thumbnail_table_offset_offset" in summary_offsets:
        upk.patch_i32_le(prefix, summary_offsets["thumbnail_table_offset_offset"], modified_summary.thumbnail_table_offset)
    upk._patch_generation_counts(prefix, summary_offsets, modified_summary.export_count, modified_summary.name_count)
    with original_encrypted_path.open("rb") as src:
        meta_offsets = upk._find_file_compression_metadata_offsets(src)
    upk.patch_i32_le(prefix, meta_offsets["compressed_chunks_offset_offset"], new_chunk_table_offset)
    if rebuilt_chunks:
        upk.patch_i32_le(prefix, meta_offsets["last_block_size_offset"], rebuilt_chunks[-1].uncompressed_size)

    # Assemble: plain prefix + encrypted header + original garbage gap +
    # re-compressed chunk payloads.
    output = bytearray()
    output += prefix
    output += encrypted_header
    # NOTE(review): gap_start is computed but never used below.
    gap_start = modified_summary.name_offset + len(encrypted_header)
    original_gap_start = summary.name_offset + len(original_encrypted_data)
    original_gap_end = original_chunks[0].compressed_offset
    gap_bytes = original_bytes[original_gap_start:original_gap_end]
    if len(gap_bytes) != meta.garbage_size:
        # Fall back to slicing the gap from its tail when the sizes disagree.
        gap_bytes = original_bytes[original_gap_end - meta.garbage_size:original_gap_end]
    output += gap_bytes
    for payload in rebuilt_chunk_payloads:
        output += payload

    output_path.parent.mkdir(parents=True, exist_ok=True)
    output_path.write_bytes(output)
    return output_path
|
|
461
|
+
|
|
462
|
+
def build_output(upk, donor_path: Path, target_key_path: Path, modified, provider, output_path: Path, was_encrypted: bool, log: List[str]) -> None:
    """Write the modified package to output_path.

    Encrypted inputs are re-encrypted — preferably with the key of the
    target/original package (looked up from target_key_path), falling back
    to the donor's key. Unencrypted inputs are written out decrypted as-is.
    All progress is appended to `log`.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)
    if was_encrypted and provider is not None:
        override_key = None
        if target_key_path.exists() and hasattr(upk, "find_key_for_encrypted_upk"):
            override_key = upk.find_key_for_encrypted_upk(target_key_path, provider)
            log.append(f"Output key source: {target_key_path}")
            # FIX: only log the key once we know one was actually found;
            # b64encode(None) would raise TypeError.
            if override_key is not None:
                log.append(f"Encrypting with key from target/original {target_key_path.name}: {base64.b64encode(override_key).decode()}")
        elif target_key_path.exists():
            log.append(f"Output key source exists but rl_upk_editor has no find_key_for_encrypted_upk: {target_key_path}")
        else:
            log.append(f"WARN: target key source missing, falling back to donor key: {target_key_path}")
        # FIX: the original used a conditional *expression* as a statement
        # purely for its side effects; rewritten as an explicit branch.
        if override_key is not None:
            build_reencrypted_package_with_output_key(upk, donor_path, modified.file_bytes, provider, output_path, override_key)
        else:
            upk.build_reencrypted_package(donor_path, modified.file_bytes, provider, output_path)
        if override_key is not None:
            try:
                # Round-trip check: the written file must decrypt with the
                # intended target key.
                check_provider = upk.DecryptionProvider(None)
                check_provider.decryption_keys = [override_key]
                upk.find_valid_key(output_path, check_provider)
                log.append("Verified output decrypts with the target/original package key.")
            except Exception as exc:
                log.append(f"WARN: output key verification failed: {exc}")
        log.append("Saved encrypted/compressed output.")
    else:
        output_path.write_bytes(modified.file_bytes)
        log.append("Saved decrypted/decompressed output because input was not encrypted.")
|
|
487
|
+
|
|
488
|
+
|
|
489
|
+
def swap_one_package(upk, source_path: Path, output_path: Path, key_source_path: Path, pairs: Sequence[Tuple[str, str]], options: SwapOptions) -> Tuple[Path, List[str]]:
    """Swap one package: resolve the donor, apply name renames, write output.

    Args:
        source_path: donor package supplying the visuals.
        output_path: where the swapped package is written (target's name).
        key_source_path: pristine target package used as the key source.
        pairs: (old, new) name-table replacements to apply.

    Returns:
        (output_path, log lines).

    Raises:
        FileNotFoundError: when the donor package is missing.
        FileExistsError: when the output exists and overwrite is disabled.
    """
    log: List[str] = []
    if not source_path.exists():
        raise FileNotFoundError(f"Source package not found: {source_path}")
    if output_path.exists() and not options.overwrite:
        raise FileExistsError(f"Output already exists: {output_path}")

    # Scratch directory for decrypted intermediates, kept next to the script.
    temp_dir = script_dir() / "AssetSwapper_Decrypted"
    temp_dir.mkdir(exist_ok=True)

    log.append(f"Input source: {source_path}")
    log.append(f"Output target: {output_path}")
    log.append(f"Key source target: {key_source_path}")

    resolved_path, package, provider, actual_keys_path, was_encrypted = resolve_with_optional_keys(upk, source_path, temp_dir, options.keys_path)
    log.append(f"Resolved package: {resolved_path}")
    if actual_keys_path:
        log.append(f"Keys file: {actual_keys_path}")
    log.append(f"Original offsets: {summary_line(package)}")

    log.append("Name-table changes:")
    for old, new in pairs:
        log.append(f" {old!r} -> {new!r}")

    modified, rename_log = apply_name_pairs(upk, package, pairs, options.preserve_header_offsets)
    log.extend(rename_log)
    log.append(f"Modified offsets: {summary_line(modified)}")

    # Take a backup before clobbering an existing output file.
    if output_path.exists() and options.overwrite:
        backup_path = output_path.with_suffix(output_path.suffix + ".bak")
        shutil.copy2(output_path, backup_path)
        log.append(f"Backup written: {backup_path}")

    build_output(upk, source_path, key_source_path, modified, provider, output_path, was_encrypted, log)
    return output_path, log
|
|
524
|
+
|
|
525
|
+
|
|
526
|
+
def swap_asset(upk, target: Item, donor: Item, options: SwapOptions) -> Tuple[List[Path], List[str]]:
    """Swap the donor item's visuals onto the target item's package name.

    Runs the main package swap and, when enabled, the matching thumbnail
    (_T_SF) package. Both items must occupy the same slot.

    Returns:
        (written paths, combined log lines).

    Raises:
        ValueError: when the target and donor slots differ.
    """
    if target.slot != donor.slot:
        raise ValueError(f"Slot mismatch: target={target.slot!r}, donor={donor.slot!r}")
    # Keys/reverts come from the pristine source dir when one is configured.
    key_dir = options.key_source_dir or options.donor_dir
    all_paths: List[Path] = []
    all_log: List[str] = []
    all_log.append(f"Target/replaced item: {target.label}")
    all_log.append(f"Donor/visual item: {donor.label}")
    main_path, main_log = swap_one_package(
        upk,
        options.donor_dir / donor.asset_package,
        options.output_dir / target.asset_package,
        key_dir / target.asset_package,
        infer_name_pairs(target, donor),
        options,
    )
    all_paths.append(main_path)
    all_log.extend(main_log)

    if options.include_thumbnails:
        donor_thumb = options.donor_dir / donor.thumbnail_package
        target_thumb = options.output_dir / target.thumbnail_package
        key_thumb = key_dir / target.thumbnail_package
        if donor_thumb.exists() and key_thumb.exists():
            all_log.append("")
            all_log.append("Thumbnail/_T_SF pass:")
            thumb_path, thumb_log = swap_one_package(upk, donor_thumb, target_thumb, key_thumb, infer_thumbnail_pairs(target, donor), options)
            all_paths.append(thumb_path)
            all_log.extend(thumb_log)
        else:
            # Report whichever side is missing (donor thumbnail or key source).
            all_log.append(f"SKIP thumbnails: missing {donor_thumb if not donor_thumb.exists() else key_thumb}")
    else:
        all_log.append("SKIP thumbnails: disabled.")

    return all_paths, all_log
|
|
561
|
+
|
|
562
|
+
|
|
563
|
+
def revert_item(target: Item, options: SwapOptions) -> Tuple[List[Path], List[str]]:
    """Restore the target item's package(s) by copying pristine originals
    from the key/revert source directory into the output directory.

    Returns:
        (copied destination paths, log lines).
    """
    src_dir = options.key_source_dir or options.donor_dir
    copied: List[Path] = []
    log: List[str] = []
    jobs = [(src_dir / target.asset_package, options.output_dir / target.asset_package)]
    if options.include_thumbnails:
        jobs.append((src_dir / target.thumbnail_package, options.output_dir / target.thumbnail_package))
    for src, dst in jobs:
        if not src.exists():
            log.append(f"MISS: revert source not found: {src}")
            continue
        dst.parent.mkdir(parents=True, exist_ok=True)
        if dst.exists() and options.overwrite:
            # Keep a backup of whatever is being replaced.
            backup_path = dst.with_suffix(dst.suffix + ".bak")
            shutil.copy2(dst, backup_path)
            log.append(f"Backup written: {backup_path}")
        shutil.copy2(src, dst)
        copied.append(dst)
        log.append(f"Reverted: {src} -> {dst}")
    return copied, log
|
|
583
|
+
|
|
584
|
+
|
|
585
|
+
class AssetSwapperApp:
|
|
586
|
+
    def __init__(self, root: tk.Tk, args: Optional[argparse.Namespace] = None):
        """Build the application state and widgets.

        CLI args (when provided) pre-populate the path fields and flags so
        the GUI can launch with everything pre-selected.
        """
        self.root = root
        self.root.title("RL Asset Swapper")
        self.root.geometry("1200x800")
        # Companion module that performs the actual UPK parsing/encryption.
        self.upk = import_rl_upk_editor()
        args = args or argparse.Namespace()

        # Path fields fall back to files found next to the script, including
        # common "duplicate download" names like items(4).json / keys(1).txt.
        self.items_path = tk.StringVar(value=str(getattr(args, "items", None) or default_path(("items.json", "items(4).json"))))
        self.keys_path = tk.StringVar(value=str(getattr(args, "keys", None) or default_path(("keys.txt", "keys(1).txt"))))
        self.donor_dir = tk.StringVar(value=str(getattr(args, "donor_dir", "") or ""))
        self.out_dir = tk.StringVar(value=str(getattr(args, "output_dir", "") or ""))
        self.key_source_dir = tk.StringVar(value=str(getattr(args, "key_source_dir", "") or ""))
        self.slot_var = tk.StringVar(value=str(getattr(args, "slot", "") or ""))
        self.target_search = tk.StringVar(value=str(getattr(args, "target", "") or ""))
        self.donor_search = tk.StringVar(value=str(getattr(args, "donor", "") or ""))
        self.overwrite_var = tk.BooleanVar(value=bool(getattr(args, "overwrite", True)))
        self.thumbnails_var = tk.BooleanVar(value=bool(getattr(args, "include_thumbnails", False)))
        self.preserve_offsets_var = tk.BooleanVar(value=bool(getattr(args, "preserve_header_offsets", True)))
        self.status_var = tk.StringVar(value="Load items.json, select folders, choose slot, then choose target and donor items.")

        # Item catalog and the filtered views shown in the two list panes.
        self.items: List[Item] = []
        self.target_items: List[Item] = []
        self.donor_items: List[Item] = []
        # Results from the background worker arrive through this queue.
        self.worker_queue: queue.Queue = queue.Queue()
        self.slot_values: List[str] = []

        self.build_ui()
        if Path(self.items_path.get()).exists():
            self.reload_items()
        # Poll the worker queue on the Tk event loop.
        self.root.after(100, self.poll_worker_queue)
|
|
616
|
+
|
|
617
|
+
def build_ui(self) -> None:
    """Lay out the window: file pickers, options row, item lists, preview and log."""
    main = ttk.Frame(self.root, padding=8)
    main.pack(fill="both", expand=True)

    # --- File/directory pickers -------------------------------------------
    files = ttk.LabelFrame(main, text="Files")
    files.pack(fill="x")
    files.columnconfigure(1, weight=1)
    files.columnconfigure(4, weight=1)

    ttk.Label(files, text="items.json").grid(row=0, column=0, sticky="w", padx=4, pady=3)
    ttk.Entry(files, textvariable=self.items_path).grid(row=0, column=1, sticky="ew", padx=4, pady=3)
    ttk.Button(files, text="Browse", command=self.browse_items).grid(row=0, column=2, padx=4, pady=3)
    ttk.Button(files, text="Reload", command=self.reload_items).grid(row=0, column=3, padx=4, pady=3)
    ttk.Label(files, text="keys.txt").grid(row=0, column=4, sticky="e", padx=4, pady=3)
    ttk.Entry(files, textvariable=self.keys_path, width=35).grid(row=0, column=5, sticky="ew", padx=4, pady=3)
    ttk.Button(files, text="Browse", command=self.browse_keys).grid(row=0, column=6, padx=4, pady=3)

    ttk.Label(files, text="Donor/input directory").grid(row=1, column=0, sticky="w", padx=4, pady=3)
    ttk.Entry(files, textvariable=self.donor_dir).grid(row=1, column=1, columnspan=5, sticky="ew", padx=4, pady=3)
    ttk.Button(files, text="Browse", command=self.browse_donor_dir).grid(row=1, column=6, padx=4, pady=3)

    ttk.Label(files, text="Output directory").grid(row=2, column=0, sticky="w", padx=4, pady=3)
    ttk.Entry(files, textvariable=self.out_dir).grid(row=2, column=1, columnspan=5, sticky="ew", padx=4, pady=3)
    ttk.Button(files, text="Browse", command=self.browse_out_dir).grid(row=2, column=6, padx=4, pady=3)

    ttk.Label(files, text="Key/revert source dir").grid(row=3, column=0, sticky="w", padx=4, pady=3)
    ttk.Entry(files, textvariable=self.key_source_dir).grid(row=3, column=1, columnspan=5, sticky="ew", padx=4, pady=3)
    ttk.Button(files, text="Browse", command=self.browse_key_source_dir).grid(row=3, column=6, padx=4, pady=3)

    # --- Options + action row ---------------------------------------------
    top = ttk.Frame(main)
    top.pack(fill="x", pady=(8, 4))
    ttk.Label(top, text="Slot").pack(side="left")
    self.slot_combo = ttk.Combobox(top, textvariable=self.slot_var, state="readonly", width=36)
    self.slot_combo.pack(side="left", padx=(6, 12))
    # Changing the slot resets both list selections.
    self.slot_combo.bind("<<ComboboxSelected>>", lambda _e: self.refresh_lists(clear_selection=True))
    ttk.Checkbutton(top, text="Also swap thumbnails/_T_SF", variable=self.thumbnails_var, command=self.update_preview).pack(side="left", padx=4)
    ttk.Checkbutton(top, text="Preserve header offsets for shorter names", variable=self.preserve_offsets_var, command=self.update_preview).pack(side="left", padx=4)
    ttk.Checkbutton(top, text="Overwrite + .bak", variable=self.overwrite_var).pack(side="left", padx=4)
    ttk.Button(top, text="Revert selected target", command=self.start_revert).pack(side="right", padx=4)
    ttk.Button(top, text="Swap", command=self.start_swap).pack(side="right", padx=4)

    # --- Target / donor list panes ----------------------------------------
    lists = ttk.Frame(main)
    lists.pack(fill="both", expand=True)
    lists.columnconfigure(0, weight=1)
    lists.columnconfigure(1, weight=1)
    lists.rowconfigure(2, weight=1)

    ttk.Label(lists, text="Target item to replace").grid(row=0, column=0, sticky="w")
    ttk.Label(lists, text="Replacement/donor item").grid(row=0, column=1, sticky="w")

    ttk.Entry(lists, textvariable=self.target_search).grid(row=1, column=0, sticky="ew", padx=(0, 5), pady=(0, 4))
    ttk.Entry(lists, textvariable=self.donor_search).grid(row=1, column=1, sticky="ew", padx=(5, 0), pady=(0, 4))
    # Live filtering: re-filter the lists as the user types.
    self.target_search.trace_add("write", lambda *_: self.refresh_target_list())
    self.donor_search.trace_add("write", lambda *_: self.refresh_donor_list())

    left = ttk.Frame(lists)
    right = ttk.Frame(lists)
    left.grid(row=2, column=0, sticky="nsew", padx=(0, 5))
    right.grid(row=2, column=1, sticky="nsew", padx=(5, 0))
    for frame in (left, right):
        frame.rowconfigure(0, weight=1)
        frame.columnconfigure(0, weight=1)

    # exportselection=False keeps both listboxes' selections active at once.
    self.target_list = tk.Listbox(left, activestyle="dotbox", exportselection=False)
    self.target_list.grid(row=0, column=0, sticky="nsew")
    target_scroll = ttk.Scrollbar(left, orient="vertical", command=self.target_list.yview)
    target_scroll.grid(row=0, column=1, sticky="ns")
    self.target_list.configure(yscrollcommand=target_scroll.set)
    self.target_list.bind("<<ListboxSelect>>", lambda _e: self.update_preview())

    self.donor_list = tk.Listbox(right, activestyle="dotbox", exportselection=False)
    self.donor_list.grid(row=0, column=0, sticky="nsew")
    donor_scroll = ttk.Scrollbar(right, orient="vertical", command=self.donor_list.yview)
    donor_scroll.grid(row=0, column=1, sticky="ns")
    self.donor_list.configure(yscrollcommand=donor_scroll.set)
    self.donor_list.bind("<<ListboxSelect>>", lambda _e: self.update_preview())

    # --- Preview + log panes ----------------------------------------------
    bottom = ttk.PanedWindow(main, orient="vertical")
    bottom.pack(fill="both", expand=False, pady=(8, 0))

    preview_frame = ttk.LabelFrame(bottom, text="Preview")
    self.preview = tk.Text(preview_frame, height=7, wrap="none")
    self.preview.pack(fill="both", expand=True)
    bottom.add(preview_frame, weight=1)

    log_frame = ttk.LabelFrame(bottom, text="Log")
    self.log = tk.Text(log_frame, height=10, wrap="none")
    self.log.pack(fill="both", expand=True)
    bottom.add(log_frame, weight=1)

    # Status bar at the very bottom of the window.
    ttk.Label(main, textvariable=self.status_var, anchor="w").pack(fill="x", pady=(4, 0))
def browse_items(self) -> None:
    """Prompt for an items.json file; on selection, store the path and reload."""
    chosen = filedialog.askopenfilename(title="Select items.json", filetypes=[("JSON", "*.json"), ("All files", "*.*")])
    if not chosen:
        return
    self.items_path.set(chosen)
    self.reload_items()
def browse_keys(self) -> None:
    """Prompt for a keys.txt file and store the chosen path."""
    chosen = filedialog.askopenfilename(title="Select keys.txt", filetypes=[("Text", "*.txt"), ("All files", "*.*")])
    if not chosen:
        return
    self.keys_path.set(chosen)
def browse_donor_dir(self) -> None:
    """Prompt for the donor/input UPK directory.

    Seeds the output and key-source directories with the same path when
    they are still empty, since they usually coincide.
    """
    chosen = filedialog.askdirectory(title="Select donor/input UPK directory")
    if not chosen:
        return
    self.donor_dir.set(chosen)
    for var in (self.out_dir, self.key_source_dir):
        if not var.get():
            var.set(chosen)
def browse_out_dir(self) -> None:
    """Prompt for the output directory and store the chosen path."""
    chosen = filedialog.askdirectory(title="Select output directory")
    if not chosen:
        return
    self.out_dir.set(chosen)
def browse_key_source_dir(self) -> None:
    """Prompt for the key/revert source directory and store the chosen path."""
    chosen = filedialog.askdirectory(title="Select key/revert source directory")
    if not chosen:
        return
    self.key_source_dir.set(chosen)
def reload_items(self) -> None:
    """Re-read items.json, rebuild the slot filter, and refresh both lists.

    Any load failure is reported in a modal error dialog instead of raising.
    """
    try:
        self.items = load_items(Path(self.items_path.get()))
        available = sorted({entry.slot for entry in self.items if entry.slot})
        self.slot_values = available
        self.slot_combo["values"] = available
        # Keep the current slot if still valid, otherwise pick the first one.
        if available and self.slot_var.get() not in available:
            self.slot_var.set(available[0])
        self.refresh_lists(clear_selection=True)
        self.status_var.set(f"Loaded {len(self.items)} items. Slot filter is active.")
    except Exception as exc:
        messagebox.showerror("Failed to load items", str(exc))
def rows_for(self, text: str) -> List[Item]:
    """Return items passing the active slot filter and the search string.

    The query matches case-insensitively against product name, package
    name, or asset path, or exactly against the stringified item id.
    """
    slot = self.slot_var.get()
    if slot:
        rows = [entry for entry in self.items if entry.slot == slot]
    else:
        rows = list(self.items)
    query = text.strip().lower()
    if not query:
        return rows

    def matches(entry: Item) -> bool:
        # Substring match on the text fields; exact match on the id.
        return (
            query in entry.product.lower()
            or query in entry.asset_package.lower()
            or query in entry.asset_path.lower()
            or query == str(entry.id)
        )

    return [entry for entry in rows if matches(entry)]
def refresh_lists(self, clear_selection: bool = False) -> None:
    """Rebuild both listboxes, then redraw the preview pane."""
    for refresh in (self.refresh_target_list, self.refresh_donor_list):
        refresh(clear_selection=clear_selection)
    self.update_preview()
def refresh_target_list(self, clear_selection: bool = False) -> None:
    """Repopulate the target listbox from the current slot filter and search.

    When clear_selection is False, the previously selected item (matched
    by id) is re-selected and scrolled into view if it is still listed.
    """
    # Capture the current selection once; the original evaluated
    # self.selected_target() twice (condition and value), re-running the
    # listbox lookup for no benefit.
    previous = None if clear_selection else self.selected_target()
    old_id = previous.id if previous else None
    self.target_items = self.rows_for(self.target_search.get())
    self.target_list.delete(0, tk.END)
    restore = None
    for idx, item in enumerate(self.target_items):
        self.target_list.insert(tk.END, item.label)
        if old_id is not None and item.id == old_id:
            restore = idx
    if restore is not None:
        self.target_list.selection_set(restore)
        self.target_list.see(restore)
def refresh_donor_list(self, clear_selection: bool = False) -> None:
    """Repopulate the donor listbox from the current slot filter and search.

    When clear_selection is False, the previously selected item (matched
    by id) is re-selected and scrolled into view if it is still listed.
    """
    # Capture the current selection once; the original evaluated
    # self.selected_donor() twice (condition and value), re-running the
    # listbox lookup for no benefit.
    previous = None if clear_selection else self.selected_donor()
    old_id = previous.id if previous else None
    self.donor_items = self.rows_for(self.donor_search.get())
    self.donor_list.delete(0, tk.END)
    restore = None
    for idx, item in enumerate(self.donor_items):
        self.donor_list.insert(tk.END, item.label)
        if old_id is not None and item.id == old_id:
            restore = idx
    if restore is not None:
        self.donor_list.selection_set(restore)
        self.donor_list.see(restore)
def selected_target(self) -> Optional[Item]:
    """Return the highlighted target Item, or None when nothing is selected."""
    selection = self.target_list.curselection()
    if not selection:
        return None
    return self.target_items[selection[0]]
def selected_donor(self) -> Optional[Item]:
    """Return the highlighted donor Item, or None when nothing is selected."""
    selection = self.donor_list.curselection()
    if not selection:
        return None
    return self.donor_items[selection[0]]
def update_preview(self) -> None:
    """Render a dry-run description of the pending swap into the preview pane."""
    target = self.selected_target()
    donor = self.selected_donor()
    self.preview.delete("1.0", tk.END)
    if not target or not donor:
        # Nothing actionable yet; show the active filter and a hint instead.
        slot = self.slot_var.get() or "<none>"
        self.preview.insert(tk.END, f"Slot filter: {slot}\nSelect a target item and a donor item.\n")
        return
    lines = [
        f"Slot filter: {self.slot_var.get()}",
        f"Output file: {target.asset_package}",
        f"Input file: {donor.asset_package}",
        f"Preserve shorter-name offsets: {self.preserve_offsets_var.get()}",
        "",
        "Main package replacements:",
    ]
    # Name pairs come from module-level helpers shared with the actual swap,
    # so the preview reflects exactly what would be rewritten.
    for old, new in infer_name_pairs(target, donor):
        lines.append(f" {old!r} -> {new!r}")
    if self.thumbnails_var.get():
        lines.append("")
        lines.append(f"Thumbnail file: {donor.thumbnail_package} -> {target.thumbnail_package}")
        for old, new in infer_thumbnail_pairs(target, donor):
            lines.append(f" {old!r} -> {new!r}")
    self.preview.insert(tk.END, "\n".join(lines) + "\n")
def make_options(self) -> SwapOptions:
    """Validate the directory inputs and assemble a SwapOptions bundle.

    Raises ValueError when the donor/input or output directory is unset.
    A keys file path that does not exist on disk is treated as absent.
    """
    donor = self.donor_dir.get()
    if not donor:
        raise ValueError("Select donor/input directory")
    output = self.out_dir.get()
    if not output:
        raise ValueError("Select output directory")
    keys_text = self.keys_path.get()
    keys = Path(keys_text) if keys_text else None
    if keys is not None and not keys.exists():
        keys = None
    source_text = self.key_source_dir.get()
    key_source = Path(source_text) if source_text else None
    return SwapOptions(
        items_path=Path(self.items_path.get()),
        keys_path=keys,
        donor_dir=Path(donor),
        output_dir=Path(output),
        key_source_dir=key_source,
        include_thumbnails=self.thumbnails_var.get(),
        preserve_header_offsets=self.preserve_offsets_var.get(),
        overwrite=self.overwrite_var.get(),
    )
def append_log(self, text: str) -> None:
    """Append one right-trimmed line to the log widget and scroll to the end."""
    line = text.rstrip() + "\n"
    self.log.insert(tk.END, line)
    self.log.see(tk.END)
def start_swap(self) -> None:
    """Validate selections and inputs, then launch the swap worker thread."""
    target = self.selected_target()
    donor = self.selected_donor()
    if target is None or donor is None:
        messagebox.showwarning("Missing selection", "Select both a target item and a donor item.")
        return
    # Swapping across slots would produce a broken package; refuse early.
    if target.slot != donor.slot:
        messagebox.showerror("Slot mismatch", "Target and donor items must be from the same slot.")
        return
    try:
        options = self.make_options()
    except Exception as exc:
        messagebox.showwarning("Missing input", str(exc))
        return
    self.log.delete("1.0", tk.END)
    self.status_var.set("Working...")
    worker = threading.Thread(target=self.worker_swap, args=(target, donor, options), daemon=True)
    worker.start()
def start_revert(self) -> None:
    """Validate the target selection and inputs, then launch the revert worker."""
    target = self.selected_target()
    if target is None:
        messagebox.showwarning("Missing selection", "Select the target item to revert.")
        return
    try:
        options = self.make_options()
    except Exception as exc:
        messagebox.showwarning("Missing input", str(exc))
        return
    self.log.delete("1.0", tk.END)
    self.status_var.set("Reverting...")
    worker = threading.Thread(target=self.worker_revert, args=(target, options), daemon=True)
    worker.start()
def worker_swap(self, target: Item, donor: Item, options: SwapOptions) -> None:
    """Background thread: run the swap and post the outcome to worker_queue."""
    try:
        paths, log = swap_asset(self.upk, target, donor, options)
        self.worker_queue.put(("ok", paths, log))
    except Exception as exc:
        # Ship both the short message and the full traceback to the UI thread.
        self.worker_queue.put(("err", str(exc), traceback.format_exc()))
def worker_revert(self, target: Item, options: SwapOptions) -> None:
    """Background thread: run the revert and post the outcome to worker_queue."""
    try:
        paths, log = revert_item(target, options)
        self.worker_queue.put(("ok", paths, log))
    except Exception as exc:
        # Ship both the short message and the full traceback to the UI thread.
        self.worker_queue.put(("err", str(exc), traceback.format_exc()))
def poll_worker_queue(self) -> None:
    """Drain worker results on the Tk thread, then re-schedule this poll.

    Messages are ("ok", paths, log_lines) or ("err", message, traceback).
    """
    try:
        while True:
            kind, payload, detail = self.worker_queue.get_nowait()
            if kind == "ok":
                for line in detail:
                    self.append_log(line)
                saved = ", ".join(str(p) for p in payload)
                self.status_var.set("Done: " + saved)
                messagebox.showinfo("Complete", "Saved:\n" + "\n".join(str(p) for p in payload))
            else:
                self.append_log(detail)
                self.status_var.set("Failed")
                messagebox.showerror("Failed", payload)
    except queue.Empty:
        pass
    self.root.after(100, self.poll_worker_queue)
def build_arg_parser() -> argparse.ArgumentParser:
    """Construct the CLI parser; GUI defaults double as the CLI defaults."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--items", type=Path, default=default_path(("items.json", "items(4).json")))
    parser.add_argument("--keys", type=Path, default=None)
    parser.add_argument("--donor-dir", "--upk-dir", "--input-dir", dest="donor_dir", type=Path, default=None)
    parser.add_argument("--output-dir", "--out-dir", dest="output_dir", type=Path, default=None)
    parser.add_argument("--key-source-dir", type=Path, default=None)
    # Plain string selectors.
    for flag in ("--slot", "--target", "--donor"):
        parser.add_argument(flag, default="")
    # Mode switches.
    for flag in ("--auto-swap", "--no-gui", "--revert"):
        parser.add_argument(flag, action="store_true")
    # Paired on/off switches sharing one destination, each with its default.
    for on, off, dest, default in (
        ("--include-thumbnails", "--no-thumbnails", "include_thumbnails", False),
        ("--preserve-header-offsets", "--no-preserve-header-offsets", "preserve_header_offsets", True),
        ("--overwrite", "--no-overwrite", "overwrite", True),
    ):
        group = parser.add_mutually_exclusive_group()
        group.add_argument(on, dest=dest, action="store_true", default=default)
        group.add_argument(off, dest=dest, action="store_false")
    return parser
def cli_run(args: argparse.Namespace) -> int:
    """Headless entry point: resolve items/keys and run one swap or revert.

    Returns 0 on success; raises SystemExit with a usage message when a
    required argument is missing.
    """
    if not args.donor_dir or not args.output_dir:
        raise SystemExit("--donor-dir and --output-dir are required for --no-gui/--auto-swap/--revert")
    if args.revert and not args.target:
        raise SystemExit("--target is required for --revert")
    if not args.revert and (not args.target or not args.donor):
        raise SystemExit("--target and --donor are required")
    upk = import_rl_upk_editor()
    items = load_items(args.items)
    target = find_item(items, str(args.target), args.slot)
    # Donor lookup falls back to the target's slot when --slot was not given;
    # with no --donor (revert path) the donor is just the target itself.
    donor = find_item(items, str(args.donor), target.slot if not args.slot else args.slot) if args.donor else target
    keys = args.keys
    if keys is None:
        # Fall back to well-known key-file locations: next to the script,
        # the working directory, then the donor directory.
        for candidate in (script_dir() / "keys.txt", script_dir() / "keys(1).txt", Path.cwd() / "keys.txt", args.donor_dir / "keys.txt"):
            if candidate.exists():
                keys = candidate
                break
    options = SwapOptions(
        items_path=args.items,
        keys_path=keys,
        donor_dir=args.donor_dir,
        output_dir=args.output_dir,
        key_source_dir=args.key_source_dir,
        include_thumbnails=args.include_thumbnails,
        preserve_header_offsets=args.preserve_header_offsets,
        overwrite=args.overwrite,
    )
    if args.revert:
        _, log = revert_item(target, options)
    else:
        _, log = swap_asset(upk, target, donor, options)
    for line in log:
        print(line)
    return 0
def main() -> int:
    """Program entry point: run headless when requested, otherwise start the GUI."""
    args = build_arg_parser().parse_args()
    headless = args.no_gui or args.auto_swap or args.revert
    if headless:
        return cli_run(args)
    root = tk.Tk()
    AssetSwapperApp(root, args)
    root.mainloop()
    return 0
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main())