rl-item-mod 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3859 @@
1
+ #!/usr/bin/env python3
2
+ import argparse
3
+ import base64
4
+ import concurrent.futures
5
+ import ctypes
6
+ import hashlib
7
+ import io
8
+ import os
9
+ import struct
10
+ import sys
11
+ import threading
12
+ import traceback
13
+ import zlib
14
+ from dataclasses import dataclass, field
15
+ import re
16
+ import zipfile
17
+ from pathlib import Path
18
+ from typing import BinaryIO, Dict, List, Optional, Tuple
19
+
20
+ import tkinter as tk
21
+ from tkinter import filedialog, messagebox, simpledialog, ttk
22
+
23
+ from cryptography.hazmat.backends import default_backend
24
+ from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
25
+
26
# Magic written at the head of every compressed chunk payload (see
# compress_chunk_payload); the standard UE3 package tag value.
PACKAGE_FILE_TAG = 0x9E2A83C1
# Compression method flags (not referenced in this section of the file).
COMPRESS_NONE = 0x00
COMPRESS_ZLIB = 0x01
PKG_COOKED = 0x00000008
# Built-in AES key tried when no key file is supplied (see DecryptionProvider).
DEFAULT_KEY = bytes([
    0xC7, 0xDF, 0x6B, 0x13, 0x25, 0x2A, 0xCC, 0x71,
    0x47, 0xBB, 0x51, 0xC9, 0x8A, 0xD7, 0xE3, 0x4B,
    0x7F, 0xE5, 0x00, 0xB7, 0x7F, 0xA5, 0xFA, 0xB2,
    0x93, 0xE2, 0xF2, 0x4E, 0x6B, 0x17, 0xE7, 0x79,
])
# Size cap for hex previews (presumably for the UI; not referenced in this section).
HEX_PREVIEW_LIMIT = 65536
# Package file_version thresholds at which the UE3 serialization format changed.
# The first two are presumably consumed by the index/name readers defined
# elsewhere in this file — confirm against read_index_pkg / read_fname_pkg.
COMPACT_INDEX_DEPRECATED = 178
NUMBER_ADDED_TO_NAME = 343
# ByteProperty tags gain an enum name at this version (see _parse_property_tag_at).
ENUM_NAME_ADDED_TO_BYTE_PROPERTY_TAG = 633
# BoolProperty tags store the bool value as a byte at this version.
BOOL_VALUE_TO_BYTE_FOR_BOOL_PROPERTY_TAG = 673
41
+
42
+
43
class BinaryReader:
    """Little-endian primitive reader over a seekable binary stream."""

    def __init__(self, fh: BinaryIO):
        self.fh = fh

    def tell(self) -> int:
        return self.fh.tell()

    def seek(self, offset: int, whence: int = os.SEEK_SET) -> int:
        return self.fh.seek(offset, whence)

    def read_exact(self, size: int) -> bytes:
        """Read exactly *size* bytes or raise EOFError."""
        data = self.fh.read(size)
        if len(data) != size:
            raise EOFError(f"Expected {size} bytes, got {len(data)}")
        return data

    def _scalar(self, fmt: str, size: int):
        # Shared helper: unpack a single little-endian value of *size* bytes.
        return struct.unpack(fmt, self.read_exact(size))[0]

    def read_i32(self) -> int:
        return self._scalar("<i", 4)

    def read_u32(self) -> int:
        return self._scalar("<I", 4)

    def read_u64(self) -> int:
        return self._scalar("<Q", 8)

    def read_u16(self) -> int:
        return self._scalar("<H", 2)

    def read_i64(self) -> int:
        return self._scalar("<q", 8)

    def read_u8(self) -> int:
        return self._scalar("<B", 1)

    def read_i8(self) -> int:
        return self._scalar("<b", 1)

    def read_f32(self) -> float:
        return self._scalar("<f", 4)

    def remaining(self) -> int:
        """Bytes left between the current position and end of stream."""
        here = self.tell()
        self.seek(0, os.SEEK_END)
        total = self.tell()
        self.seek(here)
        return total - here

    def read_fstring(self) -> str:
        """Read a UE3 FString: i32 length, then ANSI (positive) or UTF-16LE (negative)."""
        length = self.read_i32()
        if length == 0:
            return ""
        if length > 0:
            payload = self.read_exact(length - 1)
            self.read_exact(1)  # consume the NUL terminator
            # UE3 positive length strings are ANSI/Windows-1252, not UTF-8
            return payload.decode("windows-1252", errors="ignore")
        # Negative length: UTF-16LE characters including a trailing 2-byte NUL.
        payload = self.read_exact(-length * 2)
        return payload[:-2].decode("utf-16-le", errors="ignore")
102
+
103
+
104
@dataclass
class FNameRef:
    """Serialized reference to a name-table entry plus UE3 instance number."""

    name_index: int       # index into the package name table
    instance_number: int  # >0 appends "_<n>" when resolved (ParsedPackage.resolve_name)
108
+
109
+
110
@dataclass
class NameEntry:
    """One entry of the package name table."""

    index: int  # position within the name table
    name: str   # the name string itself
    flags: int  # raw serialized flags; not interpreted in this file section
115
+
116
+
117
@dataclass
class ImportEntry:
    """One row of the import table (an object referenced from another package)."""

    table_index: int         # position within the import table
    class_package: FNameRef  # package name associated with the class
    class_name: FNameRef     # class name of the imported object
    outer_index: int         # object reference to the outer object (0 = none)
    object_name: FNameRef    # name of the imported object
124
+
125
+
126
@dataclass
class ExportEntry:
    """One row of the export table (an object defined in this package)."""

    table_index: int       # position within the export table
    class_index: int       # object reference to the class (0 means 'Class' itself)
    super_index: int       # object reference to the superclass/superstruct
    outer_index: int       # object reference to the outer object (0 = top level)
    object_name: FNameRef  # name of the exported object
    archetype_index: int   # object reference to the archetype
    object_flags: int      # raw 64-bit object flags
    serial_size: int       # byte length of the serialized object body
    serial_offset: int     # absolute file offset of the serialized body
    export_flags: int
    net_objects: List[int]
    package_guid: Tuple[int, int, int, int]
    package_flags: int
141
+
142
+
143
@dataclass
class FCompressedChunk:
    """Maps one span of uncompressed package data to its compressed bytes on disk."""

    uncompressed_offset: int  # offset of the span in the decompressed package
    uncompressed_size: int    # length of the decompressed span
    compressed_offset: int    # offset of the compressed payload in the file
    compressed_size: int      # length of the compressed payload
149
+
150
+
151
@dataclass
class FileSummary:
    """Parsed UE3 package file summary (the fixed header before the tables)."""

    tag: int = 0                    # package magic
    file_version: int = 0           # drives version-gated serialization paths
    licensee_version: int = 0
    total_header_size: int = 0      # size of the full header, tables included
    folder_name: str = ""
    package_flags_flags_offset: int = 0
    package_flags: int = 0
    name_count: int = 0             # name table entry count
    name_offset: int = 0            # file offset of the name table
    export_count: int = 0           # export table entry count
    export_offset: int = 0          # file offset of the export table
    import_count: int = 0           # import table entry count
    import_offset: int = 0          # file offset of the import table
    depends_offset: int = 0
    import_export_guids_offset: int = 0
    import_guids_count: int = 0
    export_guids_count: int = 0
    thumbnail_table_offset: int = 0
    guid: Tuple[int, int, int, int] = (0, 0, 0, 0)
    generations: List[Tuple[int, int, int]] = field(default_factory=list)
    engine_version: int = 0
    cooker_version: int = 0
    compression_flags_offset: int = 0
    compression_flags: int = 0
    compressed_chunks: List[FCompressedChunk] = field(default_factory=list)
178
+
179
+
180
@dataclass
class FileCompressionMetaData:
    """RL-specific metadata that follows the standard file summary."""

    garbage_size: int              # filler bytes between encrypted header and first chunk
    compressed_chunks_offset: int  # offset (within decrypted header) of the chunk table
    last_block_size: int           # uncompressed size of the final chunk
185
+
186
+
187
+ @dataclass
188
+ class ParsedPackage:
189
+ file_path: Path
190
+ summary: FileSummary
191
+ names: List[NameEntry]
192
+ imports: List[ImportEntry]
193
+ exports: List[ExportEntry]
194
+ file_bytes: bytes
195
+
196
+ def object_data(self, export: ExportEntry) -> bytes:
197
+ start = export.serial_offset
198
+ end = start + export.serial_size
199
+ if start < 0 or end < start or end > len(self.file_bytes):
200
+ return b""
201
+ return self.file_bytes[start:end]
202
+
203
+ def resolve_name(self, ref: FNameRef) -> str:
204
+ if 0 <= ref.name_index < len(self.names):
205
+ base = self.names[ref.name_index].name
206
+ else:
207
+ base = f"<Name#{ref.name_index}>"
208
+ return f"{base}_{ref.instance_number}" if ref.instance_number > 0 else base
209
+
210
+ def resolve_object_ref(self, index: int) -> str:
211
+ if index == 0:
212
+ return "None"
213
+ if index > 0:
214
+ export_index = index - 1
215
+ if 0 <= export_index < len(self.exports):
216
+ exp = self.exports[export_index]
217
+ return f"Export[{export_index}] {self.resolve_name(exp.object_name)}"
218
+ return f"Export[{export_index}] <invalid>"
219
+ import_index = -index - 1
220
+ if 0 <= import_index < len(self.imports):
221
+ imp = self.imports[import_index]
222
+ return f"Import[{import_index}] {self.resolve_name(imp.object_name)}"
223
+ return f"Import[{import_index}] <invalid>"
224
+
225
+ def resolve_object_path(self, index: int, seen: Optional[set] = None) -> str:
226
+ if index == 0:
227
+ return "None"
228
+ if seen is None:
229
+ seen = set()
230
+ if index in seen:
231
+ return "<cycle>"
232
+ seen.add(index)
233
+ if index > 0:
234
+ exp = self.exports[index - 1]
235
+ name = self.resolve_name(exp.object_name)
236
+ if exp.outer_index == 0:
237
+ return name
238
+ return f"{self.resolve_object_path(exp.outer_index, seen)}.{name}"
239
+ imp = self.imports[-index - 1]
240
+ name = self.resolve_name(imp.object_name)
241
+ if imp.outer_index == 0:
242
+ return name
243
+ return f"{self.resolve_object_path(imp.outer_index, seen)}.{name}"
244
+
245
+ def export_class_name(self, export: ExportEntry) -> str:
246
+ if export.class_index == 0:
247
+ return "Class"
248
+ if export.class_index > 0:
249
+ target = self.exports[export.class_index - 1]
250
+ return self.resolve_name(target.object_name)
251
+ target = self.imports[-export.class_index - 1]
252
+ return self.resolve_name(target.object_name)
253
+
254
+ def is_placeholder_export(self, export: ExportEntry) -> bool:
255
+ # An export is a placeholder/garbage slot if its class is the meta
256
+ # 'Class' (class_index == 0), its name resolves to literal 'None'
257
+ # (name table index 0 in UE3), it has no outer, no serial body, and
258
+ # no flags set. UE Explorer filters these out of its class list using
259
+ # essentially the same predicate (ClassIndex == 0 && Name == 'None').
260
+ # We additionally require zero size/offset/flags to avoid false
261
+ # positives on rare native objects whose class index is 0.
262
+ if export.class_index != 0:
263
+ return False
264
+ name = self.resolve_name(export.object_name)
265
+ if name.lower() != "none":
266
+ return False
267
+ if export.outer_index != 0:
268
+ return False
269
+ if export.serial_size != 0 or export.serial_offset != 0:
270
+ return False
271
+ if export.object_flags != 0 or export.export_flags != 0:
272
+ return False
273
+ return True
274
+
275
+ def resolve_export_class_candidates(self, export: ExportEntry) -> List[str]:
276
+ raw = self.export_class_name(export)
277
+ candidates = [raw]
278
+ for prefix in ("A", "U", "F"):
279
+ candidates.append(f"{prefix}{raw}")
280
+ return candidates
281
+
282
+
283
@dataclass
class SDKField:
    """A member parsed from an SDK header line: "Type Name; // 0xOFF (0xSIZE)"."""

    name: str
    type_name: str  # whitespace-normalized C++ type text
    offset: int     # byte offset within the owning type
    size: int       # byte size of the member
    owner: str      # name of the declaring type
290
+
291
+
292
@dataclass
class SDKType:
    """A class or struct layout parsed from the SDK dump."""

    name: str
    kind: str                  # "class" or "struct"
    super_name: Optional[str]  # base type name, or None for roots
    fields: List[SDKField] = field(default_factory=list)
298
+
299
+
300
@dataclass
class ParsedProperty:
    """One decoded UE3 tagged property, ready for display."""

    index: int          # ordinal within the parsed stream
    name: str           # property name from the tag
    tag_type: str       # e.g. "IntProperty", "StructProperty"
    size: int           # serialized value size from the tag
    array_index: int
    tag_offset: int     # offset of the tag within the object payload
    value_offset: int   # offset of the value bytes within the object payload
    value: str          # human-readable rendering of the value
    declared_type: str = "?"
    owner_type: str = "?"
    struct_name: Optional[str] = None  # set for StructProperty tags
    enum_name: Optional[str] = None    # set for versioned ByteProperty tags
    bool_value: Optional[bool] = None  # set for versioned BoolProperty tags
    raw_hex: str = ""                  # first 64 value bytes as spaced hex
316
+
317
+
318
class RLSDKDatabase:
    """Lookup table of SDK class/struct layouts keyed by C++ type name."""

    def __init__(self):
        self.types: Dict[str, SDKType] = {}

    def get_type(self, name: str) -> Optional[SDKType]:
        """Find *name* directly, or via the A/U/F SDK prefix conventions."""
        direct = self.types.get(name)
        if direct is not None:
            return direct
        for candidate in (name, f"A{name}", f"U{name}", f"F{name}"):
            found = self.types.get(candidate)
            if found is not None:
                return found
        return None

    def resolve_field(self, owner_name: str, field_name: str) -> Tuple[Optional[SDKField], Optional[str]]:
        """Walk the super chain of *owner_name* looking for *field_name*.

        Returns (field, declaring type name) or (None, None). A visited set
        guards against cyclic super chains.
        """
        visited = set()
        node = self.get_type(owner_name)
        while node is not None and node.name not in visited:
            visited.add(node.name)
            # Renamed loop variable: avoids shadowing dataclasses.field.
            for member in node.fields:
                if member.name == field_name:
                    return member, node.name
            node = self.get_type(node.super_name) if node.super_name else None
        return None, None
340
+
341
+
342
def parse_rlsdk_database(zip_path: Path) -> RLSDKDatabase:
    """Build an RLSDKDatabase from a zipped SDK dump.

    Scans every *_classes.hpp / *_structs.hpp entry, extracting each
    class/struct declaration together with its offset-annotated members.
    """
    class_re = re.compile(r"//\s+(?:Class|ScriptStruct)\s+[^\n]+\n//[^\n]*\n(?:class|struct)\s+(\w+)(?:\s*:\s*public\s+(\w+))?\s*\{(.*?)\n\};", re.S)
    field_re = re.compile(r"^\s*(.+?)\s+(\w+)(?:\[[^\]]+\])?;\s*//\s*0x([0-9A-Fa-f]+)\s*\(0x([0-9A-Fa-f]+)\)", re.M)
    db = RLSDKDatabase()
    with zipfile.ZipFile(zip_path) as archive:
        for entry in archive.namelist():
            if not entry.endswith(("_classes.hpp", "_structs.hpp")):
                continue
            source = archive.read(entry).decode("utf-8", errors="ignore")
            kind = "class" if entry.endswith("_classes.hpp") else "struct"
            for match in class_re.finditer(source):
                type_name, super_name, body = match.groups()
                sdk_type = db.types.get(type_name)
                if sdk_type is None:
                    sdk_type = SDKType(name=type_name, kind=kind, super_name=super_name)
                    db.types[type_name] = sdk_type
                else:
                    # Later declarations of the same type win.
                    sdk_type.kind = kind
                    sdk_type.super_name = super_name
                sdk_type.fields = [
                    SDKField(
                        name=member_name,
                        type_name=" ".join(raw_type.split()),
                        offset=int(offset_hex, 16),
                        size=int(size_hex, 16),
                        owner=type_name,
                    )
                    for raw_type, member_name, offset_hex, size_hex in field_re.findall(body)
                ]
    return db
373
+
374
+
375
# Decoders for well-known UE3 structs: each takes a BinaryReader positioned at
# the struct payload and returns a short human-readable preview string.
COMMON_STRUCT_DECODERS = {
    "FVector": lambda r: f"({r.read_f32():.6g}, {r.read_f32():.6g}, {r.read_f32():.6g})",
    "FVector2D": lambda r: f"({r.read_f32():.6g}, {r.read_f32():.6g})",
    "FRotator": lambda r: f"({r.read_i32()}, {r.read_i32()}, {r.read_i32()})",
    "FColor": lambda r: f"RGBA({r.read_u8()}, {r.read_u8()}, {r.read_u8()}, {r.read_u8()})",
    "FLinearColor": lambda r: f"({r.read_f32():.6g}, {r.read_f32():.6g}, {r.read_f32():.6g}, {r.read_f32():.6g})",
    "FQuat": lambda r: f"({r.read_f32():.6g}, {r.read_f32():.6g}, {r.read_f32():.6g}, {r.read_f32():.6g})",
    "FGuid": lambda r: f"{r.read_u32():08X}-{r.read_u32():08X}-{r.read_u32():08X}-{r.read_u32():08X}",
}
384
+
385
+
386
def parse_tarray_inner_type(type_name: str) -> Optional[str]:
    """Extract the whitespace-normalized element type of a TArray<...> declaration.

    Returns None when *type_name* is not a TArray.
    """
    match = re.search(r"TArray<(.+)>", type_name)
    return " ".join(match.group(1).split()) if match else None
391
+
392
+
393
def clean_cpp_type_name(type_name: str) -> str:
    """Strip 'class '/'struct ' keywords and trailing pointer stars from a C++ type."""
    text = type_name
    for keyword in ("class ", "struct "):
        text = text.replace(keyword, "")
    return text.strip().rstrip("*").strip()
396
+
397
+
398
def decode_name_ref(raw: bytes, package: ParsedPackage) -> str:
    """Decode a serialized FName reference from *raw* and resolve it to text."""
    if not raw:
        return ""
    reader = BinaryReader(io.BytesIO(raw))
    return package.resolve_name(read_fname_pkg(reader, package))
405
+
406
+
407
def decode_object_ref(raw: bytes, package: ParsedPackage) -> str:
    """Decode a serialized object index from *raw* and show it with its target."""
    if not raw:
        return ""
    reader = BinaryReader(io.BytesIO(raw))
    index = read_index_pkg(reader, package)
    return f"{index} ({package.resolve_object_ref(index)})"
414
+
415
+
416
def decode_array_preview(raw: bytes, inner_type: Optional[str], package: ParsedPackage) -> str:
    """Render a short preview of a serialized TArray value.

    Reads the element count, then decodes up to four leading elements when the
    inner C++ type is one this function knows how to read; otherwise falls back
    to a hex snippet of the data after the count.
    """
    if len(raw) < 4:
        return raw.hex(" ").upper()
    bio = io.BytesIO(raw)
    r = BinaryReader(bio)
    count = read_index_pkg(r, package)
    if count < 0:
        return f"count={count} (invalid)"
    if count == 0:
        return "count=0"
    if not inner_type:
        # NOTE(review): the hex fallback assumes a 4-byte count; confirm this
        # matches read_index_pkg for the package versions in play.
        return f"count={count}, data={raw[4:36].hex(' ').upper()}"
    inner_clean = clean_cpp_type_name(inner_type)
    preview = []
    try:
        # Decode at most four elements; stop at the first type we cannot read.
        for _ in range(min(count, 4)):
            if inner_clean in ("int32_t", "INT", "DWORD") and r.remaining() >= 4:
                preview.append(str(r.read_i32()))
            elif inner_clean == "float" and r.remaining() >= 4:
                preview.append(f"{r.read_f32():.6g}")
            elif inner_clean in ("FName", "class FName") and r.remaining() >= 8:
                # NOTE(review): clean_cpp_type_name strips "class ", so the
                # "class FName" alternative looks unreachable — confirm.
                preview.append(package.resolve_name(read_fname_pkg(r, package)))
            elif inner_clean.startswith("U") and r.remaining() >= 4:
                # U-prefixed SDK types are treated as object references.
                preview.append(package.resolve_object_ref(read_index_pkg(r, package)))
            elif inner_clean in COMMON_STRUCT_DECODERS:
                preview.append(COMMON_STRUCT_DECODERS[inner_clean](r))
            else:
                break
    except Exception:
        # Best-effort preview: truncated data simply ends the preview early.
        pass
    if preview:
        return f"count={count}, preview=[{', '.join(preview)}]"
    return f"count={count}, data={raw[4:36].hex(' ').upper()}"
449
+
450
+
451
def decode_property_value(tag_type: str, raw: bytes, package: ParsedPackage, declared_type: str = "", struct_name: Optional[str] = None, enum_name: Optional[str] = None, bool_value: Optional[bool] = None) -> str:
    """Render a serialized property payload as a display string.

    Unrecognised or partially-handled cases fall through to a hex snippet of
    the first 32 bytes; any decode failure is reported inline rather than
    raised, so callers never crash on malformed data.
    """
    try:
        if tag_type == "BoolProperty":
            # Newer packages carry the bool in the tag itself (bool_value).
            if bool_value is not None:
                return "True" if bool_value else "False"
            if raw:
                return "True" if raw[0] else "False"
            return "False"
        if tag_type == "IntProperty" and len(raw) >= 4:
            return str(struct.unpack("<i", raw[:4])[0])
        if tag_type == "FloatProperty" and len(raw) >= 4:
            return f"{struct.unpack('<f', raw[:4])[0]:.6g}"
        if tag_type in ("ObjectProperty", "ClassProperty", "ComponentProperty", "InterfaceProperty"):
            return decode_object_ref(raw, package)
        if tag_type == "NameProperty":
            return decode_name_ref(raw, package)
        if tag_type == "StrProperty":
            return BinaryReader(io.BytesIO(raw)).read_fstring()
        if tag_type == "ByteProperty":
            # Enum-valued bytes serialize the enum entry as an FName.
            if enum_name and len(raw) >= 8:
                return decode_name_ref(raw, package)
            if raw:
                return str(raw[0])
            # Empty ByteProperty falls through to the generic "" below.
        if tag_type == "StructProperty":
            if struct_name in COMMON_STRUCT_DECODERS:
                return COMMON_STRUCT_DECODERS[struct_name](BinaryReader(io.BytesIO(raw)))
            return f"{struct_name or '?'} ({len(raw)} bytes)"
        if tag_type == "ArrayProperty":
            return decode_array_preview(raw, parse_tarray_inner_type(declared_type), package)
        if tag_type == "QWordProperty" and len(raw) >= 8:
            return str(struct.unpack("<Q", raw[:8])[0])
        if tag_type == "StringRefProperty" and len(raw) >= 4:
            return str(struct.unpack("<I", raw[:4])[0])
        if tag_type == "DelegateProperty":
            if raw:
                # Delegates serialize an object reference plus a function name.
                rr = BinaryReader(io.BytesIO(raw))
                obj = read_index_pkg(rr, package)
                func = package.resolve_name(read_fname_pkg(rr, package))
                return f"obj={package.resolve_object_ref(obj)}, func={func}"
        # Generic fallback: hex preview of the first 32 bytes.
        if raw:
            return raw[:32].hex(" ").upper()
        return ""
    except Exception as exc:
        return f"<decode error: {exc}>"
495
+
496
+
497
# Property tag type names accepted by the tag scanner; any other name at a
# candidate offset rejects that offset (see _parse_property_tag_at).
VALID_PROPERTY_TYPES = {
    "ByteProperty", "IntProperty", "BoolProperty", "FloatProperty", "ObjectProperty",
    "NameProperty", "DelegateProperty", "ClassProperty", "ArrayProperty", "StructProperty",
    "VectorProperty", "RotatorProperty", "StrProperty", "MapProperty", "FixedArrayProperty",
    "InterfaceProperty", "ComponentProperty", "QWordProperty", "PointerProperty",
    "StringRefProperty", "BioMask4Property", "GuidProperty"
}
504
+
505
+
506
+ def _valid_name_ref(ref: FNameRef, package: ParsedPackage) -> bool:
507
+ return 0 <= ref.name_index < len(package.names) and ref.instance_number >= -1
508
+
509
+
510
def _parse_property_tag_at(package: ParsedPackage, raw: bytes, offset: int, index: int) -> Tuple[Optional[ParsedProperty], int, bool]:
    """Attempt to read one UE3 property tag at *offset* inside *raw*.

    Returns (prop, next_offset, hit_end):
      - (prop, end_of_value, False) on a valid tag,
      - (None, after_name, True) when the 'None' terminator name was read,
      - (None, offset, False) when no valid tag starts here.
    Used speculatively by the stream scanner, so every validation failure
    rejects the offset instead of raising.
    """
    if offset < 0 or offset + 8 > len(raw):
        return None, offset, False
    r = BinaryReader(io.BytesIO(raw))
    r.seek(offset)
    try:
        tag_offset = offset
        name_ref = read_fname_pkg(r, package)
        if not _valid_name_ref(name_ref, package):
            return None, offset, False
        name = package.resolve_name(name_ref)
        if name == "None":
            # 'None' terminates a property stream.
            return None, r.tell(), True

        type_ref = read_fname_pkg(r, package)
        if not _valid_name_ref(type_ref, package):
            return None, offset, False
        tag_type = package.resolve_name(type_ref)
        if tag_type not in VALID_PROPERTY_TYPES:
            return None, offset, False

        size = r.read_i32()
        array_index = r.read_i32()
        if size < 0 or array_index < 0:
            return None, offset, False

        struct_name = None
        enum_name = None
        bool_value = None
        declared_type = "?"

        if tag_type == "StructProperty":
            # Struct tags carry the struct type name before the value.
            sref = read_fname_pkg(r, package)
            if not _valid_name_ref(sref, package):
                return None, offset, False
            struct_name = package.resolve_name(sref)
            declared_type = struct_name
        elif tag_type == "ByteProperty":
            # Version-gated: newer packages store the enum name in the tag.
            if package.summary.file_version >= ENUM_NAME_ADDED_TO_BYTE_PROPERTY_TAG:
                eref = read_fname_pkg(r, package)
                if not _valid_name_ref(eref, package):
                    return None, offset, False
                enum_name = package.resolve_name(eref)
                declared_type = enum_name or "Byte"
            else:
                declared_type = "Byte"
        elif tag_type == "ArrayProperty":
            declared_type = "TArray"
        elif tag_type == "BoolProperty":
            # Version-gated: newer packages store the bool value as a tag byte.
            if package.summary.file_version >= BOOL_VALUE_TO_BYTE_FOR_BOOL_PROPERTY_TAG:
                bool_value = bool(r.read_u8())
            declared_type = "bool"
        else:
            # Map the remaining tag types to friendly C++-ish type names.
            declared_type = {
                "IntProperty": "int",
                "FloatProperty": "float",
                "ObjectProperty": "UObject*",
                "ClassProperty": "UClass*",
                "ComponentProperty": "UObject*",
                "InterfaceProperty": "UObject*",
                "NameProperty": "FName",
                "StrProperty": "FString",
                "DelegateProperty": "FScriptDelegate",
                "QWordProperty": "uint64",
                "PointerProperty": "pointer",
                "StringRefProperty": "uint32",
                "MapProperty": "TMap",
                "FixedArrayProperty": "array",
                "GuidProperty": "FGuid",
                "BioMask4Property": "BioMask4",
            }.get(tag_type, tag_type)

        value_offset = r.tell()
        if value_offset + size > len(raw):
            return None, offset, False
        value_raw = raw[value_offset:value_offset + size]
        value = decode_property_value(tag_type, value_raw, package, declared_type, struct_name, enum_name, bool_value)
        prop = ParsedProperty(
            index=index,
            name=name,
            tag_type=tag_type,
            size=size,
            array_index=array_index,
            tag_offset=tag_offset,
            value_offset=value_offset,
            value=value,
            declared_type=declared_type,
            owner_type="SerializedTag",
            struct_name=struct_name,
            enum_name=enum_name,
            bool_value=bool_value,
            raw_hex=value_raw[:64].hex(" ").upper(),
        )
        return prop, value_offset + size, False
    except Exception:
        # Any read error means this offset does not hold a valid tag.
        return None, offset, False
606
+
607
+
608
def _try_parse_property_stream(package: ParsedPackage, raw: bytes, start_offset: int) -> Tuple[int, List[ParsedProperty], bool]:
    """Parse consecutive property tags starting at *start_offset*.

    Stops on the 'None' terminator, on the first invalid tag, on a repeated
    offset (cycle guard), or after 4096 tags. Returns (final offset, parsed
    properties, whether the 'None' terminator was reached).
    """
    parsed: List[ParsedProperty] = []
    visited = set()
    cursor = start_offset
    terminated = False
    for tag_index in range(4096):  # hard cap bounds runaway scans
        if cursor in visited:
            break
        visited.add(cursor)
        prop, advance, hit_end = _parse_property_tag_at(package, raw, cursor, tag_index)
        if hit_end:
            terminated = True
            cursor = advance
            break
        if prop is None:
            break
        parsed.append(prop)
        cursor = advance
    return cursor, parsed, terminated
627
+
628
+
629
def _find_best_property_stream_offset(package: ParsedPackage, raw: bytes, class_type: Optional[SDKType] = None, sdk_db: Optional[RLSDKDatabase] = None) -> Tuple[int, List[ParsedProperty]]:
    """Scan every byte offset of *raw* for the most plausible property stream.

    Scoring favors more parsed properties, a reached 'None' terminator, longer
    coverage (capped at 512 bytes), and earlier start offsets.
    """
    del class_type, sdk_db  # accepted for interface compatibility only
    if len(raw) < 24:
        return 0, []

    best: Tuple[int, List[ParsedProperty]] = (0, [])
    best_score = -1
    for candidate in range(len(raw) - 24 + 1):
        # Cheap pre-filter: the first i32 must be a plausible name index.
        first_name_index = struct.unpack_from('<i', raw, candidate)[0]
        if not (0 <= first_name_index < len(package.names)):
            continue
        stream_end, parsed, terminated = _try_parse_property_stream(package, raw, candidate)
        if not parsed:
            continue
        score = 1000 * len(parsed)
        if terminated:
            score += 250
        score += min(stream_end - candidate, 512)
        score -= candidate
        if score > best_score:
            best_score = score
            best = (candidate, parsed)
    return best
655
+
656
+
657
def parse_serialized_properties(package: ParsedPackage, export: ExportEntry, sdk_db: Optional[RLSDKDatabase]) -> List[ParsedProperty]:
    """Best-effort parse of the tagged-property stream inside *export*'s body."""
    del sdk_db  # kept for interface compatibility
    payload = package.object_data(export)
    if not payload:
        return []
    return _find_best_property_stream_offset(package, payload, None, None)[1]
664
+
665
class DecryptionProvider:
    """Holds the candidate AES-ECB keys used to decrypt package headers."""

    def __init__(self, key_file_path: Optional[str] = None):
        # With no key file, fall back to the single built-in key.
        if key_file_path is None:
            self.decryption_keys = [DEFAULT_KEY]
            return
        if not os.path.exists(key_file_path):
            raise FileNotFoundError(f"Failed to load the key file: {key_file_path}")
        # One base64-encoded key per non-blank line.
        keys = []
        with open(key_file_path, "r", encoding="utf-8") as fh:
            for line in fh:
                stripped = line.strip()
                if stripped:
                    keys.append(base64.b64decode(stripped))
        self.decryption_keys = keys

    @staticmethod
    def decrypt_ecb(key: bytes, data: bytes) -> bytes:
        """AES-ECB decrypt *data* with *key* (data must be block-aligned)."""
        decryptor = Cipher(algorithms.AES(key), modes.ECB(), backend=default_backend()).decryptor()
        return decryptor.update(data) + decryptor.finalize()

    @staticmethod
    def encrypt_ecb(key: bytes, data: bytes) -> bytes:
        """AES-ECB encrypt *data* with *key* (data must be block-aligned)."""
        encryptor = Cipher(algorithms.AES(key), modes.ECB(), backend=default_backend()).encryptor()
        return encryptor.update(data) + encryptor.finalize()
690
+
691
+
692
def find_valid_key(encrypted_path: Path, provider: DecryptionProvider) -> Tuple[FileSummary, FileCompressionMetaData, bytes, bytes]:
    """Locate the AES key that decrypts *encrypted_path*'s header.

    Returns (summary, compression metadata, raw encrypted region, key).
    Raises ValueError when the header is inconsistent, the file is truncated,
    or none of the provider's keys verifies.
    """
    with encrypted_path.open("rb") as src:
        summary = parse_file_summary(src)
        meta = parse_file_compression_metadata(src)
        # Encrypted region: from the name table to the end of the header,
        # minus the trailing garbage bytes.
        encrypted_size = summary.total_header_size - meta.garbage_size - summary.name_offset
        if encrypted_size < 0:
            raise ValueError(
                f"Computed encrypted region size is negative ({encrypted_size}). "
                f"summary.total_header_size={summary.total_header_size}, "
                f"meta.garbage_size={meta.garbage_size}, "
                f"summary.name_offset={summary.name_offset}. "
                f"This usually indicates a corrupted or already-edited package header."
            )
        # Round up to the 16-byte AES block size.
        encrypted_size = (encrypted_size + 15) & ~15
        src.seek(summary.name_offset)
        encrypted_data = src.read(encrypted_size)
        if len(encrypted_data) != encrypted_size:
            raise ValueError(
                f"Failed to read encrypted region: expected {encrypted_size} bytes "
                f"at offset {summary.name_offset}, got {len(encrypted_data)} (file truncated?)"
            )
        # Try each candidate key until one produces a verifiable header.
        for key in provider.decryption_keys:
            if verify_decryptor(summary, meta, key, encrypted_data):
                return summary, meta, encrypted_data, key
        raise ValueError("Unknown Decryption key")
717
+
718
+
719
def serialize_rl_chunk_table(chunks: List[FCompressedChunk]) -> bytes:
    """Serialize the RL chunk table: i32 count, then per chunk
    (i64 uncompressed offset, i32 size, i64 compressed offset, i32 size)."""
    parts = [struct.pack("<i", len(chunks))]
    for chunk in chunks:
        # "<qiqi" has no padding, so this matches four separate packs.
        parts.append(struct.pack(
            "<qiqi",
            chunk.uncompressed_offset,
            chunk.uncompressed_size,
            chunk.compressed_offset,
            chunk.compressed_size,
        ))
    return b"".join(parts)
728
+
729
+
730
def compress_chunk_payload(uncompressed: bytes, block_size: int = 0x20000, level: int = 6) -> bytes:
    """Build a chunk payload: tag, block size, totals, per-block table, zlib blocks."""
    # Compress the data block by block and remember each block's sizes.
    blocks = []
    for start in range(0, len(uncompressed), block_size):
        plain = uncompressed[start:start + block_size]
        blocks.append((zlib.compress(plain, level), len(plain)))
    total_compressed = sum(len(compressed) for compressed, _ in blocks)

    pieces = [
        struct.pack("<I", PACKAGE_FILE_TAG),
        struct.pack("<i", block_size),
        struct.pack("<ii", total_compressed, len(uncompressed)),
    ]
    # Block table (compressed size, uncompressed size) precedes the data.
    pieces.extend(struct.pack("<ii", len(compressed), plain_size)
                  for compressed, plain_size in blocks)
    pieces.extend(compressed for compressed, _ in blocks)
    return b"".join(pieces)
747
+
748
+
749
def _find_file_compression_metadata_offsets(stream: BinaryIO) -> Dict[str, int]:
    """Record the absolute offsets of the three i32 compression-metadata fields.

    Parses the summary first purely to advance *stream* to the metadata block,
    then notes the position before each of the three consecutive i32 fields.
    """
    parse_file_summary(stream)
    reader = BinaryReader(stream)
    meta_offset = stream.tell()
    offsets = {
        "meta_offset": meta_offset,
        "garbage_size_offset": meta_offset,  # first field sits at the block start
    }
    reader.read_i32()
    offsets["compressed_chunks_offset_offset"] = stream.tell()
    reader.read_i32()
    offsets["last_block_size_offset"] = stream.tell()
    reader.read_i32()
    return offsets
765
+
766
+
767
def find_key_for_encrypted_upk(encrypted_path: Path, provider: DecryptionProvider) -> bytes:
    """Return the first key from *provider* that successfully decrypts *encrypted_path*.

    Raises ValueError if no key in the provider works.
    """
    # find_valid_key returns (summary, meta, encrypted_data, key).
    return find_valid_key(encrypted_path, provider)[3]
774
+
775
+
776
def build_reencrypted_package(original_encrypted_path: Path, modified_decrypted_bytes: bytes, provider: DecryptionProvider, output_path: Path, *, override_key: Optional[bytes] = None) -> Path:
    """Rebuild an encrypted package around a modified decrypted image.

    Re-uses the original file's summary prefix and chunk layout, recompresses
    the modified data into the same number of chunks, patches the header
    counters/offsets, and re-encrypts the header region. Returns *output_path*.
    Raises ValueError when the modified image no longer fits the original
    chunk layout or header capacity.
    """
    summary, meta, original_encrypted_data, valid_key = find_valid_key(original_encrypted_path, provider)
    # If the caller wants to encrypt with a different key (e.g. sourced from a
    # donor encrypted UPK), use that key for the output instead of the key that
    # was used to decrypt the original package.
    if override_key is not None:
        valid_key = override_key
    modified_summary = parse_file_summary(io.BytesIO(modified_decrypted_bytes))
    original_plain = bytearray(DecryptionProvider.decrypt_ecb(valid_key, original_encrypted_data))
    original_chunks = parse_rl_compressed_chunks(bytes(original_plain), meta.compressed_chunks_offset)
    if not original_chunks:
        raise ValueError("No compressed chunks were found in original encrypted header")

    # The chunk table sits at the end of the encrypted header region, right
    # after the name/export/import tables (i.e. at depends_offset), expressed
    # relative to the start of the encrypted region (name_offset).
    new_chunk_table_offset = modified_summary.depends_offset - modified_summary.name_offset
    patch_limit = max(0, new_chunk_table_offset)
    # How far every chunk's uncompressed span moves in the modified image.
    chunk_shift = modified_summary.depends_offset - original_chunks[0].uncompressed_offset

    rebuilt_chunks: List[FCompressedChunk] = []
    rebuilt_chunk_payloads: List[bytes] = []
    # Placeholder table only fixes the serialized table's byte length.
    chunk_table_placeholder = serialize_rl_chunk_table([
        FCompressedChunk(0, 0, 0, 0) for _ in original_chunks
    ])
    required_plain_len = new_chunk_table_offset + len(chunk_table_placeholder)
    # Round up to the 16-byte AES block size.
    encrypted_plain_len = (required_plain_len + 15) & ~15
    header_plain = bytearray(encrypted_plain_len)
    copy_len = min(len(original_plain), encrypted_plain_len)
    header_plain[:copy_len] = original_plain[:copy_len]

    new_total_header_size = modified_summary.name_offset + encrypted_plain_len + meta.garbage_size
    current_compressed_offset = new_total_header_size
    # Recompress the modified data chunk by chunk, keeping the original chunk
    # boundaries (shifted), with the last chunk absorbing any size change.
    for i, chunk in enumerate(original_chunks):
        start = chunk.uncompressed_offset + chunk_shift
        if i + 1 < len(original_chunks):
            end = original_chunks[i + 1].uncompressed_offset + chunk_shift
            if end > len(modified_decrypted_bytes):
                raise ValueError("Modified decrypted package changed size too early for the rebuilt chunk layout")
        else:
            end = len(modified_decrypted_bytes)
        if end < start:
            raise ValueError("Invalid rebuilt chunk bounds")
        payload = compress_chunk_payload(modified_decrypted_bytes[start:end])
        rebuilt_chunk_payloads.append(payload)
        rebuilt_chunks.append(FCompressedChunk(
            uncompressed_offset=start,
            uncompressed_size=end - start,
            compressed_offset=current_compressed_offset,
            compressed_size=len(payload),
        ))
        current_compressed_offset += len(payload)

    if patch_limit > len(header_plain):
        raise ValueError("Modified decrypted header exceeds encrypted header capacity")
    if patch_limit > 0:
        # NOTE(review): patch_limit is computed from modified_summary.name_offset
        # but the source slice starts at the ORIGINAL summary.name_offset; if the
        # two name offsets ever differ, this slice assignment would resize
        # header_plain. Presumably name_offset never changes — confirm.
        header_plain[:patch_limit] = modified_decrypted_bytes[summary.name_offset:modified_summary.depends_offset]

    # Write the real chunk table over the placeholder region, then encrypt.
    chunk_table = serialize_rl_chunk_table(rebuilt_chunks)
    table_end = new_chunk_table_offset + len(chunk_table)
    if table_end > len(header_plain):
        raise ValueError("Rebuilt compressed chunk table does not fit inside encrypted header")
    header_plain[new_chunk_table_offset:table_end] = chunk_table
    encrypted_header = DecryptionProvider.encrypt_ecb(valid_key, bytes(header_plain))

    # Patch the plaintext summary prefix with the modified table counts/offsets.
    original_bytes = Path(original_encrypted_path).read_bytes()
    prefix = bytearray(original_bytes[:summary.name_offset])
    summary_offsets = _find_summary_offsets(modified_decrypted_bytes)
    patch_i32_le(prefix, summary_offsets["total_header_size_offset"], new_total_header_size)
    patch_i32_le(prefix, summary_offsets["name_count_offset"], modified_summary.name_count)
    patch_i32_le(prefix, summary_offsets["name_offset_offset"], modified_summary.name_offset)
    patch_i32_le(prefix, summary_offsets["export_count_offset"], modified_summary.export_count)
    patch_i32_le(prefix, summary_offsets["export_offset_offset"], modified_summary.export_offset)
    patch_i32_le(prefix, summary_offsets["import_count_offset"], modified_summary.import_count)
    patch_i32_le(prefix, summary_offsets["import_offset_offset"], modified_summary.import_offset)
    patch_i32_le(prefix, summary_offsets["depends_offset_offset"], modified_summary.depends_offset)
    patch_i32_le(prefix, summary_offsets["import_export_guids_offset_offset"], modified_summary.import_export_guids_offset)
    if "thumbnail_table_offset_offset" in summary_offsets:
        patch_i32_le(prefix, summary_offsets["thumbnail_table_offset_offset"], modified_summary.thumbnail_table_offset)
    _patch_generation_counts(prefix, summary_offsets, modified_summary.export_count, modified_summary.name_count)
    # Also patch the compression metadata fields that follow the summary.
    with original_encrypted_path.open("rb") as src:
        meta_offsets = _find_file_compression_metadata_offsets(src)
    patch_i32_le(prefix, meta_offsets["compressed_chunks_offset_offset"], new_chunk_table_offset)
    if rebuilt_chunks:
        patch_i32_le(prefix, meta_offsets["last_block_size_offset"], rebuilt_chunks[-1].uncompressed_size)

    # Assemble: plaintext prefix + encrypted header + garbage gap + chunk data.
    output = bytearray()
    output += prefix
    output += encrypted_header
    gap_start = modified_summary.name_offset + len(encrypted_header)
    original_gap_start = summary.name_offset + len(original_encrypted_data)
    original_gap_end = original_chunks[0].compressed_offset
    gap_bytes = original_bytes[original_gap_start:original_gap_end]
    if len(gap_bytes) != meta.garbage_size:
        # Fall back to taking exactly garbage_size bytes ending at the gap end.
        gap_bytes = original_bytes[original_gap_end - meta.garbage_size:original_gap_end]
    output += gap_bytes
    for payload in rebuilt_chunk_payloads:
        output += payload

    output_path.parent.mkdir(parents=True, exist_ok=True)
    output_path.write_bytes(output)
    return output_path
875
+
876
+
877
def _pack_fname_value(package: ParsedPackage, text: str) -> bytes:
    """Encode user-typed *text* as an 8-byte serialized FName value.

    Accepted forms:
      * ``#<index>`` — pick a name-table entry by raw index.
      * ``Base`` or ``Base_<N>`` — match by base name, with an optional
        numeric instance suffix.

    Returns the bytes produced by serialize_fname (name_index + stored
    instance). Raises ValueError when no name-table entry matches.
    """
    text = text.strip()
    # Allow either "#<index>" to pick a name table entry by raw index, or a
    # plain base/base_<N> string to match by name. Instance suffixes are
    # split off so users can write things like "Foo_3" and have it round-trip
    # through the version-aware serialize_fname adjustment.
    base_text, instance_number = _split_name_instance(text)
    match = None
    if base_text.startswith("#"):
        try:
            idx = int(base_text[1:])
            if 0 <= idx < len(package.names):
                match = package.names[idx]
        except Exception:
            # Malformed index (e.g. "#abc") simply falls through to
            # name-based matching below.
            pass
    if match is None:
        # Primary lookup: exact match on the split base name.
        for entry in package.names:
            if entry.name == base_text:
                match = entry
                break
    if match is None:
        # Fall back to the original full string (for the legacy case where a
        # name literally contained '_<digits>').
        for entry in package.names:
            if entry.name == text:
                match = entry
                instance_number = 0
                break
    if match is None:
        raise ValueError(f"FName not found in package name table: {text}")
    # instance_number == 0 from _split_name_instance means "no suffix typed",
    # which corresponds to in-memory -1 for >= NUMBER_ADDED_TO_NAME packages
    # (so serialize_fname writes 0 on disk). Translate appropriately.
    if package.summary.file_version >= NUMBER_ADDED_TO_NAME and instance_number == 0:
        in_memory_instance = -1
    else:
        in_memory_instance = instance_number
    return serialize_fname(FNameRef(match.index, in_memory_instance), package.summary)
915
+
916
+
917
+ def _parse_struct_numbers(text: str) -> List[float]:
918
+ parts = [p.strip() for p in text.replace("(", "").replace(")", "").split(",") if p.strip()]
919
+ return [float(p) for p in parts]
920
+
921
+
922
+
923
def get_export_entry_offsets(package: ParsedPackage) -> List[int]:
    """Return the absolute file offset of every export-table entry.

    Walks the export table by actually parsing each entry (entries are
    variable-length because of the net_objects array), recording the
    stream position before each parse.
    """
    stream = io.BytesIO(package.file_bytes)
    stream.seek(package.summary.export_offset)
    reader = BinaryReader(stream)
    summary = package.summary
    gen_count = len(summary.generations)
    result: List[int] = []
    for index in range(summary.export_count):
        result.append(stream.tell())
        parse_export_entry(reader, index, gen_count, summary)
    return result
933
+
934
+
935
def patch_i32_le(data: bytearray, offset: int, value: int) -> None:
    """Overwrite 4 bytes at *offset* with *value* as a little-endian i32."""
    packed = struct.pack("<i", value)
    data[offset:offset + 4] = packed
937
+
938
+
939
def patch_i64_le(data: bytearray, offset: int, value: int) -> None:
    """Overwrite 8 bytes at *offset* with *value* as a little-endian i64."""
    packed = struct.pack("<q", value)
    data[offset:offset + 8] = packed
941
+
942
+
943
def apply_property_edit_bytes(package: ParsedPackage, export: ExportEntry, prop: ParsedProperty, text: str) -> bytes:
    """Return a full copy of the package bytes with one property edited.

    Same-size edits are patched in place. Size-changing edits are only
    supported for StrProperty: the value is spliced in, the property tag's
    size field and the export's serial_size are rewritten, and the
    serial_offset of every export located after the edited one is shifted.
    Raises ValueError for unsupported edits or inconsistent bounds.
    """
    rel_offset, replacement = encode_property_value(package, prop, text)
    # rel_offset is relative to the export's serial data.
    abs_offset = export.serial_offset + rel_offset
    size_delta = len(replacement) - prop.size
    # Bool values live one byte before value_offset (inside the tag), so the
    # variable-size splice below must target that adjusted position.
    target_offset = prop.value_offset - 1 if prop.tag_type == "BoolProperty" and prop.bool_value is not None else prop.value_offset

    if size_delta == 0:
        # Fast path: same-size replacement, patch in place.
        data = bytearray(package.file_bytes)
        data[abs_offset:abs_offset + len(replacement)] = replacement
        return bytes(data)

    if prop.tag_type != "StrProperty":
        raise ValueError("Variable-size edits are currently supported for StrProperty only")

    if export.serial_offset < 0 or export.serial_size < 0:
        raise ValueError("Invalid export serial bounds")
    export_end = export.serial_offset + export.serial_size
    if export_end > len(package.file_bytes):
        raise ValueError("Export serial data exceeds package size")

    value_abs_offset = export.serial_offset + target_offset
    # The tag's size field sits 16 bytes into the serialized tag
    # (name FName 8 + type FName 8, then i32 size).
    tag_size_abs_offset = export.serial_offset + prop.tag_offset + 16

    # Splice: bytes before the value, new payload, bytes after the old value.
    new_data = bytearray()
    new_data += package.file_bytes[:value_abs_offset]
    new_data += replacement
    new_data += package.file_bytes[value_abs_offset + prop.size:]

    export_entry_offsets = get_export_entry_offsets(package)
    if export.table_index >= len(export_entry_offsets):
        raise ValueError("Export table index out of range")

    entry_offset = export_entry_offsets[export.table_index]
    patch_i32_le(new_data, tag_size_abs_offset, len(replacement))
    # serial_size lives at entry_offset + 32 (see serialize_export_entry:
    # 3*i32 + 8-byte FName + i32 + u64 before it).
    patch_i32_le(new_data, entry_offset + 32, export.serial_size + size_delta)

    # Every export whose data starts after the edited export moved by
    # size_delta; serial_offset lives at entry_offset + 36.
    export_shift_point = export.serial_offset
    for idx, other in enumerate(package.exports):
        if idx == export.table_index:
            continue
        if other.serial_offset > export_shift_point:
            other_entry_offset = export_entry_offsets[idx]
            patch_i64_le(new_data, other_entry_offset + 36, other.serial_offset + size_delta)

    return bytes(new_data)
988
+
989
+
990
def encode_property_value(package: ParsedPackage, prop: ParsedProperty, text: str) -> Tuple[int, bytes]:
    """Encode user-typed *text* into raw bytes for *prop*.

    Returns ``(offset, payload)`` where *offset* is relative to the export's
    serial data and *payload* is the bytes to write there. A leading
    ``hex:`` prefix bypasses typed encoding and accepts a raw same-size hex
    payload. Raises ValueError for malformed input or unsupported types.
    """
    text = text.strip()
    if text.lower().startswith("hex:"):
        raw = bytes.fromhex(text[4:].strip())
        # Bool values are stored one byte before value_offset (in the tag).
        target_offset = prop.value_offset - 1 if prop.tag_type == "BoolProperty" and prop.bool_value is not None else prop.value_offset
        expected = 1 if prop.tag_type == "BoolProperty" and prop.bool_value is not None else prop.size
        if len(raw) != expected:
            raise ValueError(f"hex payload must be exactly {expected} bytes")
        return target_offset, raw
    if prop.tag_type == "BoolProperty":
        v = text.lower()
        if v in ("1", "true", "yes", "on"):
            # NOTE(review): "true" returns an EMPTY payload while "false"
            # returns b"\x00" — asymmetric; as written, setting a bool to
            # true writes nothing in the equal-size path of
            # apply_property_edit_bytes. Looks like it should be b"\x01";
            # confirm against the intended on-disk bool representation
            # before changing.
            return prop.value_offset - 1, b""
        if v in ("0", "false", "no", "off"):
            return prop.value_offset - 1, b"\x00"
        raise ValueError("BoolProperty expects true/false")
    if prop.tag_type == "IntProperty":
        return prop.value_offset, struct.pack("<i", int(text, 0))
    if prop.tag_type == "FloatProperty":
        return prop.value_offset, struct.pack("<f", float(text))
    if prop.tag_type == "QWordProperty":
        return prop.value_offset, struct.pack("<Q", int(text, 0))
    if prop.tag_type == "StringRefProperty":
        return prop.value_offset, struct.pack("<I", int(text, 0))
    if prop.tag_type in ("ObjectProperty", "ClassProperty", "ComponentProperty", "InterfaceProperty"):
        # Object references serialize as a signed package index
        # (>0 export, <0 import, 0 null).
        resolved = resolve_object_index_by_text(package, text)
        if resolved is None:
            raise ValueError("Object reference not found in exports/imports; use an index like -12 or a full object path")
        return prop.value_offset, struct.pack("<i", resolved)
    if prop.tag_type == "NameProperty":
        return prop.value_offset, _pack_fname_value(package, text)
    if prop.tag_type == "ByteProperty":
        # Enum-backed bytes store an FName; plain bytes store one raw byte.
        if prop.enum_name:
            return prop.value_offset, _pack_fname_value(package, text)
        return prop.value_offset, struct.pack("<B", int(text, 0) & 0xFF)
    if prop.tag_type == "StrProperty":
        encoded = write_fstring_bytes(text)
        return prop.value_offset, encoded
    if prop.tag_type == "StructProperty":
        # Fixed-layout structs: parse "a,b,c"-style input and pack directly.
        if prop.struct_name == "FVector":
            vals = _parse_struct_numbers(text)
            if len(vals) != 3:
                raise ValueError("FVector expects x,y,z")
            return prop.value_offset, struct.pack("<fff", *vals)
        if prop.struct_name == "FVector2D":
            vals = _parse_struct_numbers(text)
            if len(vals) != 2:
                raise ValueError("FVector2D expects x,y")
            return prop.value_offset, struct.pack("<ff", *vals)
        if prop.struct_name == "FRotator":
            vals = [int(v) for v in _parse_struct_numbers(text)]
            if len(vals) != 3:
                raise ValueError("FRotator expects pitch,yaw,roll")
            return prop.value_offset, struct.pack("<iii", *vals)
        if prop.struct_name == "FColor":
            vals = [int(v) for v in _parse_struct_numbers(text)]
            if len(vals) != 4:
                raise ValueError("FColor expects r,g,b,a")
            return prop.value_offset, bytes(v & 0xFF for v in vals)
        if prop.struct_name == "FLinearColor":
            vals = _parse_struct_numbers(text)
            if len(vals) != 4:
                raise ValueError("FLinearColor expects r,g,b,a")
            return prop.value_offset, struct.pack("<ffff", *vals)
        if prop.struct_name == "FGuid":
            # Accept dashed/braced guids; pack as four little-endian u32s.
            cleaned = text.replace('-', '').replace('{', '').replace('}', '').strip()
            if len(cleaned) != 32:
                raise ValueError("FGuid expects 32 hex digits or a dashed guid")
            vals = [int(cleaned[i:i+8], 16) for i in range(0, 32, 8)]
            return prop.value_offset, struct.pack("<IIII", *vals)
    raise ValueError(f"Editing is not implemented for {prop.tag_type}")
1061
+
1062
def write_fstring_bytes(text: str) -> bytes:
    """Serialize *text* as an Unreal FString.

    Empty -> a single zero length. Pure-ASCII -> positive byte length
    (including NUL) followed by the NUL-terminated ANSI bytes. Otherwise ->
    negative character count (including the terminator) followed by
    NUL-terminated UTF-16LE bytes.
    """
    if not text:
        return struct.pack('<i', 0)
    if text.isascii():
        payload = text.encode('ascii') + b'\x00'
        return struct.pack('<i', len(payload)) + payload
    payload = text.encode('utf-16-le') + b'\x00\x00'
    return struct.pack('<i', -(len(text) + 1)) + payload
1074
+
1075
def serialize_fname(ref: "FNameRef", summary: Optional["FileSummary"] = None) -> bytes:
    """Serialize an FName reference as two little-endian i32s.

    Mirrors the version-aware adjustment in read_fname: for packages at or
    beyond NUMBER_ADDED_TO_NAME the on-disk value is instance_number + 1.
    With no summary (legacy callers) the in-memory instance is written
    unchanged, preserving prior behaviour.
    """
    stored_instance = ref.instance_number
    if summary is not None and summary.file_version >= NUMBER_ADDED_TO_NAME:
        stored_instance += 1
    return struct.pack("<ii", ref.name_index, stored_instance)
1086
+
1087
+
1088
def serialize_name_entry(entry: "NameEntry") -> bytes:
    """Serialize a name-table entry: FString name followed by u64 flags."""
    flags_blob = struct.pack("<Q", entry.flags)
    return write_fstring_bytes(entry.name) + flags_blob
1090
+
1091
+
1092
def serialize_import_entry(entry: "ImportEntry", summary: Optional["FileSummary"] = None) -> bytes:
    """Serialize an import-table entry.

    Layout: class_package FName, class_name FName, outer_index i32,
    object_name FName.
    """
    out = bytearray()
    out += serialize_fname(entry.class_package, summary)
    out += serialize_fname(entry.class_name, summary)
    out += struct.pack("<i", entry.outer_index)
    out += serialize_fname(entry.object_name, summary)
    return bytes(out)
1099
+
1100
+
1101
def serialize_export_entry(entry: "ExportEntry", summary: Optional["FileSummary"] = None) -> bytes:
    """Serialize an export-table entry in on-disk layout.

    Fixed fields, then a counted i32 net_objects array, the package guid
    (four u32s) and package flags.
    """
    parts = [
        struct.pack("<i", entry.class_index),
        struct.pack("<i", entry.super_index),
        struct.pack("<i", entry.outer_index),
        serialize_fname(entry.object_name, summary),
        struct.pack("<i", entry.archetype_index),
        struct.pack("<Q", entry.object_flags),
        struct.pack("<i", entry.serial_size),
        struct.pack("<q", entry.serial_offset),
        struct.pack("<i", entry.export_flags),
        struct.pack("<i", len(entry.net_objects)),
    ]
    parts.extend(struct.pack("<i", value) for value in entry.net_objects)
    parts.append(struct.pack("<IIII", *entry.package_guid))
    parts.append(struct.pack("<i", entry.package_flags))
    return b"".join(parts)
1118
+
1119
+
1120
def _find_summary_offsets(data: bytes) -> Dict[str, int]:
    """Walk the package summary and record the file offset of each field.

    The summary must be read strictly sequentially (it contains a
    variable-length FString and guid), so offsets are captured with tell()
    immediately before each read. Returns a dict of "<field>_offset" keys
    plus the generation count/entries positions used by
    _patch_generation_counts. Raises ValueError on a bad magic tag.
    """
    bio = io.BytesIO(data)
    r = BinaryReader(bio)
    if r.read_u32() != PACKAGE_FILE_TAG:
        raise ValueError("Not a valid Unreal Engine package")
    # file version + licensee version
    r.read_u16()
    r.read_u16()
    total_header_size_offset = bio.tell()
    r.read_i32()
    # folder name (variable-length FString)
    r.read_fstring()
    package_flags_offset = bio.tell()
    r.read_u32()
    name_count_offset = bio.tell()
    r.read_i32()
    name_offset_offset = bio.tell()
    r.read_i32()
    export_count_offset = bio.tell()
    r.read_i32()
    export_offset_offset = bio.tell()
    r.read_i32()
    import_count_offset = bio.tell()
    r.read_i32()
    import_offset_offset = bio.tell()
    r.read_i32()
    depends_offset_offset = bio.tell()
    r.read_i32()
    import_export_guids_offset_offset = bio.tell()
    r.read_i32()
    # two further i32 fields we don't need to patch
    r.read_i32()
    r.read_i32()
    thumbnail_table_offset_offset = bio.tell()
    r.read_i32()
    # package guid
    read_guid(r)
    generations_count_offset = bio.tell()
    gen_count = r.read_i32()
    generation_entries_offset = bio.tell()
    return {
        "total_header_size_offset": total_header_size_offset,
        "package_flags_offset": package_flags_offset,
        "name_count_offset": name_count_offset,
        "name_offset_offset": name_offset_offset,
        "export_count_offset": export_count_offset,
        "export_offset_offset": export_offset_offset,
        "import_count_offset": import_count_offset,
        "import_offset_offset": import_offset_offset,
        "depends_offset_offset": depends_offset_offset,
        "import_export_guids_offset_offset": import_export_guids_offset_offset,
        "thumbnail_table_offset_offset": thumbnail_table_offset_offset,
        "generations_count_offset": generations_count_offset,
        "generation_entries_offset": generation_entries_offset,
        "generation_count": gen_count,
    }
1172
+
1173
+
1174
+ def _patch_generation_counts(data: bytearray, offsets: Dict[str, int], export_count: int, name_count: int) -> None:
1175
+ gen_count = offsets.get("generation_count", 0)
1176
+ if gen_count <= 0:
1177
+ return
1178
+ base = offsets["generation_entries_offset"] + (gen_count - 1) * 12
1179
+ if base + 8 > len(data):
1180
+ return
1181
+ patch_i32_le(data, base, export_count)
1182
+ patch_i32_le(data, base + 4, name_count)
1183
+
1184
+
1185
def _replace_header_tables(package: ParsedPackage, names: List[NameEntry], imports: List[ImportEntry]) -> bytes:
    """Rebuild the package with new name and import tables.

    Re-serializes names, imports and (copied) exports back-to-back starting
    at the original name_offset, shifts export serial_offsets by the size
    delta, patches all summary fields that point past the rewritten region,
    and appends the original bytes from the old depends table onward.
    Returns the complete new package bytes.
    """
    summary = package.summary
    offsets = _find_summary_offsets(package.file_bytes)
    old_depends_offset = summary.depends_offset

    # Everything before the name table is carried over verbatim.
    prefix = bytearray(package.file_bytes[:summary.name_offset])
    # Deep-copy exports so serial_offset adjustments don't mutate the input.
    patched_exports: List[ExportEntry] = []
    for x in package.exports:
        patched_exports.append(ExportEntry(
            table_index=x.table_index,
            class_index=x.class_index,
            super_index=x.super_index,
            outer_index=x.outer_index,
            object_name=FNameRef(x.object_name.name_index, x.object_name.instance_number),
            archetype_index=x.archetype_index,
            object_flags=x.object_flags,
            serial_size=x.serial_size,
            serial_offset=x.serial_offset,
            export_flags=x.export_flags,
            net_objects=list(x.net_objects),
            package_guid=x.package_guid,
            package_flags=x.package_flags,
        ))

    names_blob = b"".join(serialize_name_entry(x) for x in names)
    imports_blob = b"".join(serialize_import_entry(x, summary) for x in imports)
    export_offset = summary.name_offset + len(names_blob) + len(imports_blob)
    depends_offset = export_offset + sum(len(serialize_export_entry(x, summary)) for x in patched_exports)
    delta = depends_offset - old_depends_offset

    # Shift export data offsets by the header growth before serializing the
    # export table, then recompute depends_offset/delta from the final blob
    # (the shift itself does not change entry sizes, but recomputing keeps
    # the values trivially consistent).
    if delta != 0:
        for exp in patched_exports:
            if exp.serial_offset >= old_depends_offset:
                exp.serial_offset += delta

    exports_blob = b"".join(serialize_export_entry(x, summary) for x in patched_exports)
    depends_offset = export_offset + len(exports_blob)
    delta = depends_offset - old_depends_offset

    header_blob = prefix + names_blob + imports_blob + exports_blob
    patch_i32_le(header_blob, offsets["name_count_offset"], len(names))
    patch_i32_le(header_blob, offsets["name_offset_offset"], summary.name_offset)
    patch_i32_le(header_blob, offsets["export_count_offset"], len(patched_exports))
    patch_i32_le(header_blob, offsets["export_offset_offset"], export_offset)
    patch_i32_le(header_blob, offsets["import_count_offset"], len(imports))
    patch_i32_le(header_blob, offsets["import_offset_offset"], summary.name_offset + len(names_blob))
    patch_i32_le(header_blob, offsets["depends_offset_offset"], depends_offset)

    # Offsets that point past the rewritten region move by delta; zero means
    # "absent" and is left alone.
    import_export_guids_offset = summary.import_export_guids_offset
    if import_export_guids_offset >= old_depends_offset and import_export_guids_offset != 0:
        import_export_guids_offset += delta
    patch_i32_le(header_blob, offsets["import_export_guids_offset_offset"], import_export_guids_offset)

    thumbnail_table_offset = summary.thumbnail_table_offset
    if thumbnail_table_offset >= old_depends_offset and thumbnail_table_offset != 0:
        thumbnail_table_offset += delta
    if "thumbnail_table_offset_offset" in offsets:
        patch_i32_le(header_blob, offsets["thumbnail_table_offset_offset"], thumbnail_table_offset)

    # NOTE: total_header_size is intentionally written back UNCHANGED. In a
    # decrypted RL package this field carries over the value from the original
    # encrypted file (unpack_package copies the encrypted prefix verbatim into
    # the decrypted output and never adjusts this field). The encrypted-save
    # path (build_reencrypted_package) computes its own correct value from
    # name_offset + encrypted_plain_len + garbage_size and patches it
    # independently, so the value we write here only matters for
    # 'Save Decrypted UPK' where preserving the original-encrypted semantics
    # is the right behaviour. An earlier attempt to "fix" this by adding the
    # names+imports growth delta produced corrupt encrypted files because the
    # delta concept doesn't apply to the encrypted-layout meaning of this field.
    patch_i32_le(header_blob, offsets["total_header_size_offset"], summary.total_header_size)
    _patch_generation_counts(header_blob, offsets, len(patched_exports), len(names))

    new_data = bytearray()
    new_data += header_blob
    new_data += package.file_bytes[old_depends_offset:]
    return bytes(new_data)
1262
+
1263
+
1264
+ def _split_name_instance(text: str) -> Tuple[str, int]:
1265
+ if '_' in text:
1266
+ base, suffix = text.rsplit('_', 1)
1267
+ if suffix.isdigit():
1268
+ return base, int(suffix)
1269
+ return text, 0
1270
+
1271
+
1272
def _find_existing_name_ref(names: List["NameEntry"], text: str) -> Optional["FNameRef"]:
    """Resolve *text* (base[_instance]) against the name table, or None."""
    base, instance = _split_name_instance(text)
    match = next((entry for entry in names if entry.name == base), None)
    if match is None:
        return None
    return FNameRef(match.index, instance)
1278
+
1279
+
1280
def _ensure_name_entry(names: List["NameEntry"], text: str, flags: int = 0) -> "FNameRef":
    """Return a ref for *text*, appending a new name entry when missing."""
    existing = _find_existing_name_ref(names, text)
    if existing is not None:
        return existing
    base, instance = _split_name_instance(text)
    new_index = len(names)
    names.append(NameEntry(index=new_index, name=base, flags=flags))
    return FNameRef(new_index, instance)
1287
+
1288
+
1289
def import_donor_names(package: ParsedPackage, donor_package: ParsedPackage, selected_names: Optional[List[str]] = None) -> ParsedPackage:
    """Copy donor name-table entries into *package*.

    When *selected_names* is given, only those names are considered. Names
    already present (matched the same way _find_existing_name_ref matches:
    the donor name's split base compared against stored entry names) are
    skipped. Returns a package whose `_merge_added_names` attribute reports
    how many entries were added; when nothing was added the original tables
    are reused without rewriting any file.
    """
    names = [NameEntry(index=n.index, name=n.name, flags=n.flags) for n in package.names]
    wanted = None if not selected_names else set(selected_names)
    # Performance fix: the previous implementation called
    # _find_existing_name_ref (a linear scan of the growing list) for every
    # donor entry, which is O(n^2). A set of stored entry names gives the
    # identical membership answer in O(1): _find_existing_name_ref(names, t)
    # is non-None exactly when _split_name_instance(t)[0] equals some
    # entry.name.
    known = {entry.name for entry in names}
    added = 0
    for entry in donor_package.names:
        if wanted is not None and entry.name not in wanted:
            continue
        if _split_name_instance(entry.name)[0] not in known:
            names.append(NameEntry(index=len(names), name=entry.name, flags=entry.flags))
            known.add(entry.name)
            added += 1
    if added == 0:
        # Nothing to merge: hand back the same tables without a rewrite.
        result = ParsedPackage(package.file_path, package.summary, names, package.imports, package.exports, package.file_bytes)
        setattr(result, '_merge_added_names', 0)
        return result
    patched = _replace_header_tables(package, names, package.imports)
    temp_path = package.file_path.with_name(package.file_path.stem + '_names_merged.upk')
    temp_path.write_bytes(patched)
    result = parse_decrypted_package(temp_path)
    setattr(result, '_merge_added_names', added)
    return result
1309
+
1310
+
1311
+ def _collect_existing_import_paths(package: ParsedPackage) -> Dict[str, int]:
1312
+ out: Dict[str, int] = {}
1313
+ for i in range(len(package.imports)):
1314
+ out[package.resolve_object_path(-(i + 1))] = -(i + 1)
1315
+ return out
1316
+
1317
+
1318
+ def _class_package_and_name_for_ref(package: ParsedPackage, class_index: int) -> Tuple[str, str]:
1319
+ if class_index == 0:
1320
+ return "Core", "Class"
1321
+ path = package.resolve_object_path(class_index)
1322
+ parts = [p for p in path.split('.') if p and p != 'None']
1323
+ if not parts:
1324
+ return "Core", "Class"
1325
+ if len(parts) == 1:
1326
+ return "Core", parts[-1]
1327
+ return parts[-2], parts[-1]
1328
+
1329
+
1330
+ def _derive_donor_package_name(donor_package: ParsedPackage, override: Optional[str] = None) -> str:
1331
+ """Return the package name UE will use to LoadPackage the donor at runtime.
1332
+
1333
+ Priority:
1334
+ 1. Explicit override from the caller (e.g. user typed it in).
1335
+ 2. The donor file's stem (e.g. 'MyDonorAssets.upk' -> 'MyDonorAssets').
1336
+ This is what the engine resolves through its package search paths,
1337
+ so it must match how the file is actually deployed in the game's
1338
+ cooked content directory.
1339
+ 3. The donor's embedded summary.folder_name. Often empty in cooked
1340
+ RL packages but used as a last resort.
1341
+
1342
+ Raises ValueError if no usable name can be derived.
1343
+ """
1344
+ if override and override.strip():
1345
+ return override.strip()
1346
+ stem = donor_package.file_path.stem
1347
+ # Strip our own '_decrypted' / '_decompressed' suffixes that resolve_input_package
1348
+ # appends when it produces a working copy - the file the game loads has the
1349
+ # original stem.
1350
+ for suffix in ("_decrypted", "_decompressed"):
1351
+ if stem.endswith(suffix):
1352
+ stem = stem[: -len(suffix)]
1353
+ break
1354
+ if stem:
1355
+ return stem
1356
+ folder = (donor_package.summary.folder_name or "").strip()
1357
+ if folder:
1358
+ return folder
1359
+ raise ValueError("Could not determine donor package name; pass it explicitly")
1360
+
1361
+
1362
def merge_donor_exports_as_imports(target_package: ParsedPackage, donor_package: ParsedPackage, donor_package_name: Optional[str] = None) -> ParsedPackage:
    """Re-import every donor export into the target's import table.

    Each donor export becomes an ImportEntry in *target_package*, rooted
    (directly or via its outer chain) under a Core.Package import named
    after the donor package so the engine can locate the donor file at
    runtime. Donor imports referenced along the outer chain are shared with
    existing target imports by path. Returns a re-parsed package with
    `_merge_added_imports` / `_merge_added_names` /
    `_merge_donor_export_count` / `_merge_donor_package_name` attributes set.
    """
    # The donor's package name is what the engine will look up at runtime to
    # locate and LoadPackage the donor .upk. Every donor export we re-import
    # MUST be rooted under a Core.Package import with this name, otherwise
    # the engine has no way to know which file to open to resolve the
    # reference. Previously donor root exports were imported with
    # outer_index=0 (i.e. as if they themselves were top-level packages),
    # which left the engine unable to resolve them.
    resolved_donor_name = _derive_donor_package_name(donor_package, donor_package_name)

    # Work on copies so the input package's tables are never mutated.
    names = [NameEntry(index=n.index, name=n.name, flags=n.flags) for n in target_package.names]
    imports = [ImportEntry(table_index=i, class_package=FNameRef(x.class_package.name_index, x.class_package.instance_number), class_name=FNameRef(x.class_name.name_index, x.class_name.instance_number), outer_index=x.outer_index, object_name=FNameRef(x.object_name.name_index, x.object_name.instance_number)) for i, x in enumerate(target_package.imports)]
    existing_paths = _collect_existing_import_paths(target_package)
    # donor index -> merged import index, to avoid duplicating entries when
    # multiple exports share an outer chain.
    donor_cache: Dict[int, int] = {}

    def ensure_package_root(package_name: str) -> int:
        # Find or create a top-level Core.Package import for *package_name*.
        existing = existing_paths.get(package_name)
        if existing is not None:
            return existing
        cp = _ensure_name_entry(names, 'Core')
        cn = _ensure_name_entry(names, 'Package')
        on = _ensure_name_entry(names, package_name)
        imports.append(ImportEntry(len(imports), cp, cn, 0, on))
        idx = -len(imports)
        existing_paths[package_name] = idx
        return idx

    # Pre-create the donor package import up front. Even if no donor exports
    # ended up needing it (e.g. all collisions with existing imports), having
    # this entry guarantees the engine will attempt to load the donor file
    # when the target is loaded, which is what users typically want when
    # they "import donor exports".
    donor_root_index = ensure_package_root(resolved_donor_name)

    def ensure_donor_object(index: int) -> int:
        # Recursively mirror a donor object (export: index > 0, import:
        # index < 0) into the merged import table, returning its new
        # negative import index (or an existing one on path collision).
        if index == 0:
            return 0
        if index in donor_cache:
            return donor_cache[index]
        path = donor_package.resolve_object_path(index)
        # When matching against existing target imports, prepend the donor
        # package name so a donor export "Foo" doesn't collide with an
        # unrelated existing import literally named "Foo". For donor
        # imports we keep the original path because those refer to the same
        # external packages (Engine, Core, etc.) the target may also
        # reference, and we WANT to share those.
        scoped_path = f"{resolved_donor_name}.{path}" if index > 0 else path
        if scoped_path in existing_paths:
            donor_cache[index] = existing_paths[scoped_path]
            return existing_paths[scoped_path]
        if index > 0:
            obj = donor_package.exports[index - 1]
            obj_name = donor_package.resolve_name(obj.object_name)
            outer_index = ensure_donor_object(obj.outer_index) if obj.outer_index else 0
            if outer_index == 0:
                # Root donor export: parent it to the donor package import so
                # the engine knows to LoadPackage(donor_name) to resolve it.
                outer_index = donor_root_index
            class_pkg_name, class_name_name = _class_package_and_name_for_ref(donor_package, obj.class_index)
        else:
            obj = donor_package.imports[-index - 1]
            obj_name = donor_package.resolve_name(obj.object_name)
            outer_index = ensure_donor_object(obj.outer_index) if obj.outer_index else 0
            class_pkg_name = donor_package.resolve_name(obj.class_package)
            class_name_name = donor_package.resolve_name(obj.class_name)
        cp = _ensure_name_entry(names, class_pkg_name)
        cn = _ensure_name_entry(names, class_name_name)
        on = _ensure_name_entry(names, obj_name)
        imports.append(ImportEntry(len(imports), cp, cn, outer_index, on))
        new_index = -len(imports)
        donor_cache[index] = new_index
        existing_paths[scoped_path] = new_index
        return new_index

    imported = 0
    for i in range(1, len(donor_package.exports) + 1):
        before = len(imports)
        ensure_donor_object(i)
        if len(imports) != before:
            imported += 1

    patched = _replace_header_tables(target_package, names, imports)
    result = parse_decrypted_package_bytes(target_package.file_path, patched)
    setattr(result, '_merge_added_imports', len(imports) - len(target_package.imports))
    setattr(result, '_merge_added_names', len(names) - len(target_package.names))
    setattr(result, '_merge_donor_export_count', len(donor_package.exports))
    setattr(result, '_merge_donor_package_name', resolved_donor_name)
    return result
1450
+
1451
+
1452
+
1453
def replace_export_with_donor_export(target_package: ParsedPackage, donor_package: ParsedPackage, target_export_path: str, donor_export_path: str) -> ParsedPackage:
    """Copy a donor export's raw serial data over a target export.

    First merges the donor's names and exports-as-imports into the target,
    then splices the donor export's bytes over the target export's serial
    data, patching the target entry's serial_size and shifting the
    serial_offset of every export located after it. Classes of the two
    exports must match. Raises ValueError on unresolved paths, class
    mismatch or an empty donor payload.
    """
    merged = import_donor_names(target_package, donor_package, None)
    merged = merge_donor_exports_as_imports(merged, donor_package)

    target_index = resolve_object_index_by_text(merged, target_export_path)
    donor_index = resolve_object_index_by_text(donor_package, donor_export_path)
    if target_index is None or target_index <= 0:
        raise ValueError(f"Target export not found: {target_export_path}")
    if donor_index is None or donor_index <= 0:
        raise ValueError(f"Donor export not found: {donor_export_path}")

    target_export = merged.exports[target_index - 1]
    donor_export = donor_package.exports[donor_index - 1]

    target_class = merged.export_class_name(target_export)
    donor_class = donor_package.export_class_name(donor_export)
    if target_class != donor_class:
        raise ValueError(f"Class mismatch: target is {target_class}, donor is {donor_class}")

    donor_bytes = donor_package.object_data(donor_export)
    if not donor_bytes:
        raise ValueError("Donor export has no serial data")

    # Splice the donor payload in place of the target export's serial data.
    size_delta = len(donor_bytes) - target_export.serial_size
    new_data = bytearray()
    new_data += merged.file_bytes[:target_export.serial_offset]
    new_data += donor_bytes
    new_data += merged.file_bytes[target_export.serial_offset + target_export.serial_size:]

    export_entry_offsets = get_export_entry_offsets(merged)
    entry_offset = export_entry_offsets[target_index - 1]
    # serial_size lives at entry_offset + 32 in the export-entry layout.
    patch_i32_le(new_data, entry_offset + 32, len(donor_bytes))

    # Every export stored after the replaced one moved by size_delta;
    # serial_offset lives at entry_offset + 36.
    for idx, other in enumerate(merged.exports):
        if idx == target_index - 1:
            continue
        if other.serial_offset > target_export.serial_offset:
            other_entry_offset = export_entry_offsets[idx]
            patch_i64_le(new_data, other_entry_offset + 36, other.serial_offset + size_delta)

    result = parse_decrypted_package_bytes(merged.file_path, bytes(new_data))
    setattr(result, '_replace_target_export_path', target_export_path)
    setattr(result, '_replace_donor_export_path', donor_export_path)
    setattr(result, '_replace_note', 'Raw donor serial data copied into target export. Name/index remapping inside arbitrary native data is not performed.')
    return result
1498
+
1499
+
1500
def rename_export_fname(package: ParsedPackage, export: ExportEntry, new_name_text: str) -> ParsedPackage:
    """Rename the FName (object_name) of a single export.

    Accepts either a bare base name ("MyName") or a base+instance form ("MyName_3").
    If the base name already exists in the package's name table, the export entry
    is patched in place (8 bytes at object_name field). If the base name is new,
    it is appended to the name table via _replace_header_tables, and the export's
    FName field is then patched to point at the newly added name.

    Args:
        package: Package to modify; its bytes are copied, never mutated.
        export: The export whose object_name should change.
        new_name_text: New name, optionally carrying an "_<N>" suffix.

    Raises:
        ValueError: On an empty name/base, a negative instance number, or an
            out-of-range export table index.

    Returns:
        A re-parsed ParsedPackage with _rename_added_names (0 or 1),
        _rename_new_name and _rename_export_index attributes attached.
    """
    new_name_text = (new_name_text or "").strip()
    if not new_name_text:
        raise ValueError("Empty FName")
    base, instance = _split_name_instance(new_name_text)
    if not base:
        raise ValueError("Empty base name")
    if instance < 0:
        raise ValueError("Instance number must be >= 0")

    # Locate where this export's entry sits inside the export table so we can
    # patch the 8-byte object_name field in place. Layout of an export entry:
    #   class_index (i32) | super_index (i32) | outer_index (i32) |
    #   object_name (i32 name_index + i32 instance_number) | ...
    # so object_name starts at entry_offset + 12.
    export_entry_offsets = get_export_entry_offsets(package)
    if export.table_index < 0 or export.table_index >= len(export_entry_offsets):
        raise ValueError("Export table index out of range")
    fname_field_abs = export_entry_offsets[export.table_index] + 12

    # The user-typed instance number is the "displayed" value (0 for no
    # suffix, 3 for _3, etc.). On disk for >= NUMBER_ADDED_TO_NAME the
    # stored value is (instance + 1), so we add 1 here. For older versions
    # the stored value equals the displayed value.
    # NOTE(review): a displayed instance of exactly 0 is stored as raw 0
    # ("no suffix"); confirm _split_name_instance never reports 0 for a
    # literal "_0" suffix if that distinction matters.
    if package.summary.file_version >= NUMBER_ADDED_TO_NAME:
        stored_instance = instance + 1 if instance > 0 else 0
    else:
        stored_instance = instance

    # Try to reuse an existing name first.
    existing_idx: Optional[int] = None
    for entry in package.names:
        if entry.name == base:
            existing_idx = entry.index
            break

    if existing_idx is not None:
        # Fast path: in-place 8-byte patch of the export entry's FName field.
        new_data = bytearray(package.file_bytes)
        new_data[fname_field_abs:fname_field_abs + 8] = struct.pack("<ii", existing_idx, stored_instance)
        result = parse_decrypted_package_bytes(package.file_path, bytes(new_data))
        setattr(result, '_rename_added_names', 0)
        setattr(result, '_rename_new_name', new_name_text)
        setattr(result, '_rename_export_index', export.table_index)
        return result

    # Slow path: append a new name entry and rebuild the header tables. The
    # rebuild may shift the depends_offset and any export serial_offsets that
    # come after it, but the entries inside the export table itself stay at the
    # same relative positions because _replace_header_tables preserves their
    # order. So entry_offsets of the rebuilt file equal new_export_offset +
    # (old_offset - old_export_offset).
    names = [NameEntry(index=n.index, name=n.name, flags=n.flags) for n in package.names]
    new_entry_index = len(names)
    names.append(NameEntry(index=new_entry_index, name=base, flags=0))

    rebuilt_bytes = bytearray(_replace_header_tables(package, names, package.imports))

    # Recompute export entry offsets in the rebuilt file (their positions can
    # shift because the names table grew) and patch the FName field.
    rebuilt_pkg = parse_decrypted_package_bytes(package.file_path, bytes(rebuilt_bytes))
    new_offsets = get_export_entry_offsets(rebuilt_pkg)
    if export.table_index >= len(new_offsets):
        raise ValueError("Export entry not present after header rebuild")
    new_fname_field_abs = new_offsets[export.table_index] + 12
    rebuilt_bytes[new_fname_field_abs:new_fname_field_abs + 8] = struct.pack("<ii", new_entry_index, stored_instance)

    result = parse_decrypted_package_bytes(package.file_path, bytes(rebuilt_bytes))
    setattr(result, '_rename_added_names', 1)
    setattr(result, '_rename_new_name', new_name_text)
    setattr(result, '_rename_export_index', export.table_index)
    return result
1580
+
1581
+
1582
def rename_name_entry(package: ParsedPackage, name_index: int, new_text: str) -> ParsedPackage:
    """Change the string stored in one name-table slot, keeping its index.

    Every FNameRef in the package (exports, imports, serialized property
    tags, object names, ...) refers to names by index, so rewriting the
    text in place keeps all references valid while changing what they
    display. Because the encoded names blob almost always changes length
    (different string length, or ANSI vs UTF-16), the whole header is
    rebuilt via _replace_header_tables, which recomputes the table offsets
    that follow name_offset and shifts every export serial_offset by the
    resulting delta, keeping the package internally consistent.

    Args:
        package: The package to modify.
        name_index: Zero-based index into package.names of the entry.
        new_text: Replacement text; must be a bare base name with no
            "_<N>" instance suffix (instances live on FNameRefs, not on
            name table entries).

    Raises:
        ValueError: Out-of-range index, empty text, an instance suffix,
            or a collision with another existing name entry.

    Returns:
        A freshly parsed ParsedPackage annotated with _name_rename_index,
        _name_rename_old, _name_rename_new and _name_size_delta.
    """
    cleaned = (new_text or "").strip()
    if not cleaned:
        raise ValueError("Empty name text")
    if not (0 <= name_index < len(package.names)):
        raise ValueError(f"Name index {name_index} out of range (0..{len(package.names) - 1})")

    # Instance suffixes belong on FName references, never on table entries.
    _, suffix = _split_name_instance(cleaned)
    if suffix != 0:
        raise ValueError(
            "Name entries cannot include an instance suffix like '_3'. "
            "Instance numbers live on each FName reference, not on the "
            "name table entry. Use the bare base name (e.g. 'MyName')."
        )

    current = package.names[name_index]
    if current.name == cleaned:
        # Nothing to change; still hand back a fresh parse with the usual
        # rename metadata so callers can treat all outcomes uniformly.
        unchanged = parse_decrypted_package_bytes(package.file_path, bytes(package.file_bytes))
        setattr(unchanged, '_name_rename_index', name_index)
        setattr(unchanged, '_name_rename_old', current.name)
        setattr(unchanged, '_name_rename_new', cleaned)
        setattr(unchanged, '_name_size_delta', 0)
        return unchanged

    # A duplicate base name would make lookups ambiguous; merging the two
    # would mean remapping every FNameRef to one surviving index, which
    # this helper deliberately does not attempt.
    for other in package.names:
        if other.index != name_index and other.name == cleaned:
            raise ValueError(
                f"Name '{cleaned}' already exists at index {other.index}. "
                "Renaming would create a duplicate base name. Choose a unique "
                "name, or rename the other entry first."
            )

    # Compute the byte delta for status reporting, then rebuild the header
    # with the single modified entry.
    replacement = NameEntry(index=name_index, name=cleaned, flags=current.flags)
    delta = len(serialize_name_entry(replacement)) - len(serialize_name_entry(current))

    updated_names = [NameEntry(index=n.index, name=n.name, flags=n.flags) for n in package.names]
    updated_names[name_index] = replacement

    rebuilt = parse_decrypted_package_bytes(
        package.file_path,
        _replace_header_tables(package, updated_names, package.imports),
    )
    setattr(rebuilt, '_name_rename_index', name_index)
    setattr(rebuilt, '_name_rename_old', current.name)
    setattr(rebuilt, '_name_rename_new', cleaned)
    setattr(rebuilt, '_name_size_delta', delta)
    return rebuilt
1679
+
1680
+
1681
def resolve_object_index_by_text(package: ParsedPackage, text: str) -> Optional[int]:
    """Resolve user-typed text to a signed object index.

    Accepts, in order of preference: a literal integer (any base prefix
    int() understands), an explicit "Import[N]" / "Export[N]" table
    reference, or a fully-qualified object path matched against every
    export and import. Returns None when nothing matches.
    """
    query = text.strip()
    try:
        return int(query, 0)
    except Exception:
        pass
    # The pattern is anchored at the start, so a separate prefix check is
    # unnecessary; non-matching strings fall through to the path search.
    table_ref = re.match(r'^(Import|Export)\[(\d+)\]', query)
    if table_ref is not None:
        kind, raw_num = table_ref.groups()
        table_pos = int(raw_num)
        return -(table_pos + 1) if kind == 'Import' else (table_pos + 1)
    for pos, _ in enumerate(package.exports, start=1):
        if package.resolve_object_path(pos) == query:
            return pos
    for pos, _ in enumerate(package.imports, start=1):
        if package.resolve_object_path(-pos) == query:
            return -pos
    return None
1700
+
1701
+
1702
+ # ── DLLBind support ──────────────────────────────────────────────────────────
1703
+ #
1704
+ # In UE3 the compiler keyword `DLLBind(SomeDLL)` on a class declaration
1705
+ # stores the DLL name as an FString field called DLLBindName inside the
1706
+ # UClass serial body. It is the LAST field serialized by UClass::Serialize,
1707
+ # immediately after NativeClassName (also an FString).
1708
+ #
1709
+ # When the engine loads the package it reads this field and calls
1710
+ # LoadLibrary on the named DLL before the class is fully initialised,
1711
+ # making DLLBind a clean DLL-injection point for Rocket League mods.
1712
+ #
1713
+ # Binary layout at the tail of a cooked UClass serial body:
1714
+ # [... UClass-specific fields ...]
1715
+ # NativeClassName : FString (usually empty → 4 zero bytes)
1716
+ # DLLBindName : FString (empty = 4 zero bytes; or len+chars+NUL)
1717
+ #
1718
+ # FString encoding: int32 length (including NUL) then ASCII bytes + NUL.
1719
+ # Length == 0 means empty string (no NUL follows).
1720
+
1721
def is_uclass_export(package: ParsedPackage, export: ExportEntry) -> bool:
    """True when *export* is itself a class definition (its class resolves to "Class")."""
    resolved_class = package.export_class_name(export)
    return resolved_class == "Class"
1724
+
1725
+
1726
def find_uclass_dllbind_fstring_offset(raw: bytes) -> Optional[Tuple[int, str]]:
    """Locate the trailing DLLBindName FString in a UClass serial body.

    DLLBindName is the last field serialized by UClass::Serialize, so the
    search walks candidate start offsets backwards over roughly the last
    268 bytes of *raw*, accepting the first FString whose encoded span
    ends exactly at the end of the buffer.

    Returns:
        (offset_of_length_field, dll_name) — dll_name is "" for an empty
        (unbound) FString — or None when no plausible FString terminates
        the buffer.
    """
    total = len(raw)
    if total < 4:
        return None

    # DLL names are short (< 260 chars), so the whole field occupies at
    # most 4 + 260 bytes; restrict the scan to that tail window.
    window_start = max(0, total - 264 - 4)

    # Candidate layout: int32 length at `candidate`, then `length` bytes
    # of ASCII data; a valid hit satisfies candidate + 4 + length == total.
    candidate = total - 4
    while candidate >= window_start:
        try:
            str_len = struct.unpack_from("<i", raw, candidate)[0]
        except struct.error:
            return None

        if str_len == 0:
            # An empty FString is exactly the 4-byte zero length field.
            if candidate + 4 == total:
                return candidate, ""
            # Otherwise this zero may be padding ahead of the real field.
            candidate -= 1
            continue

        # Negative lengths would signal UTF-16 (unexpected for DLL names);
        # oversized values are treated as unrelated bytes.
        if 0 < str_len <= 260 and candidate + 4 + str_len == total:
            payload = raw[candidate + 4:total]
            if len(payload) == str_len and payload[-1] == 0:
                try:
                    name = payload[:-1].decode("ascii")
                except (UnicodeDecodeError, ValueError):
                    name = None
                if name is not None and name.isprintable():
                    return candidate, name
        candidate -= 1

    return None
1791
+
1792
+
1793
def set_uclass_dllbind_name(package: ParsedPackage, export: ExportEntry, dll_name: str) -> bytes:
    """Inject or replace the DLLBindName FString in a UClass serial body.

    *dll_name* is the bare DLL name (e.g. ``'CodeRed.dll'``). Pass an empty
    string to remove an existing DLLBind.

    Returns the full modified package bytes. The export's serial_size is
    updated and all subsequent exports' serial_offset values are shifted as
    required, mirroring the variable-size StrProperty edit logic.

    Raises ValueError if the DLLBindName field cannot be found (e.g. the
    export is not a UClass) or if *dll_name* contains non-ASCII characters.
    """
    if dll_name and not dll_name.isascii():
        raise ValueError("DLL name must be ASCII (no unicode characters).")

    raw = package.object_data(export)
    if not raw:
        raise ValueError("Export has no serial data.")

    result = find_uclass_dllbind_fstring_offset(raw)
    if result is None:
        raise ValueError(
            "Could not locate DLLBindName in this export's serial data.\n"
            "Make sure the selected export is a UClass (class definition) "
            "and that its serial body is intact."
        )

    fstring_offset, current_dll_name = result

    # Build old and new FString byte representations.
    # Encoding: int32 length (including NUL) + ASCII bytes + NUL;
    # a zero length means empty string with no data bytes.
    def encode_fstring(name: str) -> bytes:
        if not name:
            return struct.pack("<i", 0)
        enc = name.encode("ascii") + b"\x00"
        return struct.pack("<i", len(enc)) + enc

    old_fstring = encode_fstring(current_dll_name)
    new_fstring = encode_fstring(dll_name)

    if old_fstring == new_fstring:
        return bytes(package.file_bytes)  # nothing to do

    size_delta = len(new_fstring) - len(old_fstring)

    # Absolute byte range of the old FString inside the package file.
    abs_start = export.serial_offset + fstring_offset
    abs_end = abs_start + len(old_fstring)

    # Splice the new FString in place of the old one.
    new_data = bytearray()
    new_data += package.file_bytes[:abs_start]
    new_data += new_fstring
    new_data += package.file_bytes[abs_end:]

    if size_delta != 0:
        export_entry_offsets = get_export_entry_offsets(package)
        if export.table_index >= len(export_entry_offsets):
            raise ValueError("Export table index out of range.")

        # Patch this export's serial_size (at entry_offset + 32).
        entry_offset = export_entry_offsets[export.table_index]
        patch_i32_le(new_data, entry_offset + 32, export.serial_size + size_delta)

        # Shift all exports whose bodies come after this one (at entry_offset + 36).
        # Entries at or before the edited body keep their original offsets.
        for idx, other in enumerate(package.exports):
            if idx == export.table_index:
                continue
            if other.serial_offset > export.serial_offset:
                other_entry_offset = export_entry_offsets[idx]
                patch_i64_le(new_data, other_entry_offset + 36, other.serial_offset + size_delta)

    return bytes(new_data)
1865
+
1866
+
1867
class NativeWindowsDropTarget:
    """Win32 drag-and-drop shim for a Tk widget.

    Subclasses the widget's native window procedure via SetWindowLongPtrW
    and registers it with DragAcceptFiles so WM_DROPFILES messages can be
    translated into a Python callback receiving the dropped file paths.
    On non-Windows platforms construction is a harmless no-op.
    """

    WM_DROPFILES = 0x0233
    GWL_WNDPROC = -4

    def __init__(self, widget: tk.Misc, callback):
        # callback is invoked on the Tk event loop with a list of paths.
        self.widget = widget
        self.callback = callback
        self.enabled = False
        if sys.platform != "win32":
            return
        self.user32 = ctypes.windll.user32
        self.shell32 = ctypes.windll.shell32
        # Declare argument/return types explicitly: the default c_int
        # conversions would truncate 64-bit handles and pointers.
        self.user32.SetWindowLongPtrW.restype = ctypes.c_void_p
        self.user32.SetWindowLongPtrW.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
        self.user32.CallWindowProcW.restype = ctypes.c_longlong
        self.user32.CallWindowProcW.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_void_p, ctypes.c_void_p]
        self.shell32.DragAcceptFiles.argtypes = [ctypes.c_void_p, ctypes.c_bool]
        self.shell32.DragAcceptFiles.restype = None
        self.shell32.DragQueryFileW.argtypes = [ctypes.c_void_p, ctypes.c_uint, ctypes.c_wchar_p, ctypes.c_uint]
        self.shell32.DragQueryFileW.restype = ctypes.c_uint
        self.shell32.DragFinish.argtypes = [ctypes.c_void_p]
        self.shell32.DragFinish.restype = None
        # old_proc forwards unhandled messages; new_proc is kept as an
        # attribute so the ctypes trampoline is not garbage-collected.
        self.old_proc = None
        self.new_proc = None
        # Defer installation so the widget's native window exists first.
        widget.after(100, self._install)

    def _install(self):
        # Hook the widget's HWND; bail out if it was destroyed meanwhile.
        if sys.platform != "win32" or not self.widget.winfo_exists():
            return
        hwnd = self.widget.winfo_id()
        self.shell32.DragAcceptFiles(hwnd, True)
        WNDPROC = ctypes.WINFUNCTYPE(ctypes.c_longlong, ctypes.c_void_p, ctypes.c_uint, ctypes.c_void_p, ctypes.c_void_p)

        def _wnd_proc(hwnd, msg, wparam, lparam):
            if msg == self.WM_DROPFILES:
                hdrop = ctypes.c_void_p(wparam)
                # 0xFFFFFFFF asks DragQueryFileW for the file count.
                count = self.shell32.DragQueryFileW(hdrop, 0xFFFFFFFF, None, 0)
                files = []
                for i in range(count):
                    # First call with a NULL buffer returns the length.
                    length = self.shell32.DragQueryFileW(hdrop, i, None, 0)
                    buffer = ctypes.create_unicode_buffer(length + 1)
                    self.shell32.DragQueryFileW(hdrop, i, buffer, length + 1)
                    files.append(buffer.value)
                self.shell32.DragFinish(hdrop)
                # Marshal back onto the Tk event loop before calling out.
                self.widget.after(0, lambda: self.callback(files))
                return 0
            return self.user32.CallWindowProcW(self.old_proc, hwnd, msg, wparam, lparam)

        self.new_proc = WNDPROC(_wnd_proc)
        self.old_proc = self.user32.SetWindowLongPtrW(hwnd, self.GWL_WNDPROC, self.new_proc)
        self.enabled = True
1918
+
1919
+
1920
def read_tarray(reader: BinaryReader, read_item):
    """Read a TArray: an i32 element count, then that many read_item() calls."""
    element_count = reader.read_i32()
    items = []
    for _ in range(element_count):
        items.append(read_item(reader))
    return items
1923
+
1924
+
1925
def read_guid(reader: BinaryReader) -> Tuple[int, int, int, int]:
    """Read an FGuid: four consecutive little-endian u32 components."""
    return tuple(reader.read_u32() for _ in range(4))
1927
+
1928
+
1929
def read_generation(reader: BinaryReader) -> Tuple[int, int, int]:
    """Read one generation record: three consecutive i32 values."""
    first = reader.read_i32()
    second = reader.read_i32()
    third = reader.read_i32()
    return (first, second, third)
1931
+
1932
+
1933
def read_texture_allocation(reader: BinaryReader):
    """Consume one texture-allocation record; the values are not retained."""
    # Five fixed i32 fields ...
    for _ in range(5):
        reader.read_i32()
    # ... followed by a TArray<i32>: an i32 count, then that many i32s.
    entry_count = reader.read_i32()
    for _ in range(entry_count):
        reader.read_i32()
    return None
1941
+
1942
+
1943
def read_compact_index(reader: BinaryReader) -> int:
    """Decode a legacy UE compact (variable-length) signed index.

    The lead byte carries the sign bit (0x80), a continuation bit (0x40)
    and six value bits; each following byte carries a continuation bit
    (0x80) and seven value bits, up to five bytes total.
    """
    lead = reader.read_u8()
    value = 0
    if lead & 0x40:
        ext1 = reader.read_u8()
        if ext1 & 0x80:
            ext2 = reader.read_u8()
            if ext2 & 0x80:
                ext3 = reader.read_u8()
                if ext3 & 0x80:
                    # The fifth byte has no continuation bit; all 8 bits count.
                    value = reader.read_u8()
                value = (value << 7) | (ext3 & 0x7F)
            value = (value << 7) | (ext2 & 0x7F)
        value = (value << 7) | (ext1 & 0x7F)
    value = (value << 6) | (lead & 0x3F)
    return -value if (lead & 0x80) else value
1962
+
1963
+
1964
def read_index_pkg(reader: BinaryReader, package: ParsedPackage) -> int:
    """Read an index honoring the package's serialization era.

    Old packages (below COMPACT_INDEX_DEPRECATED) use the variable-length
    compact encoding; newer ones store a plain i32.
    """
    if package.summary.file_version < COMPACT_INDEX_DEPRECATED:
        return read_compact_index(reader)
    return reader.read_i32()
1968
+
1969
+
1970
def read_fname_pkg(reader: BinaryReader, package: ParsedPackage) -> FNameRef:
    """Read an FName reference using the package's own version rules.

    From NUMBER_ADDED_TO_NAME onward an i32 instance follows the index,
    stored as (number + 1), so -1 after adjustment means "no suffix".
    """
    idx = read_index_pkg(reader, package)
    if package.summary.file_version >= NUMBER_ADDED_TO_NAME:
        return FNameRef(idx, reader.read_i32() - 1)
    return FNameRef(idx, -1)
1977
+
1978
+
1979
def read_fname(reader: BinaryReader, summary: Optional["FileSummary"] = None) -> FNameRef:
    """Read an FName as two raw i32s, optionally version-adjusted.

    With a FileSummary supplied, the UE3 convention introduced at
    NUMBER_ADDED_TO_NAME is applied: the on-disk instance value is
    (number + 1), so 1 is subtracted to recover the in-memory number,
    where -1 means "no _<N> suffix" and 0/1/2/... are real suffixes
    (matching read_fname_pkg and UE Explorer's ReadNameReference).
    Without a summary both i32s are returned untouched, keeping the
    legacy raw behavior for any external caller that wants it.
    """
    idx = reader.read_i32()
    stored = reader.read_i32()
    adjust = summary is not None and summary.file_version >= NUMBER_ADDED_TO_NAME
    return FNameRef(idx, stored - 1 if adjust else stored)
1997
+
1998
+
1999
def read_name_entry(reader: BinaryReader, index: int) -> NameEntry:
    """Read one name-table record: an FString followed by u64 flags."""
    text = reader.read_fstring()
    flags = reader.read_u64()
    return NameEntry(index=index, name=text, flags=flags)
2001
+
2002
+
2003
def read_compressed_chunk_32(reader: BinaryReader) -> FCompressedChunk:
    """Read a summary-table FCompressedChunk (all four fields i32)."""
    uncomp_off = reader.read_i32()
    uncomp_size = reader.read_i32()
    comp_off = reader.read_i32()
    comp_size = reader.read_i32()
    return FCompressedChunk(
        uncompressed_offset=uncomp_off,
        uncompressed_size=uncomp_size,
        compressed_offset=comp_off,
        compressed_size=comp_size,
    )
2010
+
2011
+
2012
def read_compressed_chunk_64(reader: BinaryReader) -> FCompressedChunk:
    """Read an RL chunk-table FCompressedChunk (i64 offsets, i32 sizes)."""
    uncomp_off = reader.read_i64()
    uncomp_size = reader.read_i32()
    comp_off = reader.read_i64()
    comp_size = reader.read_i32()
    return FCompressedChunk(
        uncompressed_offset=uncomp_off,
        uncompressed_size=uncomp_size,
        compressed_offset=comp_off,
        compressed_size=comp_size,
    )
2019
+
2020
+
2021
def parse_file_summary(stream: BinaryIO) -> FileSummary:
    """Parse the package file summary header from *stream*.

    Populates counts/offsets for the name, import, export and depends
    tables, plus GUID, generations and compression info. The absolute
    stream offsets of the package_flags and compression_flags fields are
    recorded so later stages can patch those two values in place.

    Raises:
        ValueError: If the file does not start with PACKAGE_FILE_TAG.
    """
    r = BinaryReader(stream)
    summary = FileSummary()
    summary.tag = r.read_u32()
    if summary.tag != PACKAGE_FILE_TAG:
        raise ValueError("Not a valid Unreal Engine package")
    summary.file_version = r.read_u16()
    summary.licensee_version = r.read_u16()
    summary.total_header_size = r.read_i32()
    summary.folder_name = r.read_fstring()
    # Remember where package_flags sits so PKG_COOKED can be cleared later.
    summary.package_flags_flags_offset = r.tell()
    summary.package_flags = r.read_u32()
    summary.name_count = r.read_i32()
    summary.name_offset = r.read_i32()
    summary.export_count = r.read_i32()
    summary.export_offset = r.read_i32()
    summary.import_count = r.read_i32()
    summary.import_offset = r.read_i32()
    summary.depends_offset = r.read_i32()
    summary.import_export_guids_offset = r.read_i32()
    summary.import_guids_count = r.read_i32()
    summary.export_guids_count = r.read_i32()
    summary.thumbnail_table_offset = r.read_i32()
    summary.guid = read_guid(r)
    summary.generations = read_tarray(r, read_generation)
    summary.engine_version = r.read_u32()
    summary.cooker_version = r.read_u32()
    # Remember where compression_flags sits so it can be reset to
    # COMPRESS_NONE after decompression.
    summary.compression_flags_offset = r.tell()
    summary.compression_flags = r.read_u32()
    summary.compressed_chunks = read_tarray(r, read_compressed_chunk_32)
    # Trailing fields are read past but not kept: a single i32, a string
    # TArray, and the texture allocation table.
    r.read_i32()
    read_tarray(r, lambda rr: rr.read_fstring())
    read_tarray(r, read_texture_allocation)
    return summary
2055
+
2056
+
2057
def parse_file_compression_metadata(stream: BinaryIO) -> FileCompressionMetaData:
    """Read the RL-specific compression trailer at the current stream position."""
    r = BinaryReader(stream)
    garbage = r.read_i32()
    chunks_off = r.read_i32()
    last_block = r.read_i32()
    return FileCompressionMetaData(
        garbage_size=garbage,
        compressed_chunks_offset=chunks_off,
        last_block_size=last_block,
    )
2064
+
2065
+
2066
def verify_decryptor(summary: FileSummary, meta: FileCompressionMetaData, key: bytes, encrypted_data: bytes) -> bool:
    """Probe-decrypt 32 bytes with *key* and sanity-check the chunk table.

    Decrypts the AES-ECB blocks covering the start of the chunk table and
    inspects the first two i32s: a key is accepted when the chunk count is
    at least 1 and the first uncompressed offset equals depends_offset.
    """
    misalign = meta.compressed_chunks_offset % 16
    aligned_start = meta.compressed_chunks_offset - misalign
    probe = encrypted_data[aligned_start:aligned_start + 32]
    if len(probe) != 32:
        return False
    plain = DecryptionProvider.decrypt_ecb(key, probe)[misalign:]
    if len(plain) < 8:
        return False
    chunk_count, first_offset = struct.unpack("<ii", plain[:8])
    return chunk_count >= 1 and first_offset == summary.depends_offset
2078
+
2079
+
2080
def decrypt_data(stream: BinaryIO, summary: FileSummary, meta: FileCompressionMetaData, provider: DecryptionProvider) -> bytes:
    """Read and AES-ECB-decrypt the encrypted span of an RL package header.

    The encrypted region runs from name_offset up to the real header end
    (total_header_size minus the garbage tail), rounded up to the 16-byte
    AES block size. Every key the provider knows is probed with
    verify_decryptor; the first key that validates is used.

    Raises:
        ValueError: When the stream is truncated or no known key verifies.
    """
    span = summary.total_header_size - meta.garbage_size - summary.name_offset
    span = (span + 15) & ~15  # round up to whole AES blocks
    stream.seek(summary.name_offset)
    ciphertext = stream.read(span)
    if len(ciphertext) != span:
        raise ValueError("Failed to read the encrypted data from the stream")
    for candidate in provider.decryption_keys:
        if verify_decryptor(summary, meta, candidate, ciphertext):
            return DecryptionProvider.decrypt_ecb(candidate, ciphertext)
    raise ValueError("Unknown Decryption key")
2095
+
2096
+
2097
def parse_rl_compressed_chunks(decrypted_data: bytes, offset: int) -> List[FCompressedChunk]:
    """Parse the 64-bit chunk table stored at *offset* in the decrypted blob."""
    cursor = io.BytesIO(decrypted_data)
    cursor.seek(offset)
    return read_tarray(BinaryReader(cursor), read_compressed_chunk_64)
2102
+
2103
+
2104
def process_compressed_data(output: BinaryIO, package_stream: BinaryIO, summary: FileSummary) -> None:
    """Inflate every compressed chunk of *package_stream* into *output*.

    *output* must already contain the header bytes. The file is pre-sized
    to the final uncompressed length, the chunks are inflated starting at
    the first chunk's uncompressed_offset, and finally the PKG_COOKED
    package flag and the compression flags are cleared in place so the
    result parses as a plain uncompressed package.

    Raises:
        ValueError: If no chunks are present, or an inflated block does
            not match its recorded uncompressed size.
    """
    if not summary.compressed_chunks:
        raise ValueError("No compressed chunks were found in decrypted data")
    first_uncompressed_offset = summary.compressed_chunks[0].uncompressed_offset
    last_chunk = summary.compressed_chunks[-1]
    final_size = last_chunk.uncompressed_offset + last_chunk.uncompressed_size
    output.truncate(final_size)
    # NOTE(review): chunks are written back-to-back from the first offset;
    # this assumes the recorded uncompressed offsets are contiguous.
    output.seek(first_uncompressed_offset)
    r = BinaryReader(package_stream)
    for chunk in summary.compressed_chunks:
        package_stream.seek(chunk.compressed_offset)
        # Per-chunk header: three i32s that are read and discarded, then
        # the total uncompressed size covered by the chunk's block list.
        r.read_i32()
        r.read_i32()
        r.read_i32()
        total_uncompressed_size = r.read_i32()
        # Block directory: (compressed, uncompressed) size pairs until the
        # advertised total is covered; the data follows the directory.
        sum_uncompressed_size = 0
        blocks: List[Tuple[int, int]] = []
        while sum_uncompressed_size < total_uncompressed_size:
            comp_size = r.read_i32()
            uncomp_size = r.read_i32()
            blocks.append((comp_size, uncomp_size))
            sum_uncompressed_size += uncomp_size
        for comp_size, uncomp_size in blocks:
            compressed_block = r.read_exact(comp_size)
            inflated = zlib.decompress(compressed_block)
            if len(inflated) != uncomp_size:
                raise ValueError(f"Unexpected uncompressed block size: expected {uncomp_size}, got {len(inflated)}")
            output.write(inflated)
    # Clear the cooked flag and mark the package uncompressed so the
    # rewritten file round-trips through parse_file_summary.
    output.seek(summary.package_flags_flags_offset)
    output.write(struct.pack("<I", summary.package_flags & ~PKG_COOKED))
    output.seek(summary.compression_flags_offset)
    output.write(struct.pack("<I", COMPRESS_NONE))
2136
+
2137
+
2138
def unpack_package(input_path: str, output_path: str, provider: DecryptionProvider) -> Path:
    """Decrypt and decompress an encrypted RL package into *output_path*.

    Pipeline: parse the summary, read the RL compression trailer, decrypt
    the header tail, recover the real (64-bit) chunk table from the
    decrypted bytes, then inflate all chunks into the output file.

    Raises:
        ValueError: For unsupported compression, an unknown key, or a
            chunk table that fails validation.

    Returns:
        The output path as a Path object.
    """
    with open(input_path, "rb") as src:
        summary = parse_file_summary(src)
        if (summary.compression_flags & COMPRESS_ZLIB) == 0:
            raise ValueError("Package compression type is unsupported")
        meta = parse_file_compression_metadata(src)
        src.seek(0)
        # Plaintext prefix: everything before the (encrypted) names table.
        header_bytes = src.read(summary.name_offset)
        decrypted_data = decrypt_data(src, summary, meta, provider)
        # Replace the summary's 32-bit chunk list with the real 64-bit
        # table recovered from inside the decrypted region.
        summary.compressed_chunks = parse_rl_compressed_chunks(decrypted_data, meta.compressed_chunks_offset)
        if not summary.compressed_chunks or summary.compressed_chunks[0].uncompressed_offset != summary.depends_offset:
            raise ValueError("Failed to parse decrypted compressed chunk table")
        output_path = str(output_path)
        os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)
        with open(output_path, "wb+") as dst:
            dst.write(header_bytes)
            dst.write(decrypted_data)
            process_compressed_data(dst, src, summary)
    return Path(output_path)
2157
+
2158
+
2159
def unpack_plain_package(input_path: str, output_path: str) -> Path:
    """Decompress an unencrypted zlib-compressed package.

    The original file bytes are copied verbatim to *output_path*, then
    process_compressed_data inflates the chunks over that copy and clears
    the cooked/compression flags in place.

    Raises:
        ValueError: When the package is not zlib-compressed.
    """
    with open(input_path, "rb") as src:
        summary = parse_file_summary(src)
        if not (summary.compression_flags & COMPRESS_ZLIB):
            raise ValueError("Package compression type is unsupported")
        output_path = str(output_path)
        os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)
        with open(output_path, "wb+") as dst:
            dst.write(Path(input_path).read_bytes())
            process_compressed_data(dst, src, summary)
    return Path(output_path)
2171
+
2172
+
2173
def try_parse_plain_package(input_path: Path) -> Optional["ParsedPackage"]:
    """Parse *input_path* as an already-decrypted package, or None on failure.

    Used as a cheap probe: any parse error simply means the file needs the
    decrypt/decompress pipeline first, so all exceptions map to None.
    """
    try:
        return parse_decrypted_package(input_path)
    except Exception:  # deliberate best-effort probe
        return None
2178
+
2179
+
2180
def resolve_input_package(input_path: Path, decrypted_dir: Path, script_dir: Path) -> Tuple[Path, "ParsedPackage", Optional[DecryptionProvider], Optional[Path], bool]:
    """Turn any supported input file into a parsed, decrypted package.

    Tries, in order:
      1. Parse as an already-decrypted package (no work needed).
      2. If zlib-compressed but unencrypted, decompress into decrypted_dir.
      3. Otherwise locate keys.txt and run the decrypt + decompress path.

    Returns:
        (resolved_path, parsed_package, provider_or_None,
         keys_path_or_None, was_encrypted).

    Raises:
        FileNotFoundError: When decryption is required but keys.txt cannot
            be found near the script, the cwd, or the input file.
    """
    plain_package = try_parse_plain_package(input_path)
    if plain_package is not None:
        return input_path, plain_package, None, None, False

    with input_path.open("rb") as fh:
        summary = parse_file_summary(fh)

    if (summary.compression_flags & COMPRESS_ZLIB) != 0:
        plain_decompressed_path = decrypted_dir / f"{input_path.stem}_decompressed.upk"
        try:
            unpack_plain_package(str(input_path), str(plain_decompressed_path))
            return plain_decompressed_path, parse_decrypted_package(plain_decompressed_path), None, None, False
        except Exception:
            # Not a plain compressed file after all - fall through to the
            # encrypted pipeline below.
            pass

    keys_path = find_keys_path(script_dir, input_path)
    if keys_path is None:
        raise FileNotFoundError("Could not find keys.txt next to the script, current directory, or selected file")
    provider = DecryptionProvider(str(keys_path))
    decrypted_path = decrypted_dir / f"{input_path.stem}_decrypted.upk"
    unpack_package(str(input_path), str(decrypted_path), provider)
    return decrypted_path, parse_decrypted_package(decrypted_path), provider, keys_path, True
2203
+
2204
+
2205
def parse_import_entry(reader: BinaryReader, table_index: int, summary: "FileSummary") -> ImportEntry:
    """Read one import-table record: two FNames, an i32 outer index, one FName."""
    pkg_name = read_fname(reader, summary)
    cls_name = read_fname(reader, summary)
    outer = reader.read_i32()
    obj_name = read_fname(reader, summary)
    return ImportEntry(
        table_index=table_index,
        class_package=pkg_name,
        class_name=cls_name,
        outer_index=outer,
        object_name=obj_name,
    )
2213
+
2214
+
2215
def parse_export_entry(reader: BinaryReader, table_index: int, generation_count: int, summary: "FileSummary") -> ExportEntry:
    """Read one export-table record at the current reader position.

    Args:
        reader: Positioned at the start of the entry.
        table_index: Zero-based index of this entry in the export table.
        generation_count: Unused; kept for signature stability.
        summary: Supplies the file version for FName decoding.
    """
    # The export entry layout in this UE3 build is:
    #   class_index (i32) | super_index (i32) | outer_index (i32) |
    #   object_name (FName: i32 name_index + i32 instance_number) |
    #   archetype_index (i32) | object_flags (u64) |
    #   serial_size (i32) | serial_offset (i64) | export_flags (i32) |
    #   net_objects (TArray<i32>: i32 count + count * i32) |
    #   package_guid (4*u32) | package_flags (i32)
    #
    # net_objects IS length-prefixed in this package version - it was the
    # generation_count assumption that was wrong. The original "None / Class
    # / 0 / 0" tail in the GUI is most likely an artifact of the export count
    # in the summary being larger than the number of real entries on disk
    # (the table is followed by zero padding), not a parser desync. The
    # generation_count parameter is kept for signature stability with
    # callers, but is not used.
    del generation_count
    class_index = reader.read_i32()
    super_index = reader.read_i32()
    outer_index = reader.read_i32()
    object_name = read_fname(reader, summary)
    archetype_index = reader.read_i32()
    object_flags = reader.read_u64()
    serial_size = reader.read_i32()
    serial_offset = reader.read_i64()
    export_flags = reader.read_i32()
    net_objects = read_tarray(reader, lambda rr: rr.read_i32())
    package_guid = read_guid(reader)
    package_flags = reader.read_i32()
    return ExportEntry(
        table_index=table_index,
        class_index=class_index,
        super_index=super_index,
        outer_index=outer_index,
        object_name=object_name,
        archetype_index=archetype_index,
        object_flags=object_flags,
        serial_size=serial_size,
        serial_offset=serial_offset,
        export_flags=export_flags,
        net_objects=net_objects,
        package_guid=package_guid,
        package_flags=package_flags,
    )
2259
+
2260
+
2261
def parse_decrypted_package(file_path: Path) -> ParsedPackage:
    """Read a plain (decrypted, uncompressed) package file and parse its tables.

    This was previously a byte-for-byte duplicate of
    parse_decrypted_package_bytes with the file read inlined; it now
    delegates so the parsing logic lives in exactly one place.

    Args:
        file_path: Path to a decrypted .upk on disk.

    Returns:
        A ParsedPackage holding the summary, name/import/export tables and
        the raw file bytes.

    Raises:
        ValueError: if the package is still marked as compressed.
    """
    return parse_decrypted_package_bytes(file_path, file_path.read_bytes())
2275
+
2276
+
2277
def parse_decrypted_package_bytes(file_path: Path, data: bytes) -> ParsedPackage:
    """Parse an in-memory plain-package image into its header tables.

    Raises ValueError when the summary still advertises compression; the
    three tables are read at the offsets recorded in the summary.
    """
    stream = io.BytesIO(data)
    summary = parse_file_summary(stream)
    if summary.compression_flags != COMPRESS_NONE:
        raise ValueError("The decrypted package is still marked as compressed")
    reader = BinaryReader(stream)

    def _read_table(offset, count, factory):
        # Seek once, then materialize `count` entries with the factory.
        stream.seek(offset)
        return [factory(index) for index in range(count)]

    names = _read_table(summary.name_offset, summary.name_count,
                        lambda i: read_name_entry(reader, i))
    imports = _read_table(summary.import_offset, summary.import_count,
                          lambda i: parse_import_entry(reader, i, summary))
    exports = _read_table(summary.export_offset, summary.export_count,
                          lambda i: parse_export_entry(reader, i, len(summary.generations), summary))
    return ParsedPackage(file_path=file_path, summary=summary, names=names,
                         imports=imports, exports=exports, file_bytes=data)
2290
+
2291
+
2292
def verify_package(package: ParsedPackage) -> List[Tuple[str, str]]:
    """Deep consistency check on a parsed package's header tables.

    Returns a list of (severity, message) tuples where severity is one of
    'OK', 'WARN', 'ERROR'. An 'ERROR' indicates the package is internally
    inconsistent in a way that the engine is likely to choke on at load
    time (often manifesting as a freeze or crash). 'WARN' flags things
    that look unusual but might be intentional. 'OK' lines summarize
    successful invariant checks.

    The checks here are derived from cross-referencing UE Explorer's
    canonical loader and the offset bookkeeping in our own _replace_header_tables.
    """
    findings: List[Tuple[str, str]] = []
    s = package.summary
    file_len = len(package.file_bytes)

    # Summary-level offset sanity.
    if s.name_offset <= 0 or s.name_offset >= file_len:
        findings.append(("ERROR", f"name_offset {s.name_offset} is out of bounds (file size {file_len})"))
    if s.import_offset <= 0 or s.import_offset >= file_len:
        findings.append(("ERROR", f"import_offset {s.import_offset} is out of bounds"))
    if s.export_offset <= 0 or s.export_offset >= file_len:
        findings.append(("ERROR", f"export_offset {s.export_offset} is out of bounds"))
    # NOTE(review): `>` rather than `>=` here — presumably an empty depends
    # table may legitimately start exactly at EOF; confirm before tightening.
    if s.depends_offset <= 0 or s.depends_offset > file_len:
        findings.append(("ERROR", f"depends_offset {s.depends_offset} is out of bounds"))

    # Tables must be in the canonical order: names < imports < exports < depends.
    if not (s.name_offset < s.import_offset < s.export_offset < s.depends_offset):
        findings.append((
            "ERROR",
            f"Header tables out of order: names@{s.name_offset} imports@{s.import_offset} "
            f"exports@{s.export_offset} depends@{s.depends_offset}",
        ))
    else:
        findings.append(("OK", "Header tables are in canonical order"))

    # total_header_size sanity. For plain (decompressed) packages this should
    # equal depends_offset + (size of depends table). We can't compute the
    # depends table size without re-parsing it, but we can at least require
    # total_header_size >= depends_offset.
    if s.total_header_size < s.depends_offset:
        findings.append((
            "ERROR",
            f"total_header_size {s.total_header_size} is less than depends_offset {s.depends_offset}; "
            f"header region claims to end before the depends table starts",
        ))
    else:
        findings.append(("OK", f"total_header_size {s.total_header_size} >= depends_offset {s.depends_offset}"))

    # Re-parse the export table from disk and verify the cursor lands at
    # depends_offset. If it doesn't, the export entries on disk don't match
    # what our parser thinks they look like, which would also confuse the
    # engine.
    try:
        bio = io.BytesIO(package.file_bytes)
        bio.seek(s.export_offset)
        r = BinaryReader(bio)
        for i in range(s.export_count):
            parse_export_entry(r, i, len(s.generations), s)
        end_cursor = bio.tell()
        if end_cursor != s.depends_offset:
            findings.append((
                "ERROR",
                f"After parsing {s.export_count} exports cursor is at {end_cursor}, "
                f"expected depends_offset {s.depends_offset} (delta {end_cursor - s.depends_offset})",
            ))
        else:
            findings.append(("OK", f"Export table parse cursor lands exactly at depends_offset"))
    except Exception as exc:
        findings.append(("ERROR", f"Export table re-parse failed: {exc}"))

    # Per-export bounds. Every export's [serial_offset, serial_offset + serial_size)
    # must lie inside the file, and must lie at or after total_header_size
    # (the export bodies live in the data region, not the header region).
    body_violations = 0
    for exp in package.exports:
        if package.is_placeholder_export(exp):
            continue
        if exp.serial_size < 0:
            findings.append(("ERROR", f"Export[{exp.table_index}] has negative serial_size {exp.serial_size}"))
            body_violations += 1
            continue
        if exp.serial_size == 0:
            continue  # Zero-size exports legitimately have offset 0.
        if exp.serial_offset < s.total_header_size:
            findings.append((
                "ERROR",
                f"Export[{exp.table_index}] '{package.resolve_name(exp.object_name)}' "
                f"serial_offset {exp.serial_offset} is before total_header_size {s.total_header_size}",
            ))
            body_violations += 1
        if exp.serial_offset + exp.serial_size > file_len:
            findings.append((
                "ERROR",
                f"Export[{exp.table_index}] '{package.resolve_name(exp.object_name)}' "
                f"body extends past EOF: serial_offset={exp.serial_offset} + serial_size={exp.serial_size} > file_size={file_len}",
            ))
            body_violations += 1
    if body_violations == 0:
        findings.append(("OK", f"All {sum(1 for e in package.exports if not package.is_placeholder_export(e))} non-placeholder export bodies are in-bounds"))

    # Detect overlapping export bodies. Two exports' [start, end) ranges
    # should not overlap. Sorting by start offset reduces this to adjacent-
    # pair comparisons.
    bodies = sorted(
        ((exp.serial_offset, exp.serial_offset + exp.serial_size, exp.table_index, package.resolve_name(exp.object_name))
         for exp in package.exports
         if exp.serial_size > 0 and not package.is_placeholder_export(exp)),
        key=lambda x: x[0],
    )
    overlap_count = 0
    for prev, curr in zip(bodies, bodies[1:]):
        if curr[0] < prev[1]:
            findings.append((
                "ERROR",
                f"Export bodies overlap: Export[{prev[2]}] '{prev[3]}' [{prev[0]}, {prev[1]}) "
                f"vs Export[{curr[2]}] '{curr[3]}' [{curr[0]}, {curr[1]})",
            ))
            overlap_count += 1
    if overlap_count == 0 and bodies:
        findings.append(("OK", f"No overlapping export bodies among {len(bodies)} non-placeholder exports"))

    # Cross-reference checks. Every export.class_index, super_index,
    # outer_index, archetype_index must be a valid export or import index.
    # Index convention: 0 = None, positive = 1-based export index,
    # negative = 1-based import index (negated).
    def _index_label(idx: int) -> str:
        if idx == 0:
            return "None"
        if idx > 0:
            return f"Export[{idx - 1}]"
        return f"Import[{-idx - 1}]"

    bad_refs = 0
    for exp in package.exports:
        for field_name in ("class_index", "super_index", "outer_index", "archetype_index"):
            idx = getattr(exp, field_name)
            if idx == 0:
                continue
            if idx > 0:
                if not (1 <= idx <= len(package.exports)):
                    findings.append((
                        "ERROR",
                        f"Export[{exp.table_index}] '{package.resolve_name(exp.object_name)}' "
                        f"{field_name}={idx} -> {_index_label(idx)} is out of range",
                    ))
                    bad_refs += 1
            else:
                if not (1 <= -idx <= len(package.imports)):
                    findings.append((
                        "ERROR",
                        f"Export[{exp.table_index}] '{package.resolve_name(exp.object_name)}' "
                        f"{field_name}={idx} -> {_index_label(idx)} is out of range",
                    ))
                    bad_refs += 1
    for imp in package.imports:
        # Imports' outer_index can be 0 (top-level) or another import (negative)
        # or an export (positive). All three are legal in UE3 - imports
        # parented to exports happen for sub-objects of exports referenced
        # from outside.
        if imp.outer_index == 0:
            continue
        if imp.outer_index > 0:
            if not (1 <= imp.outer_index <= len(package.exports)):
                findings.append((
                    "ERROR",
                    f"Import[{imp.table_index}] '{package.resolve_name(imp.object_name)}' "
                    f"outer_index={imp.outer_index} is out of range",
                ))
                bad_refs += 1
        else:
            if not (1 <= -imp.outer_index <= len(package.imports)):
                findings.append((
                    "ERROR",
                    f"Import[{imp.table_index}] '{package.resolve_name(imp.object_name)}' "
                    f"outer_index={imp.outer_index} is out of range",
                ))
                bad_refs += 1
    if bad_refs == 0:
        findings.append(("OK", "All export/import cross-references resolve"))

    # Name index validity. Every FNameRef in every import/export must have
    # a name_index inside the names table.
    bad_names = 0
    name_count = len(package.names)
    for imp in package.imports:
        for label, ref in (("class_package", imp.class_package), ("class_name", imp.class_name), ("object_name", imp.object_name)):
            if not (0 <= ref.name_index < name_count):
                findings.append((
                    "ERROR",
                    f"Import[{imp.table_index}] {label}.name_index={ref.name_index} is out of range (name_count={name_count})",
                ))
                bad_names += 1
    for exp in package.exports:
        if not (0 <= exp.object_name.name_index < name_count):
            findings.append((
                "ERROR",
                f"Export[{exp.table_index}] object_name.name_index={exp.object_name.name_index} is out of range",
            ))
            bad_names += 1
    if bad_names == 0:
        findings.append(("OK", f"All FName references point inside the {name_count}-entry name table"))

    # Detect orphan exports: any non-placeholder export whose outer_index
    # cannot be reached from a root (outer_index == 0). Exports that descend
    # into invalid outers are loadable in isolation but the engine may
    # behave oddly when iterating the package tree.
    orphans = 0
    # Memoization cache shared across _has_root calls; keyed by the signed
    # export/import index convention described above.
    visited: Dict[int, bool] = {}

    def _has_root(idx: int, depth: int = 0) -> bool:
        # The depth bound exceeds the total number of table entries, so any
        # chain longer than that must contain a cycle.
        if depth > len(package.exports) + len(package.imports) + 2:
            return False  # cycle
        if idx == 0:
            return True
        if idx in visited:
            return visited[idx]
        if idx > 0:
            outer = package.exports[idx - 1].outer_index
        else:
            outer = package.imports[-idx - 1].outer_index
        result = _has_root(outer, depth + 1)
        visited[idx] = result
        return result

    for exp in package.exports:
        if package.is_placeholder_export(exp):
            continue
        if not _has_root(exp.table_index + 1):
            findings.append((
                "WARN",
                f"Export[{exp.table_index}] '{package.resolve_name(exp.object_name)}' "
                "has an unreachable outer chain (cycle or invalid outer)",
            ))
            orphans += 1
    if orphans == 0:
        findings.append(("OK", "All non-placeholder exports have valid outer chains"))

    return findings
2529
+
2530
+
2531
def format_verify_report(findings: List[Tuple[str, str]]) -> str:
    """Render verify_package findings as a text report, errors listed first."""
    tally = {"ERROR": 0, "WARN": 0, "OK": 0}
    for severity, _ in findings:
        if severity in tally:
            tally[severity] += 1
    header = (
        f"Package verification: {tally['ERROR']} error(s), "
        f"{tally['WARN']} warning(s), {tally['OK']} check(s) passed"
    )
    out = [header, "=" * len(header), ""]
    # Group the findings by severity, worst first.
    for wanted in ("ERROR", "WARN", "OK"):
        out.extend(f"[{sev}] {msg}" for sev, msg in findings if sev == wanted)
    return "\n".join(out)
2543
+
2544
+
2545
def sha1_file(path: Path) -> str:
    """Return the hex SHA-1 digest of the file at *path*, streamed in 1 MiB chunks."""
    hasher = hashlib.sha1()
    with path.open("rb") as stream:
        while chunk := stream.read(1024 * 1024):
            hasher.update(chunk)
    return hasher.hexdigest()
2551
+
2552
+
2553
def patch_sha1_in_exe(exe_path: Path, old_sha1_hex: str, new_sha1_hex: str) -> int:
    """Replace every occurrence of a raw 20-byte SHA-1 inside *exe_path*.

    Both hex strings are normalized (stripped, lowercased) and must be 40
    hex characters. The file is rewritten in place and the number of
    replacements is returned.

    Raises:
        ValueError: on malformed hex input, or when the old digest never
            occurs in the file (the file is then left untouched).
    """
    old_hex = old_sha1_hex.strip().lower()
    new_hex = new_sha1_hex.strip().lower()
    if len(old_hex) != 40 or len(new_hex) != 40:
        raise ValueError("SHA-1 values must be 40 hex characters")
    needle = bytes.fromhex(old_hex)
    replacement = bytes.fromhex(new_hex)
    blob = exe_path.read_bytes()
    # Equal-length patterns: bytes.count/replace handle non-overlapping
    # occurrences exactly like the manual scan-and-splice loop did.
    hits = blob.count(needle)
    if hits == 0:
        raise ValueError(f"Original SHA-1 bytes were not found in {exe_path.name}")
    exe_path.write_bytes(blob.replace(needle, replacement))
    return hits
2574
+
2575
+
2576
def find_keys_path(script_dir: Path, selected_file: Path) -> Optional[Path]:
    """Locate keys.txt next to the script, in the CWD, or beside the input file.

    The first existing candidate wins, in that order; None when no
    candidate exists.
    """
    for base in (script_dir, Path.cwd(), selected_file.parent):
        candidate = base / "keys.txt"
        if candidate.exists():
            return candidate
    return None
2586
+
2587
+
2588
def format_hex_preview(data: bytes, base_offset: int = 0) -> str:
    """Render *data* as a 16-bytes-per-row hex dump.

    Each row is "OFFSET  HEX-COLUMN  ASCII-COLUMN" with the offset shifted
    by *base_offset*; non-printable bytes show as '.'. Empty input yields
    an empty string.
    """
    if not data:
        return ""
    rows = []
    for start in range(0, len(data), 16):
        row = data[start:start + 16]
        hex_column = " ".join(format(byte, "02X") for byte in row)
        text_column = "".join(chr(byte) if 32 <= byte < 127 else "." for byte in row)
        # 47 = 16 * 3 - 1 characters: a full row needs no padding.
        rows.append(f"{base_offset + start:08X} {hex_column:<47} {text_column}")
    return "\n".join(rows)
2598
+
2599
+
2600
+ class App:
2601
    def __init__(self, root: tk.Tk):
        """Set up application state, build the widget tree, and register drag/drop.

        Side effects: creates a 'Decrypted' directory next to the script,
        builds the whole UI via _build_ui, constructs the
        NativeWindowsDropTarget, and schedules a deferred drop-label refresh.
        """
        self.root = root
        self.root.title("RL UPK All-in-One GUI")
        self.root.geometry("1500x920")
        # Working directories: decrypted output lives next to the script.
        self.script_dir = Path(__file__).resolve().parent
        self.decrypted_dir = self.script_dir / "Decrypted"
        self.decrypted_dir.mkdir(exist_ok=True)
        # Session-scoped bookkeeping for opened files and the parsed package.
        self.sha1_memory: Dict[str, Dict[str, str]] = {}
        self.package: Optional[ParsedPackage] = None
        self.current_content_map: Dict[str, ExportEntry] = {}
        self.current_export_map: Dict[str, ExportEntry] = {}
        self.current_import_map: Dict[str, ImportEntry] = {}
        self.current_name_map: Dict[str, NameEntry] = {}
        self.selected_input_path: Optional[Path] = None
        self.sdk_db: Optional[RLSDKDatabase] = None
        self.sdk_path: Optional[Path] = None
        # Currently selected export/property plus the crypto context used to
        # decrypt the loaded package (needed again for re-encryption).
        self.current_properties: List[ParsedProperty] = []
        self.current_export: Optional[ExportEntry] = None
        self.current_property: Optional[ParsedProperty] = None
        self.current_provider: Optional[DecryptionProvider] = None
        self.current_encrypted_input_path: Optional[Path] = None
        self.current_keys_path: Optional[Path] = None
        self.current_original_sha1: Optional[str] = None
        self.donor_key_upk_path: Optional[Path] = None  # path to donor encrypted UPK for key sourcing
        self.use_donor_key_var: Optional[tk.BooleanVar] = None  # set during _build_ui
        self.donor_key_path_var: Optional[tk.StringVar] = None  # display label, set during _build_ui
        # Backing variables for the status bar and info lines shown by _build_ui.
        self.status_var = tk.StringVar(value="Ready")
        self.original_var = tk.StringVar(value="Original: -")
        self.sha1_var = tk.StringVar(value="SHA-1: -")
        self.decrypted_var = tk.StringVar(value="Decrypted: -")
        self.keys_var = tk.StringVar(value="Keys: -")
        self.sdk_var = tk.StringVar(value="Property parser: package-native UE3 tags")
        self._build_ui()
        self.drop_target = NativeWindowsDropTarget(self.root, self._on_drop_files)
        # The drop target may finish initializing after window creation;
        # refresh the label shortly after startup to reflect its real state.
        self.root.after(200, self._update_drop_label)
2636
+
2637
    def _build_ui(self):
        """Construct the entire widget tree.

        Layout: a top row (Open button + drop label), an info strip, a
        two-pane main area (left: history + notebook of package tables;
        right: notebook of details/properties/raw views), and a bottom
        status bar. Pack/grid ordering is load-bearing in several places -
        see the inline comments before changing anything.
        """
        self.root.configure(bg="#101114")
        style = ttk.Style()
        try:
            style.theme_use("clam")
        except tk.TclError:
            # Theme availability varies by platform; fall back silently.
            pass
        style.configure("Treeview", rowheight=24)

        top = tk.Frame(self.root, bg="#101114")
        top.pack(fill="x", padx=12, pady=12)

        open_button = ttk.Button(top, text="Open UPK", command=self.open_file)
        open_button.pack(side="left")

        self.drop_label = tk.Label(
            top,
            text="Drop an encrypted .upk here or use Open UPK",
            bg="#171a21",
            fg="#dbe6ff",
            padx=18,
            pady=12,
            relief="groove",
            bd=2,
        )
        self.drop_label.pack(side="left", fill="x", expand=True, padx=(10, 0))

        info = tk.Frame(self.root, bg="#101114")
        info.pack(fill="x", padx=12)
        for value in [self.original_var, self.sha1_var, self.decrypted_var, self.keys_var]:
            tk.Label(info, textvariable=value, anchor="w", bg="#101114", fg="#dbe6ff").pack(fill="x")

        main = tk.PanedWindow(self.root, sashrelief="flat", sashwidth=6, bg="#101114")
        main.pack(fill="both", expand=True, padx=12, pady=12)

        left = tk.Frame(main, bg="#101114")
        right = tk.Frame(main, bg="#101114")
        main.add(left, minsize=520)
        main.add(right, minsize=520)

        session_frame = ttk.LabelFrame(left, text="Session SHA-1 Memory")
        session_frame.pack(fill="x")
        self.history_tree = ttk.Treeview(session_frame, columns=("sha1",), show="tree headings", height=6)
        self.history_tree.heading("#0", text="File")
        self.history_tree.heading("sha1", text="SHA-1")
        self.history_tree.column("#0", width=320, anchor="w")
        self.history_tree.column("sha1", width=260, anchor="w")
        self.history_tree.pack(fill="x", padx=6, pady=6)

        left_notebook = ttk.Notebook(left)
        left_notebook.pack(fill="both", expand=True, pady=(12, 0))

        self.content_tree = self._make_tree(left_notebook, ("class", "path"), (240, 380))
        self.content_tree.heading("#0", text="Name")
        left_notebook.add(self.content_tree.master, text="Content")
        self.content_tree.bind("<<TreeviewSelect>>", self._on_content_select)

        # Exports tab: wrap the tree in a frame that also holds a
        # 'Hide placeholder exports' checkbox. RL .upk files contain export
        # slots that are entirely zeroed (class=0, name='None', no body) -
        # these are real on-disk entries, not parser bugs, but they clutter
        # the table. The checkbox lets the user filter them out, matching
        # how UE Explorer hides them from its class list. Default is OFF so
        # nothing changes for existing workflows; flipping it on hides the
        # placeholders. Same pack-order rule as the Names tab: bottom row
        # first, tree second, so the tree's expand=True doesn't shove the
        # row off-screen.
        exports_tab = ttk.Frame(left_notebook)
        left_notebook.add(exports_tab, text="Exports")

        self.hide_placeholder_exports_var = tk.BooleanVar(value=False)
        exports_top_row = ttk.Frame(exports_tab)
        exports_top_row.pack(side="top", fill="x", padx=6, pady=(6, 0))
        ttk.Checkbutton(
            exports_top_row,
            text="Hide placeholder exports (class=0, name=None, empty)",
            variable=self.hide_placeholder_exports_var,
            command=self._populate_exports_tree,
        ).pack(side="left")
        self.exports_placeholder_count_var = tk.StringVar(value="")
        ttk.Label(exports_top_row, textvariable=self.exports_placeholder_count_var).pack(side="left", padx=(12, 0))

        self.exports_tree = self._make_tree(exports_tab, ("name", "class", "outer", "size", "offset"), (220, 160, 240, 90, 110))
        self.exports_tree.heading("#0", text="Index")
        self.exports_tree.master.pack(side="top", fill="both", expand=True)
        self.exports_tree.bind("<<TreeviewSelect>>", self._on_exports_select)
        # Visual styling for placeholder rows so they're clearly distinct
        # even when shown. Treeview supports tag_configure for per-row
        # foreground colors.
        self.exports_tree.tag_configure("placeholder", foreground="#5a6577")

        self.imports_tree = self._make_tree(left_notebook, ("name", "class", "package", "outer"), (220, 160, 160, 260))
        self.imports_tree.heading("#0", text="Index")
        left_notebook.add(self.imports_tree.master, text="Imports")
        self.imports_tree.bind("<<TreeviewSelect>>", self._on_imports_select)

        # Names tab: a wrapper frame holds both the names tree (top, fills
        # available space) and the inline rename row (bottom, fixed height).
        # Without the wrapper, the edit row would be inside the tree's own
        # grid-managed frame and could get clipped by the notebook's height
        # calculation, so we mimic the Properties-tab layout: pack the tree
        # frame and the edit row as siblings inside one container that gets
        # added to the notebook.
        #
        # Pack order matters: the bottom edit row is packed FIRST so pack
        # reserves its space before the tree's expand=True consumes the rest.
        # If we packed the tree first, it would claim all available height
        # and the edit row would be pushed below the visible area.
        names_tab = ttk.Frame(left_notebook)
        left_notebook.add(names_tab, text="Names")

        # Edit row pinned to the bottom of the Names tab, outside the tree's
        # internal grid so the notebook can never hide it. Packed first.
        names_edit = ttk.Frame(names_tab)
        names_edit.pack(side="bottom", fill="x", padx=6, pady=6)
        self.name_edit_info_var = tk.StringVar(value="Select a name to rename")
        ttk.Label(names_edit, textvariable=self.name_edit_info_var).pack(fill="x")
        name_edit_row = ttk.Frame(names_edit)
        name_edit_row.pack(fill="x", pady=(4, 0))
        ttk.Label(name_edit_row, text="New Text:").pack(side="left")
        self.name_edit_var = tk.StringVar()
        self.name_edit_entry = ttk.Entry(name_edit_row, textvariable=self.name_edit_var)
        self.name_edit_entry.pack(side="left", fill="x", expand=True, padx=(6, 6))
        # Pressing Enter in the entry applies the rename.
        self.name_edit_entry.bind("<Return>", lambda _e: self.rename_selected_name())
        ttk.Button(name_edit_row, text="Rename Name", command=self.rename_selected_name).pack(side="left")

        # Now pack the tree on top, filling the remaining space.
        self.names_tree = self._make_tree(names_tab, ("name", "flags"), (380, 220))
        self.names_tree.heading("#0", text="Index")
        # The tree's own frame (created by _make_tree, exposed as tree.master)
        # is already grid-managed internally; pack it into the wrapper.
        self.names_tree.master.pack(side="top", fill="both", expand=True)
        self.names_tree.bind("<<TreeviewSelect>>", self._on_names_select)
        # Double-clicking a row in the names tree focuses the entry so the
        # user can immediately type a replacement.
        self.names_tree.bind("<Double-1>", self._on_names_double_click)

        self.summary_tree = self._make_tree(left_notebook, ("value",), (520,))
        self.summary_tree.heading("#0", text="Field")
        left_notebook.add(self.summary_tree.master, text="Summary")

        right_notebook = ttk.Notebook(right)
        right_notebook.pack(fill="both", expand=True)

        details_frame = ttk.Frame(right_notebook)
        right_notebook.add(details_frame, text="Details")
        self.details_text = tk.Text(details_frame, wrap="none", bg="#111318", fg="#e8ecf4", insertbackground="#ffffff")
        self._pack_text_with_scrollbars(details_frame, self.details_text)

        properties_frame = ttk.Frame(right_notebook)
        right_notebook.add(properties_frame, text="Properties")
        self.properties_tree = self._make_tree(properties_frame, ("declared_type", "tag_type", "owner_type", "size", "array_index", "value"), (180, 130, 220, 80, 90, 420))
        self.properties_tree.heading("#0", text="Property")
        self.properties_tree.master.pack(fill="both", expand=True)
        self.properties_tree.bind("<<TreeviewSelect>>", self._on_property_select)
        prop_edit = ttk.Frame(properties_frame)
        prop_edit.pack(fill="x", padx=6, pady=6)
        self.property_info_var = tk.StringVar(value="Select a property to edit")
        ttk.Label(prop_edit, textvariable=self.property_info_var).pack(fill="x")
        # ── Donor-key row ────────────────────────────────────────────────────
        # When enabled, re-encryption uses the key that decrypts a *different*
        # encrypted UPK (selected by the user) rather than the key that was
        # used to decrypt the currently-loaded package. This lets you take an
        # edit from one version's package and encrypt it with a key from a
        # different version without needing to know the key in advance.
        donor_key_row = ttk.Frame(prop_edit)
        donor_key_row.pack(fill="x", pady=(4, 0))
        self.use_donor_key_var = tk.BooleanVar(value=False)
        ttk.Checkbutton(
            donor_key_row,
            text="Encrypt with key from another UPK:",
            variable=self.use_donor_key_var,
        ).pack(side="left")
        self.donor_key_path_var = tk.StringVar(value="(none selected)")
        tk.Label(donor_key_row, textvariable=self.donor_key_path_var,
                 bg="#101114", fg="#a8b4cc", anchor="w").pack(side="left", fill="x", expand=True, padx=(6, 6))
        ttk.Button(donor_key_row, text="Pick Donor UPK…",
                   command=self._pick_donor_key_upk).pack(side="left")

        # ── Action buttons ───────────────────────────────────────────────────
        edit_row = ttk.Frame(prop_edit)
        edit_row.pack(fill="x", pady=(6, 0))
        ttk.Label(edit_row, text="New Value:").pack(side="left")
        self.property_edit_var = tk.StringVar()
        self.property_edit_entry = ttk.Entry(edit_row, textvariable=self.property_edit_var)
        self.property_edit_entry.pack(side="left", fill="x", expand=True, padx=(6, 6))
        ttk.Button(edit_row, text="Apply Property Edit", command=self.apply_property_edit).pack(side="left")
        ttk.Button(edit_row, text="Rename Export FName", command=self.rename_export_fname).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Import Donor Names", command=self.import_donor_names).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Import Donor Exports", command=self.import_donor_exports).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Replace Export From Donor", command=self.replace_export_from_donor).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Save Re-Encrypted UPK", command=self.save_reencrypted_upk).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Save Decrypted UPK", command=self.save_decrypted_upk).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Set DLLBind", command=self.set_dll_bind).pack(side="left", padx=(6, 0))
        ttk.Button(edit_row, text="Verify Package", command=self.verify_current_package).pack(side="left", padx=(6, 0))

        raw_frame = ttk.Frame(right_notebook)
        right_notebook.add(raw_frame, text="Raw Data")
        self.raw_text = tk.Text(raw_frame, wrap="none", bg="#111318", fg="#e8ecf4", insertbackground="#ffffff")
        self._pack_text_with_scrollbars(raw_frame, self.raw_text)

        status = tk.Label(self.root, textvariable=self.status_var, anchor="w", bg="#0b0c0f", fg="#dbe6ff", padx=12, pady=6)
        status.pack(fill="x", side="bottom")
2841
+
2842
+ def _make_tree(self, parent, columns: Tuple[str, ...], widths: Tuple[int, ...]):
2843
+ frame = ttk.Frame(parent)
2844
+ tree = ttk.Treeview(frame, columns=columns, show="tree headings")
2845
+ tree.heading("#0", text="#")
2846
+ tree.column("#0", width=70, anchor="w")
2847
+ for column, width in zip(columns, widths):
2848
+ tree.heading(column, text=column.title())
2849
+ tree.column(column, width=width, anchor="w")
2850
+ vsb = ttk.Scrollbar(frame, orient="vertical", command=tree.yview)
2851
+ hsb = ttk.Scrollbar(frame, orient="horizontal", command=tree.xview)
2852
+ tree.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
2853
+ tree.grid(row=0, column=0, sticky="nsew")
2854
+ vsb.grid(row=0, column=1, sticky="ns")
2855
+ hsb.grid(row=1, column=0, sticky="ew")
2856
+ frame.rowconfigure(0, weight=1)
2857
+ frame.columnconfigure(0, weight=1)
2858
+ return tree
2859
+
2860
+ def _pack_text_with_scrollbars(self, frame, text):
2861
+ vsb = ttk.Scrollbar(frame, orient="vertical", command=text.yview)
2862
+ hsb = ttk.Scrollbar(frame, orient="horizontal", command=text.xview)
2863
+ text.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
2864
+ text.grid(row=0, column=0, sticky="nsew")
2865
+ vsb.grid(row=0, column=1, sticky="ns")
2866
+ hsb.grid(row=1, column=0, sticky="ew")
2867
+ frame.rowconfigure(0, weight=1)
2868
+ frame.columnconfigure(0, weight=1)
2869
+
2870
+ def _load_rlsdk(self):
2871
+ candidates = [
2872
+ self.script_dir / "RLSDK.zip",
2873
+ Path.cwd() / "RLSDK.zip",
2874
+ self.script_dir / "SDK.zip",
2875
+ Path.cwd() / "SDK.zip",
2876
+ ]
2877
+ for candidate in candidates:
2878
+ if candidate.exists():
2879
+ try:
2880
+ self.sdk_db = parse_rlsdk_database(candidate)
2881
+ self.sdk_path = candidate
2882
+ self.sdk_var.set(f"RLSDK: {candidate}")
2883
+ return
2884
+ except Exception as exc:
2885
+ self.sdk_var.set(f"RLSDK: failed to load ({exc})")
2886
+ return
2887
+ self.sdk_var.set("RLSDK: not found (put RLSDK.zip next to the script)")
2888
+
2889
+ def _update_drop_label(self):
2890
+ if self.drop_target.enabled:
2891
+ self.drop_label.config(text="Drop an encrypted .upk anywhere on this window or use Open UPK")
2892
+ elif sys.platform == "win32":
2893
+ self.drop_label.config(text="Open UPK is ready. Drag/drop initializes after the window is fully created.")
2894
+ else:
2895
+ self.drop_label.config(text="Open UPK is available. Native drag/drop is enabled on Windows.")
2896
+
2897
def set_status(self, text: str):
    """Show *text* in the status bar and flush pending idle redraws so the
    message is visible immediately, even mid-operation."""
    self.status_var.set(text)
    self.root.update_idletasks()
+
2901
def open_file(self):
    """Prompt for a .upk file and load it unless the dialog was cancelled."""
    chosen = filedialog.askopenfilename(filetypes=[("Unreal Packages", "*.upk"), ("All Files", "*.*")])
    if chosen:
        self.load_input(Path(chosen))
+
2906
+ def _on_drop_files(self, files: List[str]):
2907
+ for file_path in files:
2908
+ path = Path(file_path)
2909
+ if path.suffix.lower() == ".upk":
2910
+ self.load_input(path)
2911
+ return
2912
+ messagebox.showerror("Invalid file", "Drop a .upk file.")
2913
+
2914
def load_input(self, input_path: Path):
    """Remember the selection and hash/parse it on a daemon worker thread
    so the Tk main loop stays responsive."""
    self.selected_input_path = input_path
    self.set_status(f"Hashing {input_path.name}...")
    worker = threading.Thread(target=self._load_input_worker, args=(input_path,), daemon=True)
    worker.start()
+
2919
def _load_input_worker(self, input_path: Path):
    """Background-thread body for load_input: hash, decrypt/parse, report.

    Every widget/state update is marshalled back onto the Tk main loop via
    root.after(0, ...); only the pure computation (hashing, package
    resolution) runs on this thread. Errors are condensed to a single line
    and shown via _show_error on the UI thread.
    """
    try:
        sha1 = sha1_file(input_path)
        self.sha1_memory[str(input_path)] = {"sha1": sha1}
        self.root.after(0, lambda: self._refresh_history())
        self.root.after(0, lambda: self.set_status(f"Opening {input_path.name}..."))
        decrypted_path, package, provider, keys_path, was_encrypted = resolve_input_package(input_path, self.decrypted_dir, self.script_dir)
        # keys_path is None when the package needed no decryption key.
        label = keys_path if keys_path is not None else "not required"
        self.root.after(0, lambda: self.keys_var.set(f"Keys: {label}"))
        if decrypted_path != input_path:
            # A separate decrypted working copy was produced; announce it.
            self.root.after(0, lambda: self.set_status(f"Parsing {decrypted_path.name}..."))
        self.root.after(0, lambda: self._apply_loaded_package(input_path, sha1, keys_path or Path(""), decrypted_path, package, provider))
    except Exception as exc:
        # format_exception_only keeps the report to the exception line only
        # (no traceback), suitable for a dialog.
        details = "".join(traceback.format_exception_only(type(exc), exc)).strip()
        self.root.after(0, lambda: self._show_error(details))
+
2935
+ def _refresh_history(self):
2936
+ for item in self.history_tree.get_children():
2937
+ self.history_tree.delete(item)
2938
+ for path, record in self.sha1_memory.items():
2939
+ self.history_tree.insert("", "end", text=Path(path).name, values=(record["sha1"],))
2940
+
2941
+ def _clear_tree(self, tree: ttk.Treeview):
2942
+ for item in tree.get_children():
2943
+ tree.delete(item)
2944
+
2945
def _apply_loaded_package(self, input_path: Path, sha1: str, keys_path: Path, decrypted_path: Path, package: ParsedPackage, provider: Optional[DecryptionProvider]):
    """Install a freshly parsed package as the current document and repaint
    every pane. Runs on the Tk main thread (callers use root.after).

    keys_path uses Path("") as a sentinel for "no keys file"; provider is
    None for packages that were not encrypted.
    """
    self.package = package
    self.current_provider = provider
    self.current_keys_path = keys_path if str(keys_path) else None
    # Only remember the encrypted source when a provider exists, so
    # save_reencrypted_upk can refuse to run on plain packages.
    self.current_encrypted_input_path = input_path if provider is not None else None
    self.current_original_sha1 = sha1
    self.original_var.set(f"Original: {input_path}")
    self.sha1_var.set(f"SHA-1: {sha1}")
    self.decrypted_var.set(f"Decrypted: {decrypted_path}")
    self.keys_var.set(f"Keys: {keys_path if str(keys_path) else 'not required'}")
    self._populate_summary()
    self._populate_content_tree()
    self._populate_exports_tree()
    self._populate_imports_tree()
    self._populate_names_tree()
    # Reset the detail/raw/property panels and any stale selection state.
    self.details_text.delete("1.0", "end")
    self.raw_text.delete("1.0", "end")
    self._clear_tree(self.properties_tree)
    self.current_export = None
    self.current_property = None
    self.property_edit_var.set("")
    self.property_info_var.set("Select a property to edit")
    self.current_properties = []
    self.details_text.insert("1.0", self._format_package_overview())
    self.set_status(f"Loaded {input_path.name}")
+
2971
+ def _populate_summary(self):
2972
+ self._clear_tree(self.summary_tree)
2973
+ if not self.package:
2974
+ return
2975
+ s = self.package.summary
2976
+ rows = [
2977
+ ("FileVersion", s.file_version),
2978
+ ("LicenseeVersion", s.licensee_version),
2979
+ ("TotalHeaderSize", s.total_header_size),
2980
+ ("FolderName", s.folder_name),
2981
+ ("PackageFlags", hex(s.package_flags)),
2982
+ ("NameCount", s.name_count),
2983
+ ("NameOffset", s.name_offset),
2984
+ ("ExportCount", s.export_count),
2985
+ ("ExportOffset", s.export_offset),
2986
+ ("ImportCount", s.import_count),
2987
+ ("ImportOffset", s.import_offset),
2988
+ ("DependsOffset", s.depends_offset),
2989
+ ("CompressionFlags", hex(s.compression_flags)),
2990
+ ("EngineVersion", s.engine_version),
2991
+ ("CookerVersion", s.cooker_version),
2992
+ ("Guid", f"{s.guid[0]:08X}-{s.guid[1]:08X}-{s.guid[2]:08X}-{s.guid[3]:08X}"),
2993
+ ("Generations", len(s.generations)),
2994
+ ]
2995
+ for key, value in rows:
2996
+ self.summary_tree.insert("", "end", text=key, values=(value,))
2997
+
2998
+ def _populate_content_tree(self):
2999
+ self._clear_tree(self.content_tree)
3000
+ self.current_content_map.clear()
3001
+ if not self.package:
3002
+ return
3003
+ children: Dict[int, List[ExportEntry]] = {}
3004
+ for export in self.package.exports:
3005
+ children.setdefault(export.outer_index, []).append(export)
3006
+ roots = [export for export in self.package.exports if export.outer_index == 0 and export.class_index != 0]
3007
+ for export in roots:
3008
+ self._insert_content_export("", export, children)
3009
+
3010
def _insert_content_export(self, parent: str, export: ExportEntry, children: Dict[int, List[ExportEntry]]):
    """Insert *export* under tree item *parent*, then recurse into children.

    Object references are 1-based export indices, so this export's children
    are keyed by table_index + 1 in the *children* grouping built by
    _populate_content_tree. Row iids follow the "export:<table_index>"
    convention shared with the exports tab.
    """
    iid = f"export:{export.table_index}"
    name = self.package.resolve_name(export.object_name)
    self.current_content_map[iid] = export
    path = self.package.resolve_object_path(export.table_index + 1)
    node = self.content_tree.insert(parent, "end", iid=iid, text=name, values=(self.package.export_class_name(export), path), open=False)
    for child in children.get(export.table_index + 1, []):
        self._insert_content_export(node, child, children)
+
3019
def _populate_exports_tree(self):
    """Rebuild the flat exports table, optionally hiding placeholder exports.

    Placeholders (as classified by package.is_placeholder_export) are either
    skipped entirely (checkbox on) or shown with a gray "placeholder" tag.
    A summary of shown/hidden counts is written to
    exports_placeholder_count_var in all three cases.
    """
    self._clear_tree(self.exports_tree)
    self.current_export_map.clear()
    if not self.package:
        self.exports_placeholder_count_var.set("")
        return
    hide_placeholders = bool(self.hide_placeholder_exports_var.get())
    placeholder_count = 0
    shown = 0
    for export in self.package.exports:
        is_placeholder = self.package.is_placeholder_export(export)
        if is_placeholder:
            placeholder_count += 1
            if hide_placeholders:
                continue
        iid = f"export:{export.table_index}"
        self.current_export_map[iid] = export
        # The "placeholder" tag is presumably styled gray elsewhere — the
        # tag_configure call is not visible in this part of the file.
        tags = ("placeholder",) if is_placeholder else ()
        class_label = "Class (placeholder)" if is_placeholder else self.package.export_class_name(export)
        self.exports_tree.insert(
            "",
            "end",
            iid=iid,
            text=str(export.table_index),
            values=(
                self.package.resolve_name(export.object_name),
                class_label,
                self.package.resolve_object_ref(export.outer_index),
                export.serial_size,
                export.serial_offset,
            ),
            tags=tags,
        )
        shown += 1
    if placeholder_count == 0:
        self.exports_placeholder_count_var.set(f"{shown} exports, no placeholders")
    elif hide_placeholders:
        self.exports_placeholder_count_var.set(
            f"{shown} shown, {placeholder_count} placeholders hidden"
        )
    else:
        self.exports_placeholder_count_var.set(
            f"{shown} exports ({placeholder_count} placeholders, marked gray)"
        )
+
3064
+ def _populate_imports_tree(self):
3065
+ self._clear_tree(self.imports_tree)
3066
+ self.current_import_map.clear()
3067
+ if not self.package:
3068
+ return
3069
+ for imp in self.package.imports:
3070
+ iid = f"import:{imp.table_index}"
3071
+ self.current_import_map[iid] = imp
3072
+ self.imports_tree.insert(
3073
+ "",
3074
+ "end",
3075
+ iid=iid,
3076
+ text=str(imp.table_index),
3077
+ values=(
3078
+ self.package.resolve_name(imp.object_name),
3079
+ self.package.resolve_name(imp.class_name),
3080
+ self.package.resolve_name(imp.class_package),
3081
+ self.package.resolve_object_ref(imp.outer_index),
3082
+ ),
3083
+ )
3084
+
3085
+ def _populate_names_tree(self):
3086
+ self._clear_tree(self.names_tree)
3087
+ self.current_name_map.clear()
3088
+ # Reset the inline rename row so stale text from a previous package or
3089
+ # selection doesn't carry over after a reload.
3090
+ if hasattr(self, "name_edit_var"):
3091
+ self.name_edit_var.set("")
3092
+ if hasattr(self, "name_edit_info_var"):
3093
+ self.name_edit_info_var.set("Select a name to rename")
3094
+ if not self.package:
3095
+ return
3096
+ for name in self.package.names:
3097
+ iid = f"name:{name.index}"
3098
+ self.current_name_map[iid] = name
3099
+ self.names_tree.insert("", "end", iid=iid, text=str(name.index), values=(name.name, hex(name.flags)))
3100
+
3101
+ def _format_package_overview(self) -> str:
3102
+ if not self.package:
3103
+ return ""
3104
+ s = self.package.summary
3105
+ return "\n".join([
3106
+ f"Package: {self.package.file_path.name}",
3107
+ f"Names: {len(self.package.names)}",
3108
+ f"Imports: {len(self.package.imports)}",
3109
+ f"Exports: {len(self.package.exports)}",
3110
+ f"File Version: {s.file_version}",
3111
+ f"Licensee Version: {s.licensee_version}",
3112
+ f"Header Size: {s.total_header_size}",
3113
+ f"Package Flags: {hex(s.package_flags)}",
3114
+ f"Compression Flags: {hex(s.compression_flags)}",
3115
+ f"Engine Version: {s.engine_version}",
3116
+ f"Cooker Version: {s.cooker_version}",
3117
+ ])
3118
+
3119
def _populate_properties(self, export: ExportEntry):
    """Parse *export*'s serialized property tags and list them in the
    properties tree; resets the property editor state first.

    Row iids use "prop:<index>" where index comes from the parsed property
    itself, matching what _on_property_select expects.
    """
    self._clear_tree(self.properties_tree)
    self.current_export = export
    self.current_property = None
    self.property_edit_var.set("")
    self.property_info_var.set("Select a property to edit")
    self.current_properties = parse_serialized_properties(self.package, export, None) if self.package else []
    if not self.current_properties:
        # Placeholder row: no iid, so selection handlers ignore it.
        self.properties_tree.insert("", "end", text="No serialized property tags parsed", values=("", "", "", "", "", ""))
        return
    for prop in self.current_properties:
        label = f"{prop.index}: {prop.name}"
        iid = f"prop:{prop.index}"
        self.properties_tree.insert("", "end", iid=iid, text=label, values=(prop.declared_type, prop.tag_type, prop.owner_type, prop.size, prop.array_index, prop.value))
+
3134
def _show_export(self, export: ExportEntry):
    """Render an export's metadata, parsed properties, and a hex preview of
    its serial data into the detail panes.

    The hex preview is capped at HEX_PREVIEW_LIMIT bytes; UClass exports
    additionally get a DLLBind line located by scanning the raw serial data.
    """
    class_name = self.package.export_class_name(export)
    raw = self.package.object_data(export)
    preview = raw[:HEX_PREVIEW_LIMIT]
    self._populate_properties(export)
    details = [
        f"Type: Export[{export.table_index}]",
        f"Name: {self.package.resolve_name(export.object_name)}",
        # Object references are 1-based, hence table_index + 1.
        f"Path: {self.package.resolve_object_path(export.table_index + 1)}",
        f"Class: {class_name}",
        f"ClassIndex: {export.class_index} ({self.package.resolve_object_ref(export.class_index)})",
        f"SuperIndex: {export.super_index} ({self.package.resolve_object_ref(export.super_index)})",
        f"OuterIndex: {export.outer_index} ({self.package.resolve_object_ref(export.outer_index)})",
        f"ArchetypeIndex: {export.archetype_index} ({self.package.resolve_object_ref(export.archetype_index)})",
        f"ObjectFlags: {hex(export.object_flags)}",
        f"ExportFlags: {hex(export.export_flags)}",
        f"PackageFlags: {hex(export.package_flags)}",
        f"SerialSize: {export.serial_size}",
        f"SerialOffset: {export.serial_offset}",
        f"PropertiesParsed: {len(self.current_properties)}",
        f"NetObjects: {export.net_objects}",
        f"PackageGuid: {export.package_guid[0]:08X}-{export.package_guid[1]:08X}-{export.package_guid[2]:08X}-{export.package_guid[3]:08X}",
    ]
    # ── DLLBind info (UClass exports only) ──────────────────────────────
    if is_uclass_export(self.package, export):
        dllbind_result = find_uclass_dllbind_fstring_offset(raw)
        if dllbind_result is not None:
            dllbind_offset, dllbind_name = dllbind_result
            details.append(
                f"DLLBind: {dllbind_name!r} "
                f"(FString at serial+0x{dllbind_offset:X})"
                if dllbind_name else
                f"DLLBind: (none) (FString at serial+0x{dllbind_offset:X})"
            )
        else:
            details.append("DLLBind: <could not locate DLLBindName field>")
    self.details_text.delete("1.0", "end")
    self.details_text.insert("1.0", "\n".join(details))
    self.raw_text.delete("1.0", "end")
    if raw:
        header = f"Object data preview: {len(preview)} / {len(raw)} bytes"
        if len(raw) > HEX_PREVIEW_LIMIT:
            header += f"\nPreview truncated at {HEX_PREVIEW_LIMIT} bytes\n"
        else:
            header += "\n"
        self.raw_text.insert("1.0", header + "\n" + format_hex_preview(preview, export.serial_offset))
    else:
        self.raw_text.insert("1.0", "No serial data available for this export.")
+
3183
+ def _show_import(self, imp: ImportEntry):
3184
+ details = [
3185
+ f"Type: Import[{imp.table_index}]",
3186
+ f"Name: {self.package.resolve_name(imp.object_name)}",
3187
+ f"Path: {self.package.resolve_object_path(-imp.table_index - 1)}",
3188
+ f"ClassName: {self.package.resolve_name(imp.class_name)}",
3189
+ f"ClassPackage: {self.package.resolve_name(imp.class_package)}",
3190
+ f"OuterIndex: {imp.outer_index} ({self.package.resolve_object_ref(imp.outer_index)})",
3191
+ ]
3192
+ self.details_text.delete("1.0", "end")
3193
+ self.details_text.insert("1.0", "\n".join(details))
3194
+ self.raw_text.delete("1.0", "end")
3195
+ self.raw_text.insert("1.0", "Imports do not contain local serial data in the package.")
3196
+ self._clear_tree(self.properties_tree)
3197
+ self.current_export = None
3198
+ self.current_property = None
3199
+ self.property_edit_var.set("")
3200
+ self.property_info_var.set("Select a property to edit")
3201
+ self.current_properties = []
3202
+
3203
+ def _show_name(self, name: NameEntry):
3204
+ self.details_text.delete("1.0", "end")
3205
+ self.details_text.insert("1.0", f"Type: Name[{name.index}]\nName: {name.name}\nFlags: {hex(name.flags)}")
3206
+ self.raw_text.delete("1.0", "end")
3207
+ self.raw_text.insert("1.0", "")
3208
+ self._clear_tree(self.properties_tree)
3209
+ self.current_properties = []
3210
+
3211
+ def _on_content_select(self, _event):
3212
+ selection = self.content_tree.selection()
3213
+ if not selection:
3214
+ return
3215
+ iid = selection[0]
3216
+ export = self.current_content_map.get(iid)
3217
+ if export:
3218
+ self._show_export(export)
3219
+
3220
+ def _on_exports_select(self, _event):
3221
+ selection = self.exports_tree.selection()
3222
+ if not selection:
3223
+ return
3224
+ export = self.current_export_map.get(selection[0])
3225
+ if export:
3226
+ self._show_export(export)
3227
+
3228
+ def _on_imports_select(self, _event):
3229
+ selection = self.imports_tree.selection()
3230
+ if not selection:
3231
+ return
3232
+ imp = self.current_import_map.get(selection[0])
3233
+ if imp:
3234
+ self._show_import(imp)
3235
+
3236
+ def _on_names_select(self, _event):
3237
+ selection = self.names_tree.selection()
3238
+ if not selection:
3239
+ return
3240
+ name = self.current_name_map.get(selection[0])
3241
+ if name:
3242
+ self._show_name(name)
3243
+ # Pre-fill the rename entry with the current text and update the
3244
+ # info label so the user knows exactly which entry will be edited.
3245
+ self.name_edit_var.set(name.name)
3246
+ self.name_edit_info_var.set(
3247
+ f"Name[{name.index}] flags={hex(name.flags)} ({len(name.name)} chars)"
3248
+ )
3249
+
3250
+ def _on_names_double_click(self, _event):
3251
+ # Convenience: double-click jumps focus into the rename field with the
3252
+ # current text pre-selected so the user can just start typing.
3253
+ if not self.names_tree.selection():
3254
+ return
3255
+ self.name_edit_entry.focus_set()
3256
+ self.name_edit_entry.select_range(0, "end")
3257
+ self.name_edit_entry.icursor("end")
3258
+
3259
def _on_property_select(self, _event):
    """Track the selected property row and seed the edit entry/info label.

    Only rows whose iid matches "prop:<int>" (set by _populate_properties)
    are handled; the placeholder row has no such iid.
    """
    selection = self.properties_tree.selection()
    if not selection:
        return
    iid = selection[0]
    if not iid.startswith("prop:"):
        return
    try:
        index = int(iid.split(":", 1)[1])
    except Exception:
        return
    if 0 <= index < len(self.current_properties):
        prop = self.current_properties[index]
        self.current_property = prop
        self.property_edit_var.set(prop.value)
        # The info label shows the absolute file offset of the editable
        # value: serial_offset + value_offset, except that a BoolProperty's
        # byte is taken one byte earlier (value_offset - 1) when a parsed
        # bool value exists — presumably because the bool lives in the tag
        # itself; TODO confirm against the property parser/writer.
        self.property_info_var.set(f"{prop.name} | {prop.tag_type} | file+0x{(self.current_export.serial_offset + (prop.value_offset - 1 if prop.tag_type == 'BoolProperty' and prop.bool_value is not None else prop.value_offset)):X}")
+
3276
def apply_property_edit(self):
    """Write the edited property value into the package bytes and reload.

    The edit is applied to the raw file bytes, the whole package is then
    re-parsed from those bytes, and the same export/property is re-selected
    (matched by name + array_index, since indices can shift after a
    re-parse of the rebuilt bytes).
    """
    if not self.package or not self.current_export or not self.current_property:
        messagebox.showwarning("UPK GUI", "Select an export and a property first.")
        return
    try:
        # Capture identity before self.package/current_export are replaced.
        export_index = self.current_export.table_index
        prop_name = self.current_property.name
        prop_array_index = self.current_property.array_index
        new_bytes = apply_property_edit_bytes(self.package, self.current_export, self.current_property, self.property_edit_var.get())
        self.package = parse_decrypted_package_bytes(self.package.file_path, new_bytes)
        self.current_export = self.package.exports[export_index]
        # _show_export repopulates self.current_properties from the new bytes.
        self._show_export(self.current_export)
        for i, prop in enumerate(self.current_properties):
            if prop.name == prop_name and prop.array_index == prop_array_index:
                iid = f"prop:{i}"
                if self.properties_tree.exists(iid):
                    self.properties_tree.selection_set(iid)
                    self.properties_tree.focus(iid)
                    self.current_property = prop
                    self.property_edit_var.set(prop.value)
                break
        self.set_status(f"Edited {prop_name}")
    except Exception as exc:
        messagebox.showerror("UPK GUI", str(exc))
+
3301
def _pick_donor_key_upk(self):
    """Choose an encrypted UPK whose decryption key will be used the next
    time 'Save Re-Encrypted UPK' runs with the donor-key checkbox ticked.

    Only the path is stored here; the key itself is resolved from keys.txt
    at save time, so a freshly updated keys.txt is automatically honored.
    """
    chosen = filedialog.askopenfilename(
        title="Select donor encrypted UPK (to source the encryption key)",
        filetypes=[("UPK files", "*.upk"), ("All files", "*.*")],
        initialdir=str(self.script_dir),
    )
    if not chosen:
        return
    donor = Path(chosen)
    self.donor_key_upk_path = donor
    self.donor_key_path_var.set(donor.name)
    # Picking a file implies intent — enable the checkbox automatically.
    self.use_donor_key_var.set(True)
    self.set_status(f"Donor key UPK set: {donor.name}")
+
3319
def save_reencrypted_upk(self):
    """Re-encrypt the in-memory package bytes and save them as a new .upk.

    Optionally encrypts with a key sourced from a user-selected donor UPK
    (the key is resolved from keys.txt at save time). After saving, the
    original package's SHA-1 is patched inside RocketLeague.exe when that
    file sits next to the script.
    """
    source_path = self.current_encrypted_input_path or self.selected_input_path
    if not self.package or not source_path or not self.current_provider:
        messagebox.showwarning("UPK GUI", "Load an encrypted package first.")
        return

    # ── Resolve override key from donor UPK if requested ─────────────────
    override_key: Optional[bytes] = None
    if self.use_donor_key_var and self.use_donor_key_var.get():
        donor_path = self.donor_key_upk_path
        if not donor_path or not donor_path.exists():
            messagebox.showwarning(
                "UPK GUI",
                "The 'Encrypt with key from another UPK' option is enabled but no valid "
                "donor UPK has been selected.\n\n"
                "Click 'Pick Donor UPK…' to choose one, or uncheck the option to use "
                "the original package's own key.",
            )
            return
        # Build a provider from the same keys.txt so we can try all known keys.
        keys_path = self.current_keys_path
        try:
            donor_provider = DecryptionProvider(str(keys_path) if keys_path and keys_path.exists() else None)
        except Exception as exc:
            messagebox.showerror("UPK GUI", f"Could not load keys.txt for donor key search:\n{exc}")
            return
        try:
            self.set_status(f"Finding key for donor UPK: {donor_path.name}…")
            override_key = find_key_for_encrypted_upk(donor_path, donor_provider)
            # base64 is already imported at module level; the previous local
            # "import base64 as _b64" was redundant shadowing.
            self.set_status(f"Donor key found ({base64.b64encode(override_key).decode()[:16]}…), saving…")
        except Exception as exc:
            messagebox.showerror(
                "UPK GUI",
                f"Could not find a working decryption key for the donor UPK:\n{donor_path.name}\n\n{exc}\n\n"
                "Make sure keys.txt contains a key that can decrypt the selected donor package.",
            )
            return

    default_name = f"{source_path.stem}_edited{source_path.suffix}"
    initial_dir = str(self.script_dir / "ReEncrypted")
    out_path = filedialog.asksaveasfilename(
        title="Save re-encrypted UPK",
        defaultextension=".upk",
        filetypes=[("UPK files", "*.upk"), ("All files", "*.*")],
        initialdir=initial_dir,
        initialfile=default_name,
    )
    if not out_path:
        return
    try:
        saved = build_reencrypted_package(
            source_path,
            bytes(self.package.file_bytes),
            self.current_provider,
            Path(out_path),
            override_key=override_key,
        )
        new_sha1 = sha1_file(saved)
        patch_note = "RocketLeague.exe patch skipped (file not found)."
        exe_path = self.script_dir / "RocketLeague.exe"
        if self.current_original_sha1 and exe_path.exists():
            replaced = patch_sha1_in_exe(exe_path, self.current_original_sha1, new_sha1)
            patch_note = f"Patched {exe_path.name}: {replaced} occurrence(s)."
        elif self.current_original_sha1 and not exe_path.exists():
            patch_note = f"RocketLeague.exe patch skipped ({exe_path.name} not found)."
        key_note = ""
        if override_key is not None:
            key_note = f"\nEncryption key sourced from: {self.donor_key_upk_path.name}\n(key prefix: {base64.b64encode(override_key).decode()[:16]}…)"
        self.set_status(f"Saved {saved.name}")
        prompt = f"Saved re-encrypted UPK:\n{saved}\n\nSHA-1: {new_sha1}\n{patch_note}{key_note}\n\nOpen it now?"
        if messagebox.askyesno("UPK GUI", prompt):
            # NOTE(review): _load_selected_file is not defined in this part of
            # the file; sibling handlers call self.load_input(...). Confirm it
            # exists elsewhere, otherwise this should be self.load_input(saved).
            self._load_selected_file(saved)
    except Exception as exc:
        messagebox.showerror("UPK GUI", str(exc))
+
3396
def import_donor_names(self):
    """Merge name-table entries from a user-chosen donor UPK into the
    current package, then reload the GUI from the merged bytes.

    NOTE: the bare call below resolves to the module-level helper
    import_donor_names(...), not this method — the method merely shares
    its name (class scope does not shadow the global at call time).
    """
    if not self.package:
        messagebox.showwarning("UPK GUI", "Load a package first.")
        return
    donor_path = filedialog.askopenfilename(
        title="Select donor UPK",
        filetypes=[("UPK files", "*.upk"), ("All files", "*.*")],
        initialdir=str(self.script_dir),
    )
    if not donor_path:
        return
    try:
        donor_input = Path(donor_path)
        _donor_decrypted, donor_package, _donor_provider, _donor_keys, _donor_was_encrypted = resolve_input_package(donor_input, self.decrypted_dir, self.script_dir)
        # Blank answer means "import every donor name".
        name_to_import = simpledialog.askstring("Import Donor Names", "Name to import from donor package (leave blank for all donor names):", parent=self.root)
        selected = [name_to_import.strip()] if name_to_import and name_to_import.strip() else None
        merged = import_donor_names(self.package, donor_package, selected)
        self.package = merged
        self._apply_loaded_package(self.selected_input_path or self.package.file_path, self.current_original_sha1 or "-", self.current_keys_path or Path("keys.txt"), self.package.file_path, merged, self.current_provider)
        # The helper presumably stamps the merge stats onto the result;
        # default to 0 if the attribute is absent.
        added_names = getattr(merged, '_merge_added_names', 0)
        self.set_status(f"Imported donor names: +{added_names} names")
        messagebox.showinfo("UPK GUI", f"Imported donor names: +{added_names}")
    except Exception as exc:
        messagebox.showerror("UPK GUI", str(exc))
+
3421
def import_donor_exports(self):
    """Re-import all exports of a donor UPK as imports of the current
    package (under a Core.Package entry named after the donor file), then
    reload the GUI from the merged bytes.

    NOTE: the bare merge_donor_exports_as_imports call resolves at module
    scope; this method only drives the UI around it.
    """
    if not self.package:
        messagebox.showwarning("UPK GUI", "Load a package first.")
        return
    donor_path = filedialog.askopenfilename(
        title="Select donor UPK",
        filetypes=[("UPK files", "*.upk"), ("All files", "*.*")],
        initialdir=str(self.script_dir),
    )
    if not donor_path:
        return
    try:
        donor_input = Path(donor_path)
        donor_decrypted, donor_package, _donor_provider, _donor_keys, donor_was_encrypted = resolve_input_package(donor_input, self.decrypted_dir, self.script_dir)
        # Use the original (encrypted) input filename as the donor package
        # name, not the decrypted working copy's stem - the game looks up
        # the file by its deployed name in the cooked content folder.
        donor_pkg_name = donor_input.stem
        merged = merge_donor_exports_as_imports(self.package, donor_package, donor_pkg_name)
        self.package = merged
        self._apply_loaded_package(self.selected_input_path or self.package.file_path, self.current_original_sha1 or "-", self.current_keys_path or Path("keys.txt"), self.package.file_path, merged, self.current_provider)
        # Merge statistics are stamped onto the merged package by the helper.
        added_imports = getattr(merged, '_merge_added_imports', 0)
        added_names = getattr(merged, '_merge_added_names', 0)
        resolved_name = getattr(merged, '_merge_donor_package_name', donor_pkg_name)
        self.set_status(f"Imported donor exports as '{resolved_name}': +{added_imports} imports, +{added_names} names")
        messagebox.showinfo(
            "UPK GUI",
            f"Imported exports from {donor_input.name}\n\n"
            f"Donor package name (used in import paths): {resolved_name}\n"
            f"Added imports: {added_imports}\n"
            f"Added names: {added_names}\n\n"
            "All donor exports have been re-imported under a Core.Package "
            f"\"{resolved_name}\" import, so the engine will attempt to load "
            f"{resolved_name}.upk when this package is loaded.\n\n"
            "IMPORTANT: For the references to actually resolve at runtime, "
            f"deploy {donor_input.name} into the same cooked content folder "
            "where the game looks for .upk files (typically alongside the "
            "other Rocket League .upk files in CookedPCConsole). If the "
            "donor file isn't on disk where the engine can find it, the "
            "imports will fail to resolve and dependent objects will be "
            "missing at load time.\n\n"
            "You can now use the new imported object paths in ObjectProperty "
            "edits and the new imported names in NameProperty edits.",
        )
    except Exception as exc:
        messagebox.showerror("UPK GUI", str(exc))
+
3468
def replace_export_from_donor(self):
    """Replace the currently-selected export's data with a named export
    from a user-chosen donor UPK, then reload the GUI from the result.

    The target is identified by its full object path (1-based export ref);
    the donor export is identified by a path the user types in.
    """
    if not self.package:
        messagebox.showwarning("UPK GUI", "Load a package first.")
        return
    if not self.current_export:
        messagebox.showwarning("UPK GUI", "Select the target export first.")
        return
    donor_path = filedialog.askopenfilename(
        title="Select donor UPK",
        filetypes=[("UPK files", "*.upk"), ("All files", "*.*")],
        initialdir=str(self.script_dir),
    )
    if not donor_path:
        return
    try:
        donor_input = Path(donor_path)
        _donor_decrypted, donor_package, _donor_provider, _donor_keys, _donor_was_encrypted = resolve_input_package(donor_input, self.decrypted_dir, self.script_dir)
        target_path = self.package.resolve_object_path(self.current_export.table_index + 1)
        donor_export_path = simpledialog.askstring(
            "Replace Export From Donor",
            "Donor export path:",
            parent=self.root,
        )
        # Cancelled dialog or blank answer aborts silently.
        if not donor_export_path or not donor_export_path.strip():
            return
        replaced = replace_export_with_donor_export(self.package, donor_package, target_path, donor_export_path.strip())
        self.package = replaced
        self._apply_loaded_package(self.selected_input_path or self.package.file_path, self.current_original_sha1 or "-", self.current_keys_path or Path("keys.txt"), self.package.file_path, replaced, self.current_provider)
        # The helper may attach an explanatory note to the result.
        note = getattr(replaced, '_replace_note', '')
        self.set_status(f"Replaced export from donor: {Path(donor_path).name}")
        messagebox.showinfo(
            "UPK GUI",
            f"Replaced target export:\n{target_path}\n\nWith donor export:\n{donor_export_path.strip()}\n\n{note}",
        )
    except Exception as exc:
        messagebox.showerror("UPK GUI", str(exc))
+
3505
+
3506
def rename_export_fname(self):
    """Rename the currently-selected export's FName via a prompt, rebuild
    the package, and re-select the renamed export.

    NOTE: the bare rename_export_fname(...) call below resolves to the
    module-level helper of the same name, not this method.
    """
    if not self.package:
        messagebox.showwarning("UPK GUI", "Load a package first.")
        return
    if not self.current_export:
        messagebox.showwarning("UPK GUI", "Select the export you want to rename first.")
        return
    export_index = self.current_export.table_index
    current_name = self.package.resolve_name(self.current_export.object_name)
    new_name = simpledialog.askstring(
        "Rename Export FName",
        "New FName for this export.\n"
        "- Use 'BaseName' or 'BaseName_<N>' (e.g. 'MyMesh_3').\n"
        "- An existing name is reused; a new base name is appended to the name table.\n"
        f"\nCurrent: {current_name}",
        initialvalue=current_name,
        parent=self.root,
    )
    # None = dialog cancelled; blank or unchanged input is a no-op.
    if new_name is None:
        return
    new_name = new_name.strip()
    if not new_name:
        return
    if new_name == current_name:
        self.set_status("Rename skipped (name unchanged)")
        return
    try:
        renamed = rename_export_fname(self.package, self.current_export, new_name)
        self.package = renamed
        self._apply_loaded_package(
            self.selected_input_path or self.package.file_path,
            self.current_original_sha1 or "-",
            self.current_keys_path or Path("keys.txt"),
            self.package.file_path,
            renamed,
            self.current_provider,
        )
        # Re-select the renamed export so the user can immediately see/save it.
        if 0 <= export_index < len(self.package.exports):
            self.current_export = self.package.exports[export_index]
            self._show_export(self.current_export)
            iid = f"export:{export_index}"
            if self.exports_tree.exists(iid):
                self.exports_tree.selection_set(iid)
                self.exports_tree.focus(iid)
                self.exports_tree.see(iid)
        added = getattr(renamed, '_rename_added_names', 0)
        self.set_status(f"Renamed export[{export_index}] -> {new_name} (+{added} name)")
        messagebox.showinfo(
            "UPK GUI",
            f"Renamed export[{export_index}]:\n  {current_name}\n  ->\n  {new_name}\n\n"
            f"Names added to package: {added}\n\n"
            "Use 'Save Decrypted UPK' (for plain packages) or 'Save Re-Encrypted UPK' "
            "(for encrypted packages) to write the change to disk.",
        )
    except Exception as exc:
        messagebox.showerror("UPK GUI", str(exc))
+
3564
+ def rename_selected_name(self):
3565
+ """Rewrite the text of the currently-selected name table entry.
3566
+
3567
+ Triggered from the Names tab edit row (button or <Return> in the
3568
+ entry). Reads the new text from self.name_edit_var, calls the
3569
+ rename_name_entry helper which rebuilds the package's header (names
3570
+ blob + offsets + serial_offsets), and reloads the GUI from the
3571
+ rebuilt bytes. The same name index is re-selected after reload so
3572
+ the user can immediately verify the change.
3573
+ """
3574
+ if not self.package:
3575
+ messagebox.showwarning("UPK GUI", "Load a package first.")
3576
+ return
3577
+ selection = self.names_tree.selection()
3578
+ if not selection:
3579
+ messagebox.showwarning("UPK GUI", "Select a name in the Names list first.")
3580
+ return
3581
+ name_entry = self.current_name_map.get(selection[0])
3582
+ if name_entry is None:
3583
+ messagebox.showwarning("UPK GUI", "Could not resolve the selected name.")
3584
+ return
3585
+ new_text = self.name_edit_var.get().strip()
3586
+ if not new_text:
3587
+ messagebox.showwarning("UPK GUI", "New name text cannot be empty.")
3588
+ return
3589
+ if new_text == name_entry.name:
3590
+ self.set_status("Rename skipped (name unchanged)")
3591
+ return
3592
+ target_index = name_entry.index
3593
+ try:
3594
+ renamed = rename_name_entry(self.package, target_index, new_text)
3595
+ self.package = renamed
3596
+ self._apply_loaded_package(
3597
+ self.selected_input_path or self.package.file_path,
3598
+ self.current_original_sha1 or "-",
3599
+ self.current_keys_path or Path("keys.txt"),
3600
+ self.package.file_path,
3601
+ renamed,
3602
+ self.current_provider,
3603
+ )
3604
+ # Re-select the renamed entry. _populate_names_tree rebuilt the
3605
+ # tree from scratch, so the iid is regenerated but uses the same
3606
+ # name:<index> pattern.
3607
+ iid = f"name:{target_index}"
3608
+ if self.names_tree.exists(iid):
3609
+ self.names_tree.selection_set(iid)
3610
+ self.names_tree.focus(iid)
3611
+ self.names_tree.see(iid)
3612
+ delta = getattr(renamed, '_name_size_delta', 0)
3613
+ old_text = getattr(renamed, '_name_rename_old', name_entry.name)
3614
+ sign = "+" if delta >= 0 else ""
3615
+ self.set_status(
3616
+ f"Renamed name[{target_index}]: {old_text!r} -> {new_text!r} "
3617
+ f"({sign}{delta} bytes)"
3618
+ )
3619
+ messagebox.showinfo(
3620
+ "UPK GUI",
3621
+ f"Renamed name table entry [{target_index}]:\n"
3622
+ f" {old_text!r}\n"
3623
+ f" ->\n"
3624
+ f" {new_text!r}\n\n"
3625
+ f"Names blob delta: {sign}{delta} bytes\n"
3626
+ "Header offsets and export serial_offsets were rebuilt to "
3627
+ "match the new size.\n\n"
3628
+ "Use 'Save Decrypted UPK' (for plain packages) or "
3629
+ "'Save Re-Encrypted UPK' (for encrypted packages) to write "
3630
+ "the change to disk.",
3631
+ )
3632
+ except Exception as exc:
3633
+ messagebox.showerror("UPK GUI", str(exc))
3634
+
3635
    def set_dll_bind(self):
        """Inject or change the DLLBind DLL name on the currently selected UClass export.

        DLLBind causes the Unreal Engine to call LoadLibrary on the given DLL
        name immediately when the class is loaded from the package. This is
        the standard injection point used by Rocket League mod frameworks such
        as BakkesMod and CodeRed.

        The DLL name is stored as an FString (DLLBindName) at the very end of
        the UClass serial body. This method locates that FString, replaces it
        with the user-supplied value, and adjusts all relevant package offsets
        so the result is a valid, save-able package.

        Requirements:
          • The selected export must be a class definition (class_index → Class).
          • The DLL name must be pure ASCII (as required by the UE3 FString
            encoding used for DLLBindName in cooked packages).
          • No path separators — supply only the DLL filename, e.g. 'MyMod.dll'.
        """
        if not self.package or not self.current_export:
            messagebox.showwarning("UPK GUI", "Select a UClass export first.")
            return

        # DLLBind only exists on class-definition exports; bail early with a
        # message naming the actual class of the selection.
        if not is_uclass_export(self.package, self.current_export):
            cls = self.package.export_class_name(self.current_export)
            messagebox.showwarning(
                "UPK GUI",
                f"The selected export is a '{cls}', not a Class.\n\n"
                "DLLBind can only be set on class definition exports "
                "(exports whose class is 'Class' in the Exports tab). "
                "Select a UClass export and try again.",
            )
            return

        raw = self.package.object_data(self.current_export)
        if not raw:
            messagebox.showerror("UPK GUI", "The selected UClass export has no serial data.")
            return

        # Locate the DLLBindName FString inside the serial body; returns
        # (offset, current_name) or None when the layout can't be matched.
        dllbind_result = find_uclass_dllbind_fstring_offset(raw)
        if dllbind_result is None:
            messagebox.showerror(
                "UPK GUI",
                "Could not locate the DLLBindName field in this UClass export's serial data.\n\n"
                "The export's binary layout may be non-standard or the serial data may be "
                "truncated. Check the Raw Data tab for the hex dump.",
            )
            return

        fstring_offset, current_dll_name = dllbind_result

        new_dll_name = simpledialog.askstring(
            "Set DLLBind",
            f"Enter the DLL filename to bind (ASCII only, no path separators).\n"
            f"Example: CodeRed.dll\n\n"
            f"Leave blank to remove an existing DLLBind.\n\n"
            f"Current DLLBind: {repr(current_dll_name) if current_dll_name else '(none)'}\n"
            f"FString at serial+0x{fstring_offset:X}",
            initialvalue=current_dll_name,
            parent=self.root,
        )
        if new_dll_name is None:
            return  # user cancelled

        new_dll_name = new_dll_name.strip()

        # An empty string is a valid input here (it clears the binding), so
        # only a no-op rename is skipped.
        if new_dll_name == current_dll_name:
            self.set_status("DLLBind unchanged — no action taken.")
            return

        try:
            export_index = self.current_export.table_index
            export_name = self.package.resolve_name(self.current_export.object_name)
            new_bytes = set_uclass_dllbind_name(self.package, self.current_export, new_dll_name)
            # Re-parse the rewritten bytes so the in-memory package (offsets,
            # tables, views) matches what would be saved to disk.
            self.package = parse_decrypted_package_bytes(self.package.file_path, new_bytes)
            self.current_export = self.package.exports[export_index]
            self._show_export(self.current_export)

            if new_dll_name:
                action_msg = f"Set DLLBind → '{new_dll_name}'"
                detail_msg = (
                    f"Export[{export_index}] '{export_name}' will now cause the engine "
                    f"to load '{new_dll_name}' via LoadLibrary when this package is opened."
                )
            else:
                action_msg = "Removed DLLBind"
                detail_msg = f"DLLBind was cleared on Export[{export_index}] '{export_name}'."

            self.set_status(action_msg)
            messagebox.showinfo(
                "UPK GUI",
                f"{action_msg}\n\n"
                f"{detail_msg}\n\n"
                "Use 'Save Re-Encrypted UPK' (for encrypted RL packages) or "
                "'Save Decrypted UPK' (for plain packages) to write the change to disk.",
            )
        except Exception as exc:
            messagebox.showerror("UPK GUI", str(exc))
+
3734
+ def save_decrypted_upk(self):
3735
+ """Save the current (possibly edited) decrypted package bytes to a .upk file.
3736
+
3737
+ Use this when the loaded package is plain/decompressed (not encrypted).
3738
+ For encrypted Rocket League packages, use Save Re-Encrypted UPK instead.
3739
+ """
3740
+ if not self.package:
3741
+ messagebox.showwarning("UPK GUI", "Load a package first.")
3742
+ return
3743
+ # Pick a sensible default location/name. If the original input was a
3744
+ # plain UPK, write next to it; otherwise drop into the decrypted_dir.
3745
+ source_path = self.selected_input_path or self.package.file_path
3746
+ if self.current_provider is not None:
3747
+ initial_dir = str(self.decrypted_dir)
3748
+ default_name = f"{source_path.stem}_edited_decrypted.upk"
3749
+ else:
3750
+ initial_dir = str(source_path.parent)
3751
+ default_name = f"{source_path.stem}_edited{source_path.suffix or '.upk'}"
3752
+ out_path = filedialog.asksaveasfilename(
3753
+ title="Save decrypted UPK",
3754
+ defaultextension=".upk",
3755
+ filetypes=[("UPK files", "*.upk"), ("All files", "*.*")],
3756
+ initialdir=initial_dir,
3757
+ initialfile=default_name,
3758
+ )
3759
+ if not out_path:
3760
+ return
3761
+ try:
3762
+ out = Path(out_path)
3763
+ out.parent.mkdir(parents=True, exist_ok=True)
3764
+ out.write_bytes(bytes(self.package.file_bytes))
3765
+ self.set_status(f"Saved {out.name}")
3766
+ note = ""
3767
+ if self.current_provider is not None:
3768
+ note = (
3769
+ "\n\nNote: this is a decrypted/plain copy. The Rocket League game "
3770
+ "loader expects the encrypted format - use 'Save Re-Encrypted UPK' "
3771
+ "to produce a file the game will load."
3772
+ )
3773
+ prompt = f"Saved decrypted UPK:\n{out}{note}\n\nOpen it now?"
3774
+ if messagebox.askyesno("UPK GUI", prompt):
3775
+ self._load_selected_file(out)
3776
+ except Exception as exc:
3777
+ messagebox.showerror("UPK GUI", str(exc))
3778
+
3779
+
3780
+ def verify_current_package(self):
3781
+ """Run consistency checks on the loaded package and show a report dialog.
3782
+
3783
+ Useful when an edited package freezes or crashes the game on load -
3784
+ the report will pinpoint which header invariant is violated. Read-only,
3785
+ does not modify or save the package.
3786
+ """
3787
+ if not self.package:
3788
+ messagebox.showwarning("UPK GUI", "Load a package first.")
3789
+ return
3790
+ try:
3791
+ findings = verify_package(self.package)
3792
+ report = format_verify_report(findings)
3793
+ except Exception as exc:
3794
+ messagebox.showerror("UPK GUI", f"Verification failed to run: {exc}")
3795
+ return
3796
+
3797
+ # Show the report in a dedicated scrollable dialog so long reports
3798
+ # don't get clipped by messagebox's fixed sizing.
3799
+ dlg = tk.Toplevel(self.root)
3800
+ dlg.title("Package Verification Report")
3801
+ dlg.geometry("980x560")
3802
+ dlg.transient(self.root)
3803
+ text = tk.Text(dlg, wrap="word", bg="#111318", fg="#e8ecf4", insertbackground="#ffffff")
3804
+ vsb = ttk.Scrollbar(dlg, orient="vertical", command=text.yview)
3805
+ text.configure(yscrollcommand=vsb.set)
3806
+ text.grid(row=0, column=0, sticky="nsew")
3807
+ vsb.grid(row=0, column=1, sticky="ns")
3808
+ dlg.rowconfigure(0, weight=1)
3809
+ dlg.columnconfigure(0, weight=1)
3810
+ text.insert("1.0", report)
3811
+ text.tag_configure("error", foreground="#ff7676")
3812
+ text.tag_configure("warn", foreground="#ffd166")
3813
+ text.tag_configure("ok", foreground="#7be39a")
3814
+ for line_idx, line in enumerate(report.split("\n"), start=1):
3815
+ tag = None
3816
+ if line.startswith("[ERROR]"):
3817
+ tag = "error"
3818
+ elif line.startswith("[WARN]"):
3819
+ tag = "warn"
3820
+ elif line.startswith("[OK]"):
3821
+ tag = "ok"
3822
+ if tag is not None:
3823
+ text.tag_add(tag, f"{line_idx}.0", f"{line_idx}.end")
3824
+ text.configure(state="disabled")
3825
+
3826
+ button_row = ttk.Frame(dlg)
3827
+ button_row.grid(row=1, column=0, columnspan=2, sticky="ew", padx=8, pady=6)
3828
+
3829
+ def _copy():
3830
+ self.root.clipboard_clear()
3831
+ self.root.clipboard_append(report)
3832
+ self.set_status("Verification report copied to clipboard")
3833
+
3834
+ ttk.Button(button_row, text="Copy to Clipboard", command=_copy).pack(side="left")
3835
+ ttk.Button(button_row, text="Close", command=dlg.destroy).pack(side="right")
3836
+
3837
+ error_count = sum(1 for sev, _ in findings if sev == "ERROR")
3838
+ if error_count:
3839
+ self.set_status(f"Verification: {error_count} error(s) found")
3840
+ else:
3841
+ self.set_status("Verification: no errors found")
3842
+
3843
+
3844
    def _show_error(self, details: str) -> None:
        """Report a failure: mark the status bar and show *details* in a modal error dialog."""
        self.set_status("Failed")
        messagebox.showerror("UPK GUI", details)
+
3848
+
3849
def main() -> int:
    """GUI entry point: reject any stray CLI arguments, then run the Tk loop.

    Returns 0 on normal exit (used as the process exit code by the
    ``__main__`` guard).
    """
    # add_help=False + no declared options: any argument is rejected.
    cli = argparse.ArgumentParser(add_help=False)
    cli.parse_args()
    window = tk.Tk()
    App(window)
    window.mainloop()
    return 0
+
3857
+
3858
+ if __name__ == "__main__":
3859
+ raise SystemExit(main())