Kea2-python 1.0.6b0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of Kea2-python might be problematic.
- kea2/__init__.py +3 -0
- kea2/absDriver.py +56 -0
- kea2/adbUtils.py +554 -0
- kea2/assets/config_version.json +16 -0
- kea2/assets/fastbot-thirdpart.jar +0 -0
- kea2/assets/fastbot_configs/abl.strings +2 -0
- kea2/assets/fastbot_configs/awl.strings +3 -0
- kea2/assets/fastbot_configs/max.config +7 -0
- kea2/assets/fastbot_configs/max.fuzzing.strings +699 -0
- kea2/assets/fastbot_configs/max.schema.strings +1 -0
- kea2/assets/fastbot_configs/max.strings +3 -0
- kea2/assets/fastbot_configs/max.tree.pruning +27 -0
- kea2/assets/fastbot_configs/teardown.py +18 -0
- kea2/assets/fastbot_configs/widget.block.py +38 -0
- kea2/assets/fastbot_libs/arm64-v8a/libfastbot_native.so +0 -0
- kea2/assets/fastbot_libs/armeabi-v7a/libfastbot_native.so +0 -0
- kea2/assets/fastbot_libs/x86/libfastbot_native.so +0 -0
- kea2/assets/fastbot_libs/x86_64/libfastbot_native.so +0 -0
- kea2/assets/framework.jar +0 -0
- kea2/assets/kea2-thirdpart.jar +0 -0
- kea2/assets/monkeyq.jar +0 -0
- kea2/assets/quicktest.py +126 -0
- kea2/cli.py +320 -0
- kea2/fastbotManager.py +267 -0
- kea2/fastbotx/ActivityTimes.py +52 -0
- kea2/fastbotx/ReuseEntry.py +74 -0
- kea2/fastbotx/ReuseModel.py +63 -0
- kea2/fastbotx/__init__.py +7 -0
- kea2/fbm_parser.py +871 -0
- kea2/fs_lock.py +131 -0
- kea2/kea2_api.py +166 -0
- kea2/keaUtils.py +1112 -0
- kea2/kea_launcher.py +319 -0
- kea2/logWatcher.py +92 -0
- kea2/mixin.py +22 -0
- kea2/report/__init__.py +0 -0
- kea2/report/bug_report_generator.py +793 -0
- kea2/report/mixin.py +482 -0
- kea2/report/report_merger.py +797 -0
- kea2/report/templates/bug_report_template.html +3876 -0
- kea2/report/templates/merged_bug_report_template.html +3333 -0
- kea2/report/utils.py +10 -0
- kea2/resultSyncer.py +65 -0
- kea2/u2Driver.py +610 -0
- kea2/utils.py +184 -0
- kea2/version_manager.py +102 -0
- kea2_python-1.0.6b0.dist-info/METADATA +447 -0
- kea2_python-1.0.6b0.dist-info/RECORD +52 -0
- kea2_python-1.0.6b0.dist-info/WHEEL +5 -0
- kea2_python-1.0.6b0.dist-info/entry_points.txt +2 -0
- kea2_python-1.0.6b0.dist-info/licenses/LICENSE +16 -0
- kea2_python-1.0.6b0.dist-info/top_level.txt +1 -0
kea2/fbm_parser.py
ADDED
@@ -0,0 +1,871 @@
#!/usr/bin/env python3
"""
FBM merger tool

This script provides a single responsibility: merge two FBM files into a new FBM file
that preserves the original FlatBuffers schema generated under the `fastbotx` package.

Usage:
    python fbm_parser.py --merge a.fbm b.fbm -o out.fbm

Notes:
- Requires the `flatbuffers` runtime and generated Python modules under `fastbotx/`.
- The merger aggregates ReuseEntry objects from both files, combining the per-action
  activity counts (see merge() below).
"""

import os
import threading
import uuid
from .fs_lock import FileLock, LockTimeoutError

STORAGE_PREFIX = "/sdcard/fastbot_"

# Record the directory containing this module so the generated FlatBuffers code
# under fastbotx/ can be located relative to it.
script_dir = os.path.dirname(os.path.abspath(__file__))


class FBMMerger:
    """Class encapsulating FBM merge functionality.

    Public methods:
    - merge(file_a, file_b, out_file): merge two FBM files into out_file.
    """

    def __init__(self):
        self.script_dir = script_dir
        # internal map: action_hash (int) -> {activity_str: times}
        self._reuse_model_lock = threading.Lock()
        self._reuse_model = {}  # dict: int -> dict(activity -> times)
        self._model_save_path = ""
        self._default_model_save_path = ""

        # Prepare the PC-side FBM directory under <project>/configs/merge_fbm
        # (falling back to the current working directory).
        try:
            from pathlib import Path
            pc_dir = Path(self._pc_fbm_dir())  # ensure a Path object
            pc_dir.mkdir(parents=True, exist_ok=True)
            self._pc_dir = pc_dir
        except Exception:
            # best-effort: if directory creation fails, keep the attribute as None
            self._pc_dir = None

    def check_dependencies(self):
        try:
            import flatbuffers  # noqa: F401
            return True
        except Exception:
            print("Error: 'flatbuffers' runtime not installed. Run: pip install flatbuffers")
            return False

    def check_generated_code(self):
        """Check that the expected generated modules exist under fastbotx/."""
        required = [
            os.path.join(self.script_dir, "fastbotx", "__init__.py"),
            os.path.join(self.script_dir, "fastbotx", "ReuseModel.py"),
            os.path.join(self.script_dir, "fastbotx", "ReuseEntry.py"),
            os.path.join(self.script_dir, "fastbotx", "ActivityTimes.py"),
        ]
        missing = [p for p in required if not os.path.exists(p)]
        if missing:
            print("Error: Missing generated FlatBuffers Python files:")
            for p in missing:
                print(" - ", p)
            return False
        return True

    def _ensure_fbm_suffix(self, path: str, param_name: str = 'file') -> bool:
        """Ensure the path ends with .fbm (case-insensitive); print an error and return False otherwise."""
        if not path:
            print(f"Error: {param_name} path is empty")
            return False
        if not str(path).lower().endswith('.fbm'):
            print(f"Error: expected .fbm file for {param_name}: '{path}'")
            return False
        return True

    def load_model(self, file_path):
        """Load and return the ReuseModel root object from a FBM file.

        Returns the model object on success, or None on failure.
        """
        # suffix check
        if not self._ensure_fbm_suffix(file_path, 'file_path'):
            return None

        try:
            from .fastbotx.ReuseModel import ReuseModel
        except Exception as e:
            print("Error importing fastbotx.ReuseModel:", e)
            return None

        try:
            with open(file_path, 'rb') as f:
                data = f.read()
            model = ReuseModel.GetRootAs(data, 0)
            return model
        except Exception as e:
            print(f"Error reading/parsing FBM file {file_path}: {e}")
            return None

    def load_reuse_model(self, package_name: str):
        """Load a FBM file according to package name and populate the internal reuse map.

        Behavior follows the C++ example: compute the path STORAGE_PREFIX + package + ".fbm",
        set internal default paths, read the binary, parse the ReuseModel and convert it into
        self._reuse_model as a mapping actionHash -> {activity: times}.
        """
        if not package_name:
            print("Error: package_name required")
            return False

        model_file_path = STORAGE_PREFIX + package_name + ".fbm"
        self._model_save_path = model_file_path
        if self._model_save_path:
            self._default_model_save_path = STORAGE_PREFIX + package_name + ".tmp.fbm"

        print(f"Begin load model: {model_file_path}")

        if not os.path.exists(model_file_path):
            print(f"Read model file {model_file_path} failed, check if file exists!")
            return False

        try:
            with open(model_file_path, 'rb') as f:
                data = f.read()
        except Exception as e:
            print(f"Failed to read file {model_file_path}: {e}")
            return False

        # parse using the generated ReuseModel
        try:
            import importlib
            ReuseModel_mod = importlib.import_module('kea2.fastbotx.ReuseModel')
            ReuseEntry_mod = importlib.import_module('kea2.fastbotx.ReuseEntry')
            ActivityTimes_mod = importlib.import_module('kea2.fastbotx.ActivityTimes')
        except Exception as e:
            print("Error importing fastbotx generated modules:", e)
            return False

        try:
            reuse_fb_model = ReuseModel_mod.ReuseModel.GetRootAs(data, 0)
        except Exception as e:
            print("Error parsing FBM data:", e)
            return False

        # build map
        new_map = {}
        total = 0
        try:
            length = reuse_fb_model.ModelLength()
        except Exception:
            length = 0

        for i in range(length):
            entry = reuse_fb_model.Model(i)
            if not entry:
                continue
            action_hash = entry.Action()
            tcount = 0
            try:
                tcount = entry.TargetsLength()
            except Exception:
                tcount = 0

            entry_dict = {}
            for j in range(tcount):
                target = entry.Targets(j)
                if not target:
                    continue
                try:
                    activity = target.Activity()
                except Exception:
                    activity = None
                try:
                    times = int(target.Times())
                except Exception:
                    times = 0
                if activity:
                    # record activity -> times (activity is kept as returned by the runtime)
                    entry_dict[activity] = times

            if entry_dict:
                new_map[int(action_hash)] = entry_dict
                total += 1

        # atomically replace the internal map under the lock
        with self._reuse_model_lock:
            self._reuse_model.clear()
            self._reuse_model.update(new_map)

        print(f"Loaded model contains actions: {len(self._reuse_model)} (entries processed: {total})")
        return True
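
    # A sketch of the structure load_reuse_model() leaves in self._reuse_model
    # (values invented): each action hash maps to the activities that action reached
    # and how often; depending on the flatbuffers runtime, activity keys may be
    # bytes rather than str.
    #
    #     {
    #         1379273941: {"com.example/.MainActivity": 12,
    #                      "com.example/.SettingsActivity": 3},
    #         8240113377: {"com.example/.MainActivity": 1},
    #     }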

    def extract_entries(self, model):
        """Extract entries from a ReuseModel into Python structures: a list of (action_hash, [(activity, times), ...])."""
        entries = []
        try:
            count = model.ModelLength()
        except Exception:
            # If the model API differs, return empty
            return entries

        for i in range(count):
            entry = model.Model(i)
            if not entry:
                continue
            action = entry.Action()
            targets = []
            try:
                tcount = entry.TargetsLength()
            except Exception:
                tcount = 0
            for j in range(tcount):
                t = entry.Targets(j)
                if not t:
                    continue
                try:
                    activity = t.Activity()
                except Exception:
                    activity = None
                try:
                    times = t.Times()
                except Exception:
                    times = 0
                targets.append((activity, times))
            entries.append((action, targets))
        return entries
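
    # A sketch of extract_entries() output (values invented): a list of
    # (action_hash, targets) pairs, where each targets list holds (activity, times)
    # tuples straight from the FlatBuffer:
    #
    #     [(1379273941, [("com.example/.MainActivity", 12)]),
    #      (8240113377, [("com.example/.MainActivity", 1),
    #                    ("com.example/.AboutActivity", 2)])]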

    def merge(self, file_a, file_b, out_file, merge_mode='sum', debug=False):
        """Merge two FBM files into out_file. Returns True on success."""
        # suffix checks
        if not self._ensure_fbm_suffix(file_a, 'file_a'):
            return False
        if not self._ensure_fbm_suffix(file_b, 'file_b'):
            return False
        if out_file and not self._ensure_fbm_suffix(out_file, 'out_file'):
            return False

        if not os.path.exists(file_a):
            print(f"Error: file not found: {file_a}")
            return False
        if not os.path.exists(file_b):
            print(f"Error: file not found: {file_b}")
            return False

        if not self.check_dependencies():
            return False
        if not self.check_generated_code():
            return False

        # Load models
        model_a = self.load_model(file_a)
        if model_a is None:
            print(f"Failed to load model from {file_a}")
            return False
        model_b = self.load_model(file_b)
        if model_b is None:
            print(f"Failed to load model from {file_b}")
            return False

        # Extract entries from both models
        entries_a = self.extract_entries(model_a)
        entries_b = self.extract_entries(model_b)

        # Aggregate by action hash. For each action, merge targets by activity, combining times.
        aggregated = {}  # action_hash -> {activity_str -> total_times}

        # use the refactored accumulate helper; honor the debug flag
        self._accumulate_entries(entries_a, aggregated, merge_mode=merge_mode, debug=debug)
        self._accumulate_entries(entries_b, aggregated, merge_mode=merge_mode, debug=debug)
        total_actions = len(aggregated)
        print(f"Merging: {len(entries_a)} entries from {file_a} + {len(entries_b)} entries from {file_b} -> {total_actions} unique actions")

        # Build a new FlatBuffer and save
        return self._write_aggregated_to_file(aggregated, out_file)

    def _accumulate_entries(self, entries, aggregated, merge_mode='sum', debug=False):
        """Accumulate entries into the aggregated map.

        entries: iterable of (action_hash, [(activity, times), ...])
        aggregated: dict to update
        merge_mode: 'sum' or 'max'
        debug: if True, print detailed per-action logs
        """
        for action_hash, targets in entries:
            ah = int(action_hash)
            if ah not in aggregated:
                aggregated[ah] = {}
            for activity, times in targets:
                if not activity:
                    continue
                try:
                    t = int(times)
                except Exception:
                    t = 0
                old = aggregated[ah].get(activity, 0)
                if merge_mode == 'max':
                    new = max(old, t)
                else:
                    new = old + t
                aggregated[ah][activity] = new
                if debug:
                    print(f"FBM_ACCUM DEBUG action={ah} activity='{activity}' old={old} add={t} new={new}")

    def _write_aggregated_to_file(self, aggregated, out_file):
        """Construct a FlatBuffer from the aggregated map and save it to out_file.

        aggregated: dict[action_hash] -> {activity: times}
        out_file: path to write (if None, a default path under pc_dir is used)
        """
        try:
            import flatbuffers
            import importlib
            ReuseModel_mod = importlib.import_module('kea2.fastbotx.ReuseModel')
            ReuseEntry_mod = importlib.import_module('kea2.fastbotx.ReuseEntry')
            ActivityTimes_mod = importlib.import_module('kea2.fastbotx.ActivityTimes')
        except Exception as e:
            print("Error importing required generated modules:", e)
            return False

        builder = flatbuffers.Builder(1024)
        str_cache = {}

        def cache_string(s):
            if s is None:
                return 0
            if s in str_cache:
                return str_cache[s]
            off = builder.CreateString(s)
            str_cache[s] = off
            return off

        entry_offsets = []

        # Ensure module objects (in case the import returned a class due to package-level imports)
        import inspect
        import importlib as _importlib

        def _ensure_mod(obj):
            # if someone passed the class object (e.g. ActivityTimes), load the module that defines it
            if inspect.isclass(obj):
                return _importlib.import_module(obj.__module__)
            return obj

        ReuseEntry_mod = _ensure_mod(ReuseEntry_mod)
        ActivityTimes_mod = _ensure_mod(ActivityTimes_mod)
        ReuseModel_mod = _ensure_mod(ReuseModel_mod)

        # Build entries from the aggregated map. Sort actions for deterministic output.
        for action_hash in sorted(aggregated.keys()):
            targets_map = aggregated[action_hash]
            # Build ActivityTimes offsets for each activity. Sort activities for determinism.
            target_offsets = []
            for activity in sorted(targets_map.keys()):
                times = targets_map[activity]
                act_off = cache_string(activity)
                # Compatibility: prefer the prefixed module-level helper names but
                # support both the deprecated and the new naming scheme
                if hasattr(ActivityTimes_mod, 'ActivityTimesStart'):
                    ActivityTimes_mod.ActivityTimesStart(builder)
                elif hasattr(ActivityTimes_mod, 'Start'):
                    ActivityTimes_mod.Start(builder)
                else:
                    raise RuntimeError('ActivityTimes builder start function not found')

                if act_off:
                    if hasattr(ActivityTimes_mod, 'ActivityTimesAddActivity'):
                        ActivityTimes_mod.ActivityTimesAddActivity(builder, act_off)
                    elif hasattr(ActivityTimes_mod, 'AddActivity'):
                        ActivityTimes_mod.AddActivity(builder, act_off)
                    else:
                        raise RuntimeError('ActivityTimes add activity function not found')

                if hasattr(ActivityTimes_mod, 'ActivityTimesAddTimes'):
                    ActivityTimes_mod.ActivityTimesAddTimes(builder, int(times))
                elif hasattr(ActivityTimes_mod, 'AddTimes'):
                    ActivityTimes_mod.AddTimes(builder, int(times))
                else:
                    raise RuntimeError('ActivityTimes add times function not found')

                if hasattr(ActivityTimes_mod, 'ActivityTimesEnd'):
                    toff = ActivityTimes_mod.ActivityTimesEnd(builder)
                elif hasattr(ActivityTimes_mod, 'End'):
                    toff = ActivityTimes_mod.End(builder)
                else:
                    raise RuntimeError('ActivityTimes end function not found')

                target_offsets.append(toff)

            # create the vector of targets
            if target_offsets:
                if hasattr(ReuseEntry_mod, 'ReuseEntryStartTargetsVector'):
                    ReuseEntry_mod.ReuseEntryStartTargetsVector(builder, len(target_offsets))
                elif hasattr(ReuseEntry_mod, 'StartTargetsVector'):
                    ReuseEntry_mod.StartTargetsVector(builder, len(target_offsets))
                else:
                    raise RuntimeError('ReuseEntry start targets vector function not found')
                for toff in reversed(target_offsets):
                    builder.PrependUOffsetTRelative(toff)
                targets_vec = builder.EndVector()
            else:
                targets_vec = 0

            # create the entry using module helpers
            if hasattr(ReuseEntry_mod, 'ReuseEntryStart'):
                ReuseEntry_mod.ReuseEntryStart(builder)
            elif hasattr(ReuseEntry_mod, 'Start'):
                ReuseEntry_mod.Start(builder)
            else:
                raise RuntimeError('ReuseEntry start function not found')
            try:
                if hasattr(ReuseEntry_mod, 'ReuseEntryAddAction'):
                    ReuseEntry_mod.ReuseEntryAddAction(builder, action_hash)
                elif hasattr(ReuseEntry_mod, 'AddAction'):
                    ReuseEntry_mod.AddAction(builder, action_hash)
            except Exception:
                pass
            if targets_vec:
                try:
                    if hasattr(ReuseEntry_mod, 'ReuseEntryAddTargets'):
                        ReuseEntry_mod.ReuseEntryAddTargets(builder, targets_vec)
                    elif hasattr(ReuseEntry_mod, 'AddTargets'):
                        ReuseEntry_mod.AddTargets(builder, targets_vec)
                except Exception:
                    pass
            if hasattr(ReuseEntry_mod, 'ReuseEntryEnd'):
                entry_off = ReuseEntry_mod.ReuseEntryEnd(builder)
            elif hasattr(ReuseEntry_mod, 'End'):
                entry_off = ReuseEntry_mod.End(builder)
            else:
                raise RuntimeError('ReuseEntry end function not found')
            entry_offsets.append(entry_off)

        # model vector
        if entry_offsets:
            ReuseModel_mod.ReuseModelStartModelVector(builder, len(entry_offsets))
            for eoff in reversed(entry_offsets):
                builder.PrependUOffsetTRelative(eoff)
            model_vec = builder.EndVector()
        else:
            model_vec = 0

        ReuseModel_mod.ReuseModelStart(builder)
        if model_vec:
            try:
                ReuseModel_mod.ReuseModelAddModel(builder, model_vec)
            except Exception:
                try:
                    ReuseModel_mod.AddModel(builder, model_vec)
                except Exception:
                    pass
        root = ReuseModel_mod.ReuseModelEnd(builder)
        # Use the helper to finish the builder and save atomically
        return self._save_builder_to_file(builder, root, out_file)

    def _save_builder_to_file(self, builder, root_offset, out_file):
        """Finish the FlatBuffer builder and save the bytes to out_file atomically.

        Behavior mirrors the provided C++ example: finish the builder, write to a temporary
        file and then move/replace it into the final path. If out_file is empty, use a
        default path under the script directory.
        """
        import os
        import tempfile
        tmp_path = None
        try:
            # Ensure an output path
            if not out_file:
                out_file = os.path.join(self._pc_dir, 'fastbot.model.fbm')

            # Finish the builder (if not already finished)
            try:
                builder.Finish(root_offset)
            except Exception:
                # If Finish was already called upstream, ignore
                pass

            buf = builder.Output()

            out_dir = os.path.dirname(out_file) or self._pc_dir
            os.makedirs(out_dir, exist_ok=True)

            # Write to a unique temporary file in the target directory and atomically replace
            fd, tmp_path = tempfile.mkstemp(prefix='.tmp_fbm_', dir=out_dir)
            try:
                with os.fdopen(fd, 'wb') as f:
                    f.write(buf)
                    try:
                        f.flush()
                        os.fsync(f.fileno())
                    except Exception:
                        # flush/fsync is best-effort
                        pass

                # Atomic replace; try os.replace first, then os.rename as a fallback
                try:
                    os.replace(tmp_path, out_file)
                except Exception:
                    try:
                        os.rename(tmp_path, out_file)
                    except Exception:
                        # last resort: write directly to out_file
                        try:
                            with open(out_file, 'wb') as f:
                                f.write(buf)
                        except Exception:
                            # if even that fails, attempt to clean up tmp_path below
                            pass
            finally:
                # best-effort cleanup of tmp_path if it still exists
                try:
                    if tmp_path and os.path.exists(tmp_path):
                        os.remove(tmp_path)
                except Exception:
                    pass

            # Set file permissions to 644
            try:
                # Check whether this is a Windows system
                if os.name == 'nt':
                    # Use the icacls command to set permissions on Windows
                    import subprocess
                    # First disable inheritance and copy existing permissions, then set new permissions
                    subprocess.run(["icacls", out_file, "/inheritance:d", "/grant", "Everyone:R", "/grant", "Administrators:F"],
                                   capture_output=True, text=True, check=True)
                    print(f"Set Windows file permissions to simulate 644 for: {out_file}")
                else:
                    # Set permissions directly on Unix/Linux systems
                    os.chmod(out_file, 0o644)
                    print(f"Set file permissions to 644 for: {out_file}")
            except Exception as e:
                print(f"Warning: Failed to set file permissions for {out_file}: {e}")

            print(f"Merged FBM written to: {out_file} (size {len(buf)} bytes)")
            return True
        except Exception as e:
            print("Error writing merged FBM:", e)
            # clean up tmp_path if it exists (guard against tmp_path still being None)
            try:
                if tmp_path and os.path.exists(tmp_path):
                    os.remove(tmp_path)
            except Exception:
                pass
            return False
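
    # A hedged sketch, not part of the released file: one way to sanity-check a
    # write is to read the result back through load_model() (the path and values
    # are hypothetical):
    #
    #     merger = FBMMerger()
    #     if merger._write_aggregated_to_file({42: {"MainActivity": 8}}, "out.fbm"):
    #         model = merger.load_model("out.fbm")
    #         assert model is not None and model.ModelLength() == 1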

    def _pc_fbm_dir(self):
        """Return the PC directory used to store fbm files."""
        from pathlib import Path
        try:
            from .utils import getProjectRoot
            proj_root = getProjectRoot()
        except Exception:
            proj_root = None

        if proj_root:
            return Path(proj_root) / 'configs' / 'merge_fbm'
        else:
            return Path.cwd() / 'configs' / 'merge_fbm'

    def _remote_fbm_path(self, package_name: str) -> str:
        return f"/sdcard/fastbot_{package_name}.fbm"

    def pull_and_merge_to_pc(self, package_name: str, device: str = None, transport_id: str = None):
        """Pull the device FBM for a package and merge it into the PC fbm (the PC file is updated).

        Returns True on success (or if there is nothing to do), False on failure.
        """
        try:
            from kea2.adbUtils import pull_file
        except Exception:
            try:
                from adbUtils import pull_file  # type: ignore
            except Exception as e:
                print("ADB utilities not available:", e)
                return False

        pc_dir = self._pc_dir
        pc_dir.mkdir(parents=True, exist_ok=True)
        pc_file = pc_dir / f"fastbot_{package_name}.fbm"
        # generate a short random suffix for all intermediate files to avoid clashes between processes
        rand = uuid.uuid4().hex[:8]
        pulled_tmp = pc_dir / f"fastbot_{package_name}.from_device.{rand}.fbm"
        merged_tmp = pc_dir / f"fastbot_{package_name}.merged.{rand}.fbm"

        remote = self._remote_fbm_path(package_name)
        try:
            print(f"Attempting to pull {remote} to {pulled_tmp}")
            pull_file(remote, str(pulled_tmp), device=device, transport_id=transport_id)
        except Exception as e:
            print(f"pull_file failed for {remote}: {e}")

        if not pulled_tmp.exists() or pulled_tmp.stat().st_size == 0:
            print(f"No FBM on device for {package_name}, nothing merged to PC.")
            try:
                if pulled_tmp.exists():
                    pulled_tmp.unlink()
            except Exception:
                pass
            return False

        # --- Try the snapshot/delta workflow first ---
        snapshot_remote = f"/sdcard/fastbot_{package_name}.snapshot.fbm"
        pulled_snap_tmp = pc_dir / f"fastbot_{package_name}.snapshot.from_device.{rand}.fbm"
        delta_tmp = pc_dir / f"fastbot_{package_name}.delta.{rand}.fbm"
        try:
            # attempt to pull the snapshot (may fail silently)
            try:
                pull_file(snapshot_remote, str(pulled_snap_tmp), device=device, transport_id=transport_id)
            except Exception:
                # the snapshot may not exist on the device; ignore the error and proceed (treat as empty)
                pass

            # Compute the delta using the snapshot if it exists, otherwise treat the snapshot as empty (delta == current)
            snapshot_path = str(pulled_snap_tmp) if pulled_snap_tmp.exists() and pulled_snap_tmp.stat().st_size > 0 else None
            if snapshot_path:
                print(f"Snapshot found on device for {package_name}, computing delta -> {delta_tmp}")
            else:
                print(f"No snapshot on device for {package_name}; treating snapshot as empty -> computing delta -> {delta_tmp}")

            ok = self.compute_delta(snapshot_path, str(pulled_tmp), str(delta_tmp))
            if not ok:
                print("Delta computation failed; not performing merge.")
                return False

            print(f"Applying delta to PC core fbm: {delta_tmp} -> {pc_file}")
            # apply_delta_to_pc performs the necessary locking around pc_file operations
            applied = self.apply_delta_to_pc(str(pc_file), str(delta_tmp))
            if applied:
                print(f"[FBM] delta applied to PC for package '{package_name}'")
                return True
            else:
                print("Applying delta failed; not performing merge.")
                return False
        finally:
            # cleanup
            try:
                if pulled_tmp.exists():
                    pulled_tmp.unlink()
            except Exception:
                pass
            try:
                if merged_tmp.exists():
                    merged_tmp.unlink()
            except Exception:
                pass
            try:
                if pulled_snap_tmp.exists():
                    pulled_snap_tmp.unlink()
            except Exception:
                pass
            try:
                if delta_tmp.exists():
                    delta_tmp.unlink()
            except Exception:
                pass

    # --- New workflow helpers ---
    def create_device_snapshot(self, package_name: str, snapshot_remote: str = None, device: str = None, transport_id: str = None) -> bool:
        """Create an on-device snapshot (copy) of the fbm file.

        Attempts `adb shell cp <src> <dst>` first and falls back to pull/push if cp is not available.
        Returns True on success.
        """
        src = self._remote_fbm_path(package_name)
        dst = snapshot_remote or f"/sdcard/fastbot_{package_name}.snapshot.fbm"
        try:
            from kea2.adbUtils import adb_shell, pull_file, push_file
        except Exception:
            try:
                from adbUtils import adb_shell, pull_file, push_file  # type: ignore
            except Exception as e:
                print("ADB utilities not available:", e)
                return False

        try:
            print(f"Creating device snapshot: cp {src} {dst}")
            adb_shell(["cp", src, dst], device=device, transport_id=transport_id)
            return True
        except Exception as e:
            print(f"adb shell cp failed ({e}), trying pull/push fallback")
            # fallback: pull the file, then push it to dst
            try:
                pc_tmp = os.path.join(self._pc_dir, f"fastbot_{package_name}.snapshot.from_device.fbm")
                pull_file(src, pc_tmp, device=device, transport_id=transport_id)
                push_file(pc_tmp, dst, device=device, transport_id=transport_id)
                try:
                    os.remove(pc_tmp)
                except Exception:
                    pass
                return True
            except Exception as e2:
                print(f"Snapshot fallback failed: {e2}")
                return False

    def compute_delta(self, snapshot_file: str, current_file: str, out_delta_file: str, merge_mode: str = 'increment') -> bool:
        """Compute the delta between a snapshot FBM and a current FBM and write a delta FBM containing only positive increments.

        merge_mode is ignored (kept for compatibility); behavior: delta = max(0, current - snapshot)
        """
        # Allow a missing snapshot: only validate the snapshot suffix when a path is provided and exists.
        if snapshot_file and os.path.exists(snapshot_file):
            if not self._ensure_fbm_suffix(snapshot_file, 'snapshot_file'):
                return False
        else:
            # no snapshot available on the device; treat it as an empty snapshot
            snapshot_file = None

        # Validate the current and output paths
        if not self._ensure_fbm_suffix(current_file, 'current_file'):
            return False
        if out_delta_file and not self._ensure_fbm_suffix(out_delta_file, 'out_delta_file'):
            return False

        # Load the snapshot model if provided; if loading fails, log a warning and treat it as empty
        model_snap = None
        if snapshot_file:
            model_snap = self.load_model(snapshot_file)
            if model_snap is None:
                print(f"Warning: failed to load snapshot model from {snapshot_file}; treating snapshot as empty")
                model_snap = None
        model_cur = self.load_model(current_file)
        if model_cur is None:
            print(f"Failed to load current model from {current_file}")
            return False

        entries_snap = self.extract_entries(model_snap)
        entries_cur = self.extract_entries(model_cur)

        # convert the snapshot to a map for fast lookup (action_hash -> {activity: times})
        # NOTE: aggregate duplicate activity entries by summing, the same as for the current entries
        snap_map = {}
        for action_hash, targets in entries_snap:
            ah = int(action_hash)
            snap_map.setdefault(ah, {})
            for activity, times in targets:
                if not activity:
                    continue
                try:
                    t = int(times)
                except Exception:
                    t = 0
                snap_map[ah][activity] = snap_map[ah].get(activity, 0) + t

        # convert the current entries into a consolidated map, summing duplicate activity entries if any
        cur_map = {}
        for action_hash, targets in entries_cur:
            ah = int(action_hash)
            cur_map.setdefault(ah, {})
            for activity, times in targets:
                if not activity:
                    continue
                try:
                    t = int(times)
                except Exception:
                    t = 0
                cur_map[ah][activity] = cur_map[ah].get(activity, 0) + t

        # compute deltas: for each action/activity, delta = cur_total - snapshot_total
        delta_map = {}
        for ah, activities in cur_map.items():
            for activity, cur_t in activities.items():
                snap_t = snap_map.get(ah, {}).get(activity, 0)
                inc = cur_t - snap_t
                if inc > 0:
                    delta_map.setdefault(ah, {})
                    delta_map[ah][activity] = inc

        if not delta_map:
            # write an empty FBM (deterministic) rather than skipping, so callers can rely on the file existing
            print("No positive deltas found; writing empty delta FBM")
            return self._write_aggregated_to_file({}, out_delta_file)

        return self._write_aggregated_to_file(delta_map, out_delta_file)
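
    # A worked example of the delta rule (numbers invented): with a snapshot of
    # {7: {"MainActivity": 4}} and a current model of
    # {7: {"MainActivity": 9, "AboutActivity": 2}}:
    #
    #     delta[7]["MainActivity"]  == 9 - 4 == 5
    #     delta[7]["AboutActivity"] == 2 - 0 == 2
    #
    # Counts that did not grow (inc <= 0) are dropped, so the delta file carries
    # only observations made since the snapshot.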

    def apply_delta_to_pc(self, pc_fbm: str, delta_fbm: str, out_fbm: str = None) -> bool:
        """Apply a delta FBM (containing increments) to the PC core FBM.

        If out_fbm is None, overwrite pc_fbm atomically; otherwise write to out_fbm.
        """
        if not self._ensure_fbm_suffix(pc_fbm, 'pc_fbm'):
            return False
        if not self._ensure_fbm_suffix(delta_fbm, 'delta_fbm'):
            return False
        if out_fbm and not self._ensure_fbm_suffix(out_fbm, 'out_fbm'):
            return False

        # Perform the entire PC file operation (read/merge/replace) under a single FileLock.
        # Ensure the temporary target file has a .fbm suffix so merge(...) accepts it.
        from pathlib import Path
        if out_fbm:
            target = out_fbm
        else:
            try:
                target = str(Path(pc_fbm).with_suffix('.updated.fbm'))
            except Exception:
                # fallback: append .updated.fbm
                target = pc_fbm + '.updated.fbm'

        try:
            with FileLock(str(pc_fbm), timeout=60.0):
                # If pc_fbm doesn't exist, just copy the delta into place (the delta is
                # then the absolute set of increments over an empty model)
                if not os.path.exists(pc_fbm):
                    try:
                        import shutil
                        if out_fbm:
                            shutil.copyfile(delta_fbm, out_fbm)
                            target_path = out_fbm
                        else:
                            shutil.copyfile(delta_fbm, pc_fbm)
                            target_path = pc_fbm

                        # Set file permissions to 644
                        try:
                            # Check whether this is a Windows system
                            if os.name == 'nt':
                                # Use the icacls command to set permissions on Windows
                                import subprocess
                                # First disable inheritance and copy existing permissions, then set new permissions
                                subprocess.run(["icacls", target_path, "/inheritance:d", "/grant", "Everyone:R", "/grant", "Administrators:F"],
                                               capture_output=True, text=True, check=True)
                                print(f"Set Windows file permissions to simulate 644 for: {target_path}")
                            else:
                                # Set permissions directly on Unix/Linux systems
                                os.chmod(target_path, 0o644)
                                print(f"Set file permissions to 644 for: {target_path}")
                        except Exception as e:
                            print(f"Warning: Failed to set file permissions for {target_path}: {e}")

                        return True
                    except Exception as e:
                        print(f"Failed to copy delta to pc_fbm: {e}")
                        return False

                # Merge pc_fbm and delta_fbm using sum mode into target
                ok = self.merge(pc_fbm, delta_fbm, target, merge_mode='sum')
                if not ok:
                    print("Failed to merge delta into pc_fbm")
                    return False

                # If out_fbm was not provided, replace pc_fbm atomically by moving target
                if not out_fbm:
                    try:
                        os.replace(target, pc_fbm)
                    except Exception:
                        import shutil
                        try:
                            shutil.copyfile(target, pc_fbm)
                            os.remove(target)
                        except Exception:
                            pass
                return True
        except LockTimeoutError:
            print(f"Timeout acquiring lock to merge/apply delta into {pc_fbm}")
            return False
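
A minimal end-to-end sketch of the offline merge path (not taken from the package; it assumes this wheel is installed and that two Fastbot-produced .fbm files exist at the hypothetical paths shown):

    from kea2.fbm_parser import FBMMerger

    merger = FBMMerger()
    # 'sum' adds per-activity counts from both inputs; 'max' keeps the larger count
    if merger.merge("runs/a.fbm", "runs/b.fbm", "runs/merged.fbm", merge_mode="sum"):
        # read the result back through the same FlatBuffers schema as a sanity check
        model = merger.load_model("runs/merged.fbm")
        print("merged actions:", model.ModelLength() if model else "load failed")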
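
And a sketch of the snapshot/delta workflow that create_device_snapshot, compute_delta and apply_delta_to_pc implement together (again hypothetical: it assumes an adb-connected device and a package Fastbot has already explored):

    from kea2.fbm_parser import FBMMerger

    merger = FBMMerger()
    pkg = "com.example.app"  # hypothetical package name
    # 1. snapshot the on-device model before a test run
    merger.create_device_snapshot(pkg)
    # ... run Fastbot against pkg; the on-device model accumulates new counts ...
    # 2. pull the model, diff it against the snapshot, and fold the positive
    #    increments into the PC-side core model under configs/merge_fbm/
    merger.pull_and_merge_to_pc(pkg)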