pkgmgr-kunrunic 0.1.1.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pkgmgr/__init__.py +16 -0
- pkgmgr/__main__.py +5 -0
- pkgmgr/cli.py +320 -0
- pkgmgr/collectors/__init__.py +5 -0
- pkgmgr/collectors/base.py +15 -0
- pkgmgr/collectors/checksums.py +35 -0
- pkgmgr/config.py +408 -0
- pkgmgr/points.py +98 -0
- pkgmgr/release.py +1031 -0
- pkgmgr/shell_integration.py +120 -0
- pkgmgr/snapshot.py +306 -0
- pkgmgr/templates/pkg.yaml.sample +16 -0
- pkgmgr/templates/pkgmgr.yaml.sample +51 -0
- pkgmgr/watch.py +79 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/METADATA +159 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/RECORD +24 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/WHEEL +5 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/entry_points.txt +2 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/licenses/LICENSE +21 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/top_level.txt +3 -0
- plugin/export_cksum.py +354 -0
- plugin/export_pkgstore.py +117 -0
- plugin/export_source_review.py +499 -0
- tools/echo_args.py +15 -0
pkgmgr/config.py
ADDED
@@ -0,0 +1,408 @@
+from __future__ import print_function
+"""Configuration helpers for the pkg manager scaffold."""
+
+import glob
+import os
+import sys
+import textwrap
+
+# Default locations under the user's home directory.
+BASE_DIR = os.path.expanduser("~/pkgmgr")
+DEFAULT_CONFIG_DIR = os.path.join(BASE_DIR, "config")
+DEFAULT_STATE_DIR = os.path.join(BASE_DIR, "local", "state")
+DEFAULT_CACHE_DIR = os.path.join(BASE_DIR, "cache")
+# Default main config lives under BASE_DIR/config; configs under BASE_DIR
+# are also discovered for backward compatibility.
+DEFAULT_MAIN_CONFIG = os.path.join(DEFAULT_CONFIG_DIR, "pkgmgr.yaml")
+HERE = os.path.dirname(os.path.abspath(__file__))
+TEMPLATE_DIR = os.path.join(HERE, "templates")
+
+
+try:
+    import yaml  # type: ignore
+except Exception:
+    yaml = None
+
+
+MAIN_TEMPLATE = """\
+pkg_release_root: ~/PKG/RELEASE
+sources:
+  - /path/to/source-A
+  - /path/to/source-B
+
+source:
+  # glob patterns to exclude from source scanning
+  exclude:
+    - "**/build/**"
+    - "**/*.tmp"
+    - "**/bk"
+    - "**/*.sc*"
+    - "**/unit_test/**"
+    - "Jamrules*"
+    - "Jamfile*"
+    - "**/Jamrules*"
+    - "**/Jamfile*"
+
+artifacts:
+  root: ~/HOME
+  targets: [bin, lib, data]
+  # glob patterns to exclude in artifacts area
+  exclude:
+    - log
+    - tmp/**
+    - "*.bak"
+    - "**/*.tmp"
+
+watch:
+  interval_sec: 60
+  on_change: []  # optional list of action names to run on change (poller)
+
+collectors:
+  enabled: ["checksums"]
+
+actions:
+  # action_name: list of commands (cmd required, cwd/env optional)
+  export_cksum:
+    - cmd: python export_cksum.py --pkg-dir /path/to/pkg --excel /path/to/template.xlsx
+      cwd: /app/script
+      env: { APP_ENV: dev }
+  export_world_dev:
+    - cmd: python dev_world.py
+      cwd: /app/script
+  export_world_security:
+    - cmd: python security_world.py
+      cwd: /app/script
+  noti_email:
+    - cmd: sh noti_email.sh
+      cwd: /app/script
+
+auto_actions:
+  create_pkg: []
+  update_pkg: []
+  update_pkg_release: []
+  close_pkg: []
+"""
+
+PKG_TEMPLATE = """\
+pkg:
+  id: "<pkg-id>"
+  root: "/path/to/release/<pkg-id>"
+  status: "open"  # open|closed
+
+include:
+  releases: []
+
+git:
+  repo_root: null  # optional override; default is git rev-parse from cwd
+  keywords: []
+  since: null  # e.g. "2024-01-01"
+  until: null
+
+collectors:
+  enabled: ["checksums"]
+"""
+
+MAIN_DEFAULTS = {
+    "pkg_release_root": None,
+    "sources": [],
+    "source": {"exclude": []},
+    "artifacts": {"root": None, "targets": [], "exclude": []},
+    "watch": {"interval_sec": 60, "on_change": []},
+    "collectors": {"enabled": ["checksums"]},
+    "actions": {},
+    "auto_actions": {
+        "create_pkg": [],
+        "update_pkg": [],
+        "update_pkg_release": [],
+        "close_pkg": [],
+    },
+}
+
+
+def _deep_merge(defaults, overrides):
+    merged = dict(defaults or {})
+    for key, value in (overrides or {}).items():
+        if isinstance(value, dict) and isinstance(merged.get(key), dict):
+            merged[key] = _deep_merge(merged.get(key, {}), value)
+        else:
+            merged[key] = value
+    return merged
+
+
+def _ensure_list(value, field):
+    if value is None:
+        return []
+    if isinstance(value, list):
+        return value
+    return [value]
+
+
+def _ensure_list_of_strings(value, field):
+    raw_list = _ensure_list(value, field)
+    result = []
+    for idx, item in enumerate(raw_list):
+        if item is None:
+            continue
+        if not isinstance(item, (str, bytes, int, float)):
+            raise RuntimeError("expected %s[%d] to be string-like" % (field, idx))
+        result.append(str(item))
+    return result
+
+
+def _validate_actions(actions):
+    if actions is None:
+        return {}
+    if not isinstance(actions, dict):
+        raise RuntimeError("actions must be a mapping of name -> command list")
+    validated = {}
+    for name, entry in actions.items():
+        if isinstance(entry, dict):
+            commands = [entry]
+        elif isinstance(entry, (list, tuple)):
+            commands = list(entry)
+        else:
+            raise RuntimeError("action %s must be a mapping or list" % name)
+        validated[name] = commands
+    return validated
+
+
+def _validate_watch(watch_cfg):
+    watch = watch_cfg if isinstance(watch_cfg, dict) else {}
+    interval = watch.get("interval_sec", MAIN_DEFAULTS["watch"]["interval_sec"])
+    try:
+        interval = int(interval)
+        if interval <= 0:
+            raise ValueError
+    except Exception:
+        interval = MAIN_DEFAULTS["watch"]["interval_sec"]
+    on_change = _ensure_list_of_strings(watch.get("on_change"), "watch.on_change")
+    return {"interval_sec": interval, "on_change": on_change}
+
+
+def _validate_auto_actions(auto_actions):
+    cfg = auto_actions if isinstance(auto_actions, dict) else {}
+    return {
+        "create_pkg": _ensure_list_of_strings(cfg.get("create_pkg"), "auto_actions.create_pkg"),
+        "update_pkg": _ensure_list_of_strings(cfg.get("update_pkg"), "auto_actions.update_pkg"),
+        "update_pkg_release": _ensure_list_of_strings(cfg.get("update_pkg_release"), "auto_actions.update_pkg_release"),
+        "close_pkg": _ensure_list_of_strings(cfg.get("close_pkg"), "auto_actions.close_pkg"),
+    }
+
+
+def _validate_main_config(data):
+    if not isinstance(data, dict):
+        raise RuntimeError("main config must be a mapping")
+    cfg = _deep_merge(MAIN_DEFAULTS, data)
+
+    pkg_root = cfg.get("pkg_release_root")
+    if not pkg_root or not isinstance(pkg_root, (str, bytes)):
+        raise RuntimeError("pkg_release_root is required (path string)")
+    cfg["pkg_release_root"] = str(pkg_root)
+
+    cfg["sources"] = _ensure_list_of_strings(cfg.get("sources"), "sources")
+    src = cfg.get("source") if isinstance(cfg.get("source"), dict) else {}
+    cfg["source"] = {"exclude": _ensure_list_of_strings(src.get("exclude"), "source.exclude")}
+
+    artifacts = cfg.get("artifacts") if isinstance(cfg.get("artifacts"), dict) else {}
+    art_root = artifacts.get("root")
+    cfg["artifacts"] = {
+        "root": str(art_root) if art_root else None,
+        "targets": _ensure_list_of_strings(artifacts.get("targets"), "artifacts.targets"),
+        "exclude": _ensure_list_of_strings(artifacts.get("exclude"), "artifacts.exclude"),
+    }
+
+    cfg["watch"] = _validate_watch(cfg.get("watch"))
+
+    collectors = cfg.get("collectors") if isinstance(cfg.get("collectors"), dict) else {}
+    cfg["collectors"] = {
+        "enabled": _ensure_list_of_strings(collectors.get("enabled"), "collectors.enabled")
+    }
+
+    cfg["actions"] = _validate_actions(cfg.get("actions"))
+    cfg["auto_actions"] = _validate_auto_actions(cfg.get("auto_actions"))
+
+    return cfg
+
+
+def _load_template_file(filename, fallback):
+    """Try loading a template file under pkgmgr/templates; fallback to inline default."""
+    path = os.path.join(TEMPLATE_DIR, filename)
+    try:
+        f = open(path, "r")
+        try:
+            return f.read()
+        finally:
+            f.close()
+    except Exception:
+        return fallback
+
+
+def write_template(path=None):
+    """Write the main pkgmgr.yaml template. Returns True if written."""
+    path = path or DEFAULT_MAIN_CONFIG
+    target = os.path.realpath(os.path.abspath(os.path.expanduser(path)))
+    if os.path.exists(target):
+        print("[make-config] config already exists at %s; remove it and re-run" % target)
+        return False
+    parent = os.path.dirname(target)
+    if parent and not os.path.exists(parent):
+        os.makedirs(parent)
+    content = _load_template_file("pkgmgr.yaml.sample", MAIN_TEMPLATE)
+    with open(target, "w") as f:
+        f.write(content)
+    print("[make-config] wrote template to %s" % target)
+    return True
+
+
+def write_pkg_template(path, pkg_id=None, pkg_root=None, include_releases=None, git_cfg=None, collectors_enabled=None):
+    """
+    Write a pkg.yaml file. When pkg_id/pkg_root provided, render with those values;
+    otherwise fall back to the static template.
+    """
+    target = os.path.abspath(path)
+    parent = os.path.dirname(target)
+    if parent and not os.path.exists(parent):
+        os.makedirs(parent)
+
+    if pkg_id is None or pkg_root is None or yaml is None:
+        content = _load_template_file("pkg.yaml.sample", PKG_TEMPLATE)
+        with open(target, "w") as f:
+            f.write(content)
+    else:
+        data = {
+            "pkg": {"id": str(pkg_id), "root": os.path.abspath(os.path.expanduser(pkg_root)), "status": "open"},
+            "include": {"releases": include_releases or []},
+            "git": {
+                "repo_root": (git_cfg or {}).get("repo_root"),
+                "keywords": _ensure_list_of_strings((git_cfg or {}).get("keywords"), "git.keywords"),
+                "since": (git_cfg or {}).get("since"),
+                "until": (git_cfg or {}).get("until"),
+            },
+            "collectors": {"enabled": collectors_enabled or ["checksums"]},
+        }
+        with open(target, "w") as f:
+            yaml.safe_dump(data, f, allow_unicode=True, sort_keys=True)
+    print("[create-pkg] wrote pkg config to %s" % target)
+
+
+def load_pkg_config(path):
+    """Load a pkg.yaml file."""
+    if yaml is None:
+        raise RuntimeError("PyYAML not installed; cannot read %s" % path)
+    abs_path = os.path.abspath(os.path.expanduser(path))
+    if not os.path.exists(abs_path):
+        raise RuntimeError("pkg config not found: %s" % abs_path)
+    with open(abs_path, "r") as f:
+        return yaml.safe_load(f) or {}
+
+
+def discover_main_configs(base_dir=None):
+    """
+    Find pkgmgr config files under the base directory.
+    Search order:
+    - <base_dir>/pkgmgr*.yaml (new default)
+    - <base_dir>/config/pkgmgr*.yaml (legacy default)
+    """
+    base = os.path.abspath(os.path.expanduser(base_dir or BASE_DIR))
+    search_roots = [base, os.path.join(base, "config")]
+    found = []
+    seen = set()
+    patterns = ["pkgmgr*.yaml", "pkgmgr*.yml"]
+    for root in search_roots:
+        if not os.path.isdir(root):
+            continue
+        for pattern in patterns:
+            for path in glob.glob(os.path.join(root, pattern)):
+                apath = os.path.realpath(os.path.abspath(path))
+                if apath not in seen:
+                    seen.add(apath)
+                    found.append(apath)
+    return sorted(found)
+
+
+def _prompt_to_pick(paths):
+    """Interactive selector for multiple configs."""
+    print("[config] multiple pkgmgr configs found; pick one:")
+    for idx, p in enumerate(paths, 1):
+        print(" %d) %s" % (idx, p))
+    choice = None
+    while choice is None:
+        raw = input("Select number (1-%d): " % len(paths)).strip()
+        if not raw:
+            continue
+        try:
+            val = int(raw)
+            if 1 <= val <= len(paths):
+                choice = paths[val - 1]
+            else:
+                print(" invalid selection")
+        except Exception:
+            print(" enter a number")
+    return choice
+
+
+def resolve_main_config(path=None, base_dir=None, allow_interactive=True):
+    """
+    Resolve main config path with discovery and optional interactive choice.
+    - If `path` is provided, return it as-is (expanded/abs).
+    - Otherwise search under BASE_DIR for pkgmgr*.yaml.
+    - none -> instruct user to create or pass --config
+    - one -> return it
+    - many -> prompt (tty) or raise (non-tty)
+    """
+    if path:
+        return os.path.realpath(os.path.abspath(os.path.expanduser(path)))
+    configs = discover_main_configs(base_dir=base_dir)
+    if not configs:
+        raise RuntimeError(
+            "no pkgmgr config found under %s; run `pkgmgr make-config` "
+            "or pass --config" % os.path.abspath(os.path.expanduser(base_dir or BASE_DIR))
+        )
+    if len(configs) == 1:
+        return configs[0]
+
+    msg = "multiple pkgmgr configs found: %s" % ", ".join(configs)
+    if allow_interactive and sys.stdin.isatty():
+        return _prompt_to_pick(configs)
+    raise RuntimeError(msg + "; specify one with --config")
+
+
+def load_main(path=None, base_dir=None, allow_interactive=True):
+    """
+    Load and validate the main config YAML.
+    If PyYAML is missing, raise a clear error so installation can add it.
+    """
+    path = resolve_main_config(
+        path=path, base_dir=base_dir, allow_interactive=allow_interactive
+    )
+    if yaml is None:
+        raise RuntimeError(
+            "PyYAML not installed; install it or keep using templates manually"
+        )
+    abs_path = os.path.abspath(path)
+    if not os.path.exists(abs_path):
+        raise RuntimeError("config not found: %s" % abs_path)
+    with open(abs_path, "r", encoding="utf-8") as f:
+        data = yaml.safe_load(f) or {}
+    return _validate_main_config(data)
+
+
+def describe_expected_fields():
+    """Return a help string for the main config layout."""
+    return textwrap.dedent(
+        """
+        pkg_release_root: root directory where pkg/<id> will live
+        sources: list of source roots to watch
+        source.exclude: glob patterns to skip under sources (supports **, *.ext)
+        artifacts.targets: top-level artifacts (bin/lib/data) to include
+        artifacts.exclude: glob patterns for dirs/files to skip (supports **, *.ext)
+        watch.interval_sec: poll interval for the watcher
+        watch.on_change: action names to run when changes are detected
+        collectors.enabled: default collectors to run per pkg
+        actions: mapping action_name -> list of command entries with:
+          - cmd: shell command string (required, often relative to cwd)
+          - cwd: working directory (optional)
+          - env: key/value env overrides for that command only (optional)
+        auto_actions: mapping of lifecycle events to action names (create_pkg/update_pkg/update_pkg_release/close_pkg)
+        """
+    ).strip()
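For orientation, here is a minimal sketch of how the public helpers in pkgmgr/config.py might be exercised once the wheel is installed. It is illustrative only and not part of the package diff; it assumes PyYAML is available and uses the module's default ~/pkgmgr layout.

# Sketch only: exercise the config helpers shipped in this wheel (assumes PyYAML is installed).
from pkgmgr import config

# Show the documented layout of the main config file.
print(config.describe_expected_fields())

# Write the starter template to ~/pkgmgr/config/pkgmgr.yaml (returns False if one already exists).
config.write_template()

# Discover, resolve, and validate the main config without prompting.
cfg = config.load_main(allow_interactive=False)
print(cfg["pkg_release_root"], cfg["watch"]["interval_sec"])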
pkgmgr/points.py
ADDED
@@ -0,0 +1,98 @@
+from __future__ import print_function
+"""Checkpoint/point helpers for pkg workflow."""
+
+import json
+import os
+import time
+
+from . import config, snapshot
+
+
+def _points_root(pkg_id):
+    return os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id), "points")
+
+
+def create_point(cfg, pkg_id, label=None, actions_run=None, actions_result=None, snapshot_data=None):
+    """
+    Create a checkpoint ("point") for the given pkg:
+    - takes a snapshot (or uses provided data)
+    - writes meta + snapshot under state/pkg/<id>/points/<ts>/
+    """
+    ts = time.strftime("%Y%m%dT%H%M%S", time.localtime())
+    base = _points_root(pkg_id)
+    point_dir = os.path.join(base, ts)
+    if not os.path.exists(point_dir):
+        os.makedirs(point_dir)
+
+    snap = snapshot_data or snapshot.create_snapshot(cfg)
+    meta = {
+        "pkg_id": str(pkg_id),
+        "label": label,
+        "created_at": ts,
+        "actions_run": actions_run or [],
+        "actions_result": actions_result or [],
+        "snapshot": "snapshot.json",
+    }
+
+    meta_path = os.path.join(point_dir, "meta.json")
+    snap_path = os.path.join(point_dir, "snapshot.json")
+    with open(meta_path, "w") as f:
+        json.dump(meta, f, ensure_ascii=False, indent=2, sort_keys=True)
+    with open(snap_path, "w") as f:
+        json.dump(snap, f, ensure_ascii=False, indent=2, sort_keys=True)
+
+    print("[point] created %s (label=%s actions=%s)" % (point_dir, label, actions_run or []))
+    return point_dir
+
+
+def load_latest_point(pkg_id):
+    """Load latest point's meta and snapshot for a pkg. Returns (meta, snapshot) or (None, None)."""
+    base = _points_root(pkg_id)
+    if not os.path.exists(base):
+        return None, None
+    candidates = [d for d in os.listdir(base) if os.path.isdir(os.path.join(base, d))]
+    if not candidates:
+        return None, None
+    latest = sorted(candidates)[-1]
+    pdir = os.path.join(base, latest)
+    meta_path = os.path.join(pdir, "meta.json")
+    snap_path = os.path.join(pdir, "snapshot.json")
+    meta = None
+    snap = None
+    try:
+        with open(meta_path, "r") as f:
+            meta = json.load(f)
+    except Exception:
+        meta = {"id": latest}
+    try:
+        with open(snap_path, "r") as f:
+            snap = json.load(f)
+    except Exception:
+        snap = None
+    return meta, snap
+
+
+def list_points(pkg_id):
+    """List available points for a package."""
+    base = _points_root(pkg_id)
+    if not os.path.exists(base):
+        print("[point] no points found for pkg %s" % pkg_id)
+        return []
+
+    entries = []
+    for name in sorted(os.listdir(base)):
+        pdir = os.path.join(base, name)
+        if not os.path.isdir(pdir):
+            continue
+        meta_path = os.path.join(pdir, "meta.json")
+        meta = {}
+        try:
+            with open(meta_path, "r") as f:
+                meta = json.load(f)
+        except Exception:
+            meta = {"created_at": name, "label": None}
+        entries.append({"id": name, "path": pdir, "label": meta.get("label"), "created_at": meta.get("created_at")})
+
+    for e in entries:
+        print("[point] %s label=%s path=%s" % (e["id"], e["label"], e["path"]))
+    return entries
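A similarly hedged sketch of the checkpoint workflow in pkgmgr/points.py follows. The pkg id "PKG-0001" and the label are hypothetical, and the snippet assumes a valid main config plus the snapshot module shipped elsewhere in this wheel; it is not part of the package itself.

# Sketch only: create and inspect checkpoints ("points") for a hypothetical pkg id.
from pkgmgr import config, points

cfg = config.load_main(allow_interactive=False)

# Snapshot the configured sources/artifacts and record a labelled point.
points.create_point(cfg, "PKG-0001", label="before-release")

# Enumerate points on disk, then reload the most recent one.
points.list_points("PKG-0001")
meta, snap = points.load_latest_point("PKG-0001")
print(meta.get("created_at"), snap is not None)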