pkgmgr-kunrunic 0.1.1.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pkgmgr-kunrunic might be problematic. Click here for more details.
- pkgmgr/__init__.py +17 -0
- pkgmgr/__main__.py +5 -0
- pkgmgr/cli.py +278 -0
- pkgmgr/collectors/__init__.py +5 -0
- pkgmgr/collectors/base.py +15 -0
- pkgmgr/collectors/checksums.py +35 -0
- pkgmgr/config.py +380 -0
- pkgmgr/gitcollect.py +10 -0
- pkgmgr/points.py +98 -0
- pkgmgr/release.py +579 -0
- pkgmgr/shell_integration.py +83 -0
- pkgmgr/snapshot.py +306 -0
- pkgmgr/templates/pkg.yaml.sample +16 -0
- pkgmgr/templates/pkgmgr.yaml.sample +51 -0
- pkgmgr/watch.py +79 -0
- pkgmgr_kunrunic-0.1.1.dev0.dist-info/METADATA +147 -0
- pkgmgr_kunrunic-0.1.1.dev0.dist-info/RECORD +21 -0
- pkgmgr_kunrunic-0.1.1.dev0.dist-info/WHEEL +5 -0
- pkgmgr_kunrunic-0.1.1.dev0.dist-info/entry_points.txt +2 -0
- pkgmgr_kunrunic-0.1.1.dev0.dist-info/licenses/LICENSE +21 -0
- pkgmgr_kunrunic-0.1.1.dev0.dist-info/top_level.txt +1 -0
pkgmgr/release.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
1
|
+
from __future__ import print_function
|
|
2
|
+
"""Release/package lifecycle scaffolding."""
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import re
|
|
7
|
+
import shutil
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
import subprocess
|
|
11
|
+
|
|
12
|
+
from . import config, snapshot, shell_integration, points
|
|
13
|
+
from .collectors import checksums as checksums_module
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def ensure_environment():
    """Prepare environment: print shell PATH/alias instructions.

    Uses the directory containing the running interpreter (e.g. a venv's
    bin/) as the location of the pkgmgr console script and delegates the
    per-shell instructions to shell_integration.
    """
    # sys.executable sits next to installed console scripts in a venv.
    script_dir = os.path.dirname(sys.executable)
    shell_integration.ensure_path_and_alias(script_dir)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _pkg_dir(cfg, pkg_id):
    """Return the on-disk directory for *pkg_id* under pkg_release_root.

    Raises RuntimeError when the config has no pkg_release_root.
    """
    release_root = os.path.expanduser(cfg.get("pkg_release_root", ""))
    if not release_root:
        raise RuntimeError("pkg_release_root missing in config")
    return os.path.join(release_root, str(pkg_id))
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _pkg_state_dir(pkg_id):
    # Per-package state lives under <DEFAULT_STATE_DIR>/pkg/<pkg_id>.
    return os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id))
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _pkg_state_path(pkg_id):
    # The state file itself: <state dir>/state.json.
    return os.path.join(_pkg_state_dir(pkg_id), "state.json")
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _timestamp():
    """Return the current local time as 'YYYY-MM-DDTHH:MM:SS'."""
    now = time.localtime()
    return time.strftime("%Y-%m-%dT%H:%M:%S", now)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _load_pkg_state(pkg_id):
    """Read the persisted state dict for a package, or None if absent/unreadable."""
    state_path = _pkg_state_path(pkg_id)
    if not os.path.exists(state_path):
        return None
    try:
        with open(state_path, "r") as handle:
            return json.load(handle)
    except Exception:
        # A corrupt or unreadable state file is treated as missing.
        return None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _write_pkg_state(pkg_id, status, extra=None):
    """Persist the package lifecycle state and return the written dict.

    opened_at is preserved across updates; "open" stamps it on first open
    and clears any close marker, "closed" records closed_at.  *extra*
    entries are merged in last and may override the defaults.
    """
    state_dir = _pkg_state_dir(pkg_id)
    if not os.path.exists(state_dir):
        os.makedirs(state_dir)
    now = _timestamp()
    previous = _load_pkg_state(pkg_id) or {}
    state = {
        "pkg_id": str(pkg_id),
        "status": status,
        "opened_at": previous.get("opened_at"),
        "updated_at": now,
    }
    if status == "open":
        # First open stamps opened_at; re-opening drops the close marker.
        state["opened_at"] = state["opened_at"] or now
        state.pop("closed_at", None)
    if status == "closed":
        state["closed_at"] = now
    if extra:
        state.update(extra)
    with open(_pkg_state_path(pkg_id), "w") as handle:
        json.dump(state, handle, ensure_ascii=False, indent=2, sort_keys=True)
    return state
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def pkg_is_closed(pkg_id):
    """Return True when the package's persisted status is 'closed'."""
    state = _load_pkg_state(pkg_id)
    if not state:
        return False
    return state.get("status") == "closed"
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def pkg_state(pkg_id):
    """Public accessor for the persisted per-package state dict (or None)."""
    return _load_pkg_state(pkg_id)
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def create_pkg(cfg, pkg_id):
    """Create pkg directory and write pkg.yaml template.

    Creates <pkg_release_root>/<pkg_id> if needed, writes a pkg.yaml
    template (prompting before overwriting an existing one when on a TTY),
    creates the snapshot baseline the first time, and records the package
    as "open" in the state store.
    """
    dest = _pkg_dir(cfg, pkg_id)
    if not os.path.exists(dest):
        os.makedirs(dest)
    template_path = os.path.join(dest, "pkg.yaml")

    def _should_write_template(path):
        # Write freely when no template exists; otherwise ask before clobbering.
        if not os.path.exists(path):
            return True
        prompt = "[create-pkg] pkg.yaml already exists at %s; overwrite? [y/N]: " % path
        if not sys.stdin.isatty():
            # Non-interactive runs keep the existing template (safe default).
            print(prompt + "non-tty -> keeping existing")
            return False
        ans = input(prompt).strip().lower()
        return ans in ("y", "yes")

    if not _should_write_template(template_path):
        print("[create-pkg] kept existing pkg.yaml; no changes made")
        return

    # Seed the template from the global config's git/collector settings.
    git_cfg = cfg.get("git") or {}
    collectors_enabled = (cfg.get("collectors") or {}).get("enabled") or ["checksums"]
    config.write_pkg_template(
        template_path,
        pkg_id=pkg_id,
        pkg_root=dest,
        include_releases=[],
        git_cfg=git_cfg,
        collectors_enabled=collectors_enabled,
    )
    # initial snapshot placeholder (only if no baseline exists yet)
    baseline_path = os.path.join(config.DEFAULT_STATE_DIR, "baseline.json")
    if not os.path.exists(baseline_path):
        snapshot.create_baseline(cfg)
    else:
        print("[create-pkg] baseline already exists; skipping baseline creation")
    _write_pkg_state(pkg_id, "open")
    print("[create-pkg] prepared %s" % dest)
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def close_pkg(cfg, pkg_id):
    """Mark a package closed: write a .closed marker and persist the state."""
    pkg_path = _pkg_dir(cfg, pkg_id)
    if not os.path.exists(pkg_path):
        print("[close-pkg] pkg dir not found, nothing to close: %s" % pkg_path)
        return
    # Drop a marker file inside the package directory, then record the state.
    with open(os.path.join(pkg_path, ".closed"), "w") as handle:
        handle.write("closed\n")
    _write_pkg_state(pkg_id, "closed")
    print("[close-pkg] marked closed: %s" % pkg_path)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def collect_for_pkg(cfg, pkg_id, collectors=None):
    """Run collector hooks for a package (stub implementation).

    Closed packages are skipped entirely.
    """
    if pkg_id and pkg_is_closed(pkg_id):
        print("[collect] pkg=%s is closed; skipping collectors" % pkg_id)
        return
    chosen = collectors or "default"
    message = "[collect] pkg=%s collectors=%s (stub; wire to collectors.checksums etc.)" % (pkg_id, chosen)
    print(message)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def export_pkg(cfg, pkg_id, fmt):
    """Export pkg data placeholder (prints only; no file output yet)."""
    message = "[export] pkg=%s format=%s (stub)" % (pkg_id, fmt)
    print(message)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def run_actions(cfg, names):
    """Run configured actions by name. Returns result list.

    Each entry in cfg["actions"] may be a single dict or a list/tuple of
    command entries; anything else is reported as invalid.  One result
    dict (name/status/rc) is appended per executed or rejected action.
    """
    configured = cfg.get("actions", {}) or {}
    if not names:
        print("[actions] no action names provided")
        return []

    outcomes = []
    for action_name in names:
        commands = configured.get(action_name)
        if not commands:
            print("[actions] unknown action: %s" % action_name)
            outcomes.append({"name": action_name, "status": "missing", "rc": None})
            continue
        if isinstance(commands, dict):
            # Allow a single command entry without the surrounding list.
            commands = [commands]
        if not isinstance(commands, (list, tuple)):
            print("[actions] invalid action format for %s" % action_name)
            outcomes.append({"name": action_name, "status": "invalid", "rc": None})
            continue
        print("[actions] running %s (%d command(s))" % (action_name, len(commands)))
        for position, command_entry in enumerate(commands, start=1):
            cmd, cwd, env = _parse_action_entry(command_entry)
            if not cmd:
                print("[actions] skip empty cmd for %s #%d" % (action_name, position))
                continue
            rc = _run_cmd(cmd, cwd=cwd, env=env, label="%s #%d" % (action_name, position))
            outcomes.append(
                {"name": action_name, "status": "ok" if rc == 0 else "failed", "rc": rc}
            )
    return outcomes
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _parse_action_entry(entry):
    """Normalize an action entry into a (cmd, cwd, env) triple.

    Dict entries may carry 'cmd'/'cwd'/'env' keys; any other entry is
    treated as a bare command with no working directory or extra env.
    """
    if not isinstance(entry, dict):
        return entry, None, None
    return entry.get("cmd"), entry.get("cwd"), entry.get("env")
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def _run_cmd(cmd, cwd=None, env=None, label=None):
    """Run *cmd* through the shell and return its exit code (1 on error).

    Extra *env* entries (None values skipped) are layered over the current
    process environment.  Progress is reported with an optional *label* tag.
    NOTE: cmd is a config-supplied shell string and runs with shell=True.
    """
    environment = dict(os.environ)
    if env and isinstance(env, dict):
        for key, value in env.items():
            if value is None:
                continue
            environment[str(key)] = str(value)
    prefix = "[actions]"
    tag = " (%s)" % label if label else ""
    try:
        proc = subprocess.Popen(cmd, shell=True, cwd=cwd, env=environment)
        rc = proc.wait()
    except Exception as e:
        print("%s error%s: %s" % (prefix, tag, str(e)))
        return 1
    if rc == 0:
        print("%s command ok%s" % (prefix, tag))
    else:
        print("%s command failed%s (rc=%s)" % (prefix, tag, rc))
    return rc
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def create_point(cfg, pkg_id, label=None, actions_run=None, actions_result=None, snapshot_data=None):
    """Create a checkpoint for a package (snapshot + meta).

    Thin delegation to points.create_point; all keyword arguments are
    forwarded unchanged.
    """
    return points.create_point(
        cfg, pkg_id, label=label, actions_run=actions_run, actions_result=actions_result, snapshot_data=snapshot_data
    )
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def list_points(cfg, pkg_id):
    """List checkpoints for a package.

    cfg is accepted for signature symmetry with create_point but is
    currently unused; points.list_points only needs the package id.
    """
    return points.list_points(pkg_id)
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def _git_repo_root(pkg_root, git_cfg):
    """Resolve the git repository root used for commit collection.

    Prefers an explicit repo_root from the pkg config (relative paths are
    resolved against *pkg_root*); otherwise falls back to `git rev-parse`
    in the current directory.  Returns None when no repo can be found.
    """
    configured = (git_cfg or {}).get("repo_root")
    if configured:
        configured = os.path.expanduser(configured)
        if os.path.isabs(configured):
            configured = os.path.abspath(configured)
        else:
            configured = os.path.abspath(os.path.join(pkg_root, configured))
        if os.path.isdir(configured):
            return configured
        print("[git] repo_root %s not found; falling back to git rev-parse" % configured)
    try:
        out = subprocess.check_output(
            ["git", "rev-parse", "--show-toplevel"], stderr=subprocess.STDOUT, universal_newlines=True
        )
    except Exception:
        print("[git] not a git repo or git unavailable; skipping git collection")
        return None
    return out.strip()
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
def _collect_git_hits(pkg_cfg, pkg_root):
    """Search git history for the package's configured keywords.

    Returns (result, files): *result* carries the keyword list and one
    dict per matching commit (hash/subject/keywords/files plus, when
    `git show` succeeds, author/date/message); *files* is a set of
    absolute paths touched by the matching commits.  Both are empty when
    no keywords are configured or no repo is found.
    """
    git_cfg = pkg_cfg.get("git") or {}
    keywords = [str(k) for k in (git_cfg.get("keywords") or []) if str(k).strip()]
    result = {"keywords": keywords, "commits": []}
    files = set()
    if not keywords:
        return result, files

    repo_root = _git_repo_root(pkg_root, git_cfg)
    if not repo_root:
        return result, files

    since = git_cfg.get("since")
    until = git_cfg.get("until")
    commits = {}
    # `current` tracks the commit whose file list we are currently reading.
    current = None

    for kw in keywords:
        # --pretty emits "<hash>\t<subject>" headers; --name-only appends
        # the touched file paths (one per line) after each header.
        cmd = [
            "git",
            "--no-pager",
            "log",
            "--name-only",
            "--pretty=format:%H\t%s",
            "--grep=%s" % kw,
            "--regexp-ignore-case",
            "--all",
            "--",
        ]
        if since:
            cmd.append("--since=%s" % since)
        if until:
            cmd.append("--until=%s" % until)
        try:
            out = subprocess.check_output(cmd, cwd=repo_root, stderr=subprocess.STDOUT, universal_newlines=True)
        except Exception as e:
            # Best-effort per keyword: a bad pattern must not kill the run.
            print("[git] log failed for keyword %s: %s" % (kw, str(e)))
            continue

        for line in out.splitlines():
            line = line.strip()
            if not line:
                continue
            if "\t" in line:
                # Header line: start (or re-use) the entry for this commit.
                # Commits matching several keywords accumulate all of them.
                parts = line.split("\t", 1)
                commit_hash, subject = parts[0], parts[1]
                current = commits.setdefault(
                    commit_hash, {"hash": commit_hash, "subject": subject, "keywords": set(), "files": set()}
                )
                current["keywords"].add(kw)
                continue
            if current:
                # File line: attach to the most recent header.
                current["files"].add(line)
                files.add(os.path.join(repo_root, line))

    for c in commits.values():
        # Sets -> sorted lists for stable JSON output.
        c["files"] = sorted(c["files"])
        c["keywords"] = sorted(c["keywords"])
        # Provide stable, user-facing aliases.
        c["commit"] = c.get("hash")
        # fetch author and full commit message body for richer context
        try:
            info = subprocess.check_output(
                ["git", "show", "-s", "--format=%an\t%ae\t%ad%n%s%n%b", c["hash"]],
                cwd=repo_root,
                stderr=subprocess.STDOUT,
                universal_newlines=True,
            )
            # First line is "author-name\tauthor-email\tdate"; the rest is
            # the subject + body.
            header, _, body = info.partition("\n")
            parts = header.split("\t")
            c["author_name"] = parts[0] if len(parts) > 0 else ""
            c["author_email"] = parts[1] if len(parts) > 1 else ""
            c["authored_at"] = parts[2] if len(parts) > 2 else ""
            c["message"] = body.rstrip("\n")
        except Exception as e:
            print("[git] show failed for %s: %s" % (c["hash"], str(e)))
            # Fall back to the subject captured from `git log`.
            c["message"] = c.get("subject", "")
        if c.get("author_name") or c.get("author_email"):
            if c.get("author_email"):
                c["author"] = "%s <%s>" % (c.get("author_name", ""), c.get("author_email", ""))
            else:
                c["author"] = c.get("author_name", "")
        c["date"] = c.get("authored_at", "")
        result["commits"].append(c)
    # Sort by hash for deterministic output (not chronological).
    result["commits"] = sorted(result["commits"], key=lambda c: c["hash"])
    return result, files
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
def _collect_release_files(pkg_root, pkg_cfg):
    """Resolve include.releases entries to a flat list of file paths.

    Each configured entry may be a file or a directory, absolute or
    relative to *pkg_root*, and "~" is expanded — mirroring the
    resolution rules of _collect_release_sources, which this function
    previously disagreed with (no expanduser; plain-file entries fell
    through os.walk and were silently dropped).  Missing entries are
    reported and skipped.
    """
    include_cfg = pkg_cfg.get("include") or {}
    releases = include_cfg.get("releases") or []
    files = []
    for rel in releases:
        target = os.path.expanduser(str(rel))
        if not os.path.isabs(target):
            target = os.path.join(pkg_root, target)
        target = os.path.abspath(target)
        if not os.path.exists(target):
            print("[update-pkg] skip missing release dir: %s" % target)
            continue
        if os.path.isfile(target):
            # A plain-file entry is included directly; os.walk would
            # yield nothing for it.
            files.append(target)
            continue
        for base, _, names in os.walk(target):
            for name in names:
                files.append(os.path.join(base, name))
    return files
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
def _hash_paths(paths):
    """Map each existing regular file in *paths* to its sha256 digest.

    Duplicates are collapsed, non-files are ignored, and hash failures
    are reported but do not abort the run.
    """
    digests = {}
    for candidate in sorted(set(paths)):
        if not os.path.exists(candidate) or not os.path.isfile(candidate):
            continue
        try:
            digests[candidate] = checksums_module.sha256_of_file(candidate)
        except Exception as e:
            print("[update-pkg] failed to hash %s: %s" % (candidate, str(e)))
    return digests
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
# Matches versioned bundle directory names like "release.v1.2.3".
_REL_VER_RE = re.compile(r"release\.v(\d+)\.(\d+)\.(\d+)$")


def _list_release_versions(base_dir):
    """Return a sorted list of ((major, minor, patch), path) under base_dir.

    Entries that do not match the release.vX.Y.Z naming scheme are
    ignored; a missing base_dir yields an empty list.
    """
    if not os.path.isdir(base_dir):
        return []
    found = []
    for entry in os.listdir(base_dir):
        match = _REL_VER_RE.match(entry)
        if match is None:
            continue
        version = tuple(int(part) for part in match.groups())
        found.append((version, os.path.join(base_dir, entry)))
    found.sort()
    return found
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def _next_release_version(base_dir):
    """Return (next_version_tuple, latest_existing_path_or_None).

    Starts at 0.0.1 when no releases exist; otherwise bumps the patch
    component of the highest existing version.
    """
    existing = _list_release_versions(base_dir)
    if not existing:
        return (0, 0, 1), None
    (major, minor, patch), latest_path = existing[-1]
    return (major, minor, patch + 1), latest_path
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
def _format_version(ver_tuple):
    """Render a (major, minor, patch) tuple as 'release.vX.Y.Z'."""
    major, minor, patch = ver_tuple
    return "release.v%d.%d.%d" % (major, minor, patch)
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def _relpath_from_pkg(pkg_dir, path):
    """Return *path* relative to *pkg_dir*; basename when outside it or on error."""
    try:
        relative = os.path.relpath(path, pkg_dir)
    except Exception:
        return os.path.basename(path)
    if relative.startswith(".."):
        # Paths outside the package collapse to their basename.
        return os.path.basename(path)
    return relative
|
|
412
|
+
|
|
413
|
+
|
|
414
|
+
def _collect_release_sources(pkg_dir, pkg_cfg):
    """Resolve include.releases entries to (abspath, relpath) pairs.

    Entries may be files or directories, absolute or relative to
    *pkg_dir*; "~" is expanded.  Missing entries are reported and skipped.
    """
    configured = (pkg_cfg.get("include") or {}).get("releases") or []
    collected = []
    for entry in configured:
        entry_str = str(entry)
        if os.path.isabs(entry_str):
            target = entry_str
        else:
            target = os.path.join(pkg_dir, entry_str)
        target = os.path.abspath(os.path.expanduser(target))
        if not os.path.exists(target):
            print("[update-pkg] skip missing release source: %s" % target)
            continue
        if os.path.isfile(target):
            collected.append((target, _relpath_from_pkg(pkg_dir, target)))
            continue
        for base, _, names in os.walk(target):
            for filename in names:
                abspath = os.path.join(base, filename)
                collected.append((abspath, _relpath_from_pkg(pkg_dir, abspath)))
    return collected
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
def _load_prev_hashes(prev_release_dir):
    """Map relative file paths under *prev_release_dir* to sha256 digests.

    Unreadable files are silently skipped (best-effort comparison base).
    """
    digests = {}
    for base, _, names in os.walk(prev_release_dir):
        for filename in names:
            abspath = os.path.join(base, filename)
            if not os.path.isfile(abspath):
                continue
            relative = os.path.relpath(abspath, prev_release_dir)
            try:
                digests[relative] = checksums_module.sha256_of_file(abspath)
            except Exception:
                continue
    return digests
|
|
451
|
+
|
|
452
|
+
|
|
453
|
+
def _prepare_release(pkg_dir, pkg_cfg):
    """
    Build release bundles grouped by top-level include root.
    Layout: <pkg_dir>/release/<root>/release.vX.Y.Z/<files-under-root>
    Returns list of bundle metadata per root.

    Only files whose sha256 differs from the previous bundle are copied;
    when a previous bundle exists and nothing changed, no new bundle is
    created for that root.  Each new bundle also gets a README.txt
    summarizing its contents.
    """
    release_root = os.path.join(pkg_dir, "release")
    bundles = []
    source_files = _collect_release_sources(pkg_dir, pkg_cfg)

    # group files by top-level root name
    grouped = {}
    for src, rel in source_files:
        parts = rel.split("/", 1)
        if len(parts) == 2:
            root, subrel = parts[0], parts[1]
        else:
            # Top-level files (no directory component) go under "root".
            root, subrel = "root", rel
        grouped.setdefault(root, []).append((src, subrel))

    for root, entries in grouped.items():
        root_dir = os.path.join(release_root, root)
        # Bump patch relative to the latest existing bundle (0.0.1 if none).
        next_ver, prev_dir = _next_release_version(root_dir)
        release_name = _format_version(next_ver)
        prev_hashes = _load_prev_hashes(prev_dir) if prev_dir else {}
        copied = []
        skipped = []
        to_copy = []

        for src, rel in entries:
            prev_hash = prev_hashes.get(rel)
            try:
                curr_hash = checksums_module.sha256_of_file(src)
            except Exception as e:
                print("[update-pkg] failed to hash %s: %s" % (src, str(e)))
                continue
            if prev_hash and prev_hash == curr_hash:
                # Unchanged since the previous bundle: record but don't copy.
                skipped.append(rel)
                continue
            copied.append(rel)
            to_copy.append((src, rel))

        if prev_dir and not copied:
            # Everything unchanged and a previous bundle exists -> nothing
            # to publish for this root.
            print("[update-pkg] no changes for %s; skipping release" % root)
            continue

        release_dir = os.path.join(root_dir, release_name)
        if not os.path.exists(release_dir):
            os.makedirs(release_dir)

        for src, rel in to_copy:
            dest = os.path.join(release_dir, rel)
            dest_parent = os.path.dirname(dest)
            if dest_parent and not os.path.exists(dest_parent):
                os.makedirs(dest_parent)
            # copy2 preserves file metadata (mtime etc.).
            shutil.copy2(src, dest)

        # Human-readable summary dropped alongside the copied files.
        ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        prev_label = os.path.basename(prev_dir) if prev_dir else "none"
        readme_lines = [
            "Release root: %s" % root,
            "Release: %s" % release_name,
            "Created at: %s" % ts,
            "Base version: %s" % prev_label,
            "Files included: %d (skipped unchanged: %d)" % (len(copied), len(skipped)),
            "Tar example:",
            " tar cvf %s.tar -C %s %s" % (release_name, root_dir, release_name),
            "",
            "Included files:",
        ]
        readme_lines.extend([" - %s" % f for f in copied] or [" (none)"])
        readme_lines.append("")
        readme_lines.append("TODO: baseline change detection/notification for future revision.")

        readme_path = os.path.join(release_dir, "README.txt")
        with open(readme_path, "w") as f:
            f.write("\n".join(readme_lines))

        print("[update-pkg] prepared %s (files=%d skipped=%d)" % (release_dir, len(copied), len(skipped)))
        bundles.append(
            {
                "root": root,
                "release_dir": release_dir,
                "release_name": release_name,
                "copied": copied,
                "skipped": skipped,
                "prev_release": prev_dir,
            }
        )

    return bundles
|
|
544
|
+
|
|
545
|
+
|
|
546
|
+
def update_pkg(cfg, pkg_id):
    """Collect git keyword hits and release checksums into a timestamped history.

    Loads the package's pkg.yaml, gathers matching git commits and the
    configured release files, prepares new versioned release bundles, and
    writes an update-<timestamp>.json record under the package's state
    directory.  Returns the path of the written record.  Raises
    RuntimeError when the package directory does not exist.
    """
    pkg_dir = _pkg_dir(cfg, pkg_id)
    if not os.path.exists(pkg_dir):
        raise RuntimeError("pkg dir not found: %s" % pkg_dir)
    pkg_cfg_path = os.path.join(pkg_dir, "pkg.yaml")
    pkg_cfg = config.load_pkg_config(pkg_cfg_path)

    ts = time.strftime("%Y%m%dT%H%M%S", time.localtime())
    updates_dir = os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id), "updates")
    if not os.path.exists(updates_dir):
        os.makedirs(updates_dir)

    # Commit hits for configured keywords plus the files those commits touched.
    git_info, git_files = _collect_git_hits(pkg_cfg, pkg_dir)
    release_files = _collect_release_files(pkg_dir, pkg_cfg)

    # Copy changed files into new release.vX.Y.Z bundles per include root.
    release_bundle = _prepare_release(pkg_dir, pkg_cfg)

    data = {
        "pkg_id": str(pkg_id),
        "run_at": ts,
        "git": git_info,
        "checksums": {
            "git_files": _hash_paths(git_files),
            "release_files": _hash_paths(release_files),
        },
        "release": release_bundle,
    }

    out_path = os.path.join(updates_dir, "update-%s.json" % ts)
    with open(out_path, "w") as f:
        json.dump(data, f, ensure_ascii=False, indent=2, sort_keys=True)
    print("[update-pkg] wrote %s" % out_path)
    return out_path
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
from __future__ import print_function
|
|
2
|
+
"""Shell integration helpers: print PATH/alias instructions per shell."""
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
|
|
6
|
+
from . import config
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def ensure_path_and_alias(script_dir, alias_name="pkg", command="pkgmgr"):
    """
    Print PATH/alias instructions for the current shell.
    script_dir: directory where the pkgmgr console script lives (e.g. venv/bin).

    The same instructions are also persisted via _write_readme so the user
    can refer back to them later.
    """
    if not script_dir:
        print("[install] script_dir not provided; skip shell integration")
        return
    shell = os.environ.get("SHELL", "")
    shell_name = os.path.basename(shell) if shell else ""

    try:
        lines = _instructions_for_shell(shell_name, script_dir, alias_name, command)
    except Exception as e:
        print("[install] shell integration failed for %s: %s" % (shell_name, str(e)))
        return

    if not lines:
        print("[install] unknown shell '%s'; skipping rc update" % (shell_name or ""))
        return

    header = "[install] To use pkgmgr, add these lines to your shell rc:"
    # Build the annotated line list once; it is both printed and saved
    # (previously recomputed for each use).
    output_lines = _emit_lines_with_header(header, lines)
    for line in output_lines:
        print(line)
    readme_path = _write_readme(output_lines)
    if readme_path:
        print("[install] Reference saved to: %s" % readme_path)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _instructions_for_shell(shell_name, script_dir, alias_name, command):
    """Return shell-rc lines adding *script_dir* to PATH and aliasing *command*.

    Returns None for unrecognized shells so the caller can skip rc updates.
    """
    # bash and zsh share POSIX export/alias syntax; the original carried
    # two identical copies of this branch.
    if shell_name in ("bash", "zsh"):
        return [
            'export PATH="%s:$PATH"' % script_dir,
            'alias %s="%s"' % (alias_name, command),
        ]
    if shell_name in ("csh", "tcsh"):
        return [
            "set path = (%s $path)" % script_dir,
            "alias %s %s" % (alias_name, command),
        ]
    if shell_name == "fish":
        return [
            "set -U fish_user_paths %s $fish_user_paths" % script_dir,
            "alias %s %s" % (alias_name, command),
        ]
    return None
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _emit_lines_with_header(header, lines):
    """Return a new list consisting of *header* followed by *lines*."""
    return [header] + list(lines)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _write_readme(lines):
    """Write *lines* to README.txt under config.BASE_DIR.

    Returns the README path on success, or None on any failure
    (best-effort: an unwritable location is not an error for the caller).
    """
    readme_path = os.path.join(config.BASE_DIR, "README.txt")
    try:
        parent = os.path.dirname(readme_path)
        if parent and not os.path.exists(parent):
            os.makedirs(parent)
        with open(readme_path, "w") as handle:
            handle.writelines(line + "\n" for line in lines)
        return readme_path
    except Exception:
        return None
|