pkgmgr-kunrunic 0.1.1.dev4__py3-none-any.whl

pkgmgr/release.py ADDED
@@ -0,0 +1,1031 @@
+ """Release/package lifecycle scaffolding."""
+
+ from __future__ import print_function
+
+ import json
+ import os
+ import re
+ import shutil
+ import shlex
+ import sys
+ import time
+ import tarfile
+ import subprocess
+
+ from . import config, snapshot, shell_integration, points
+ from .collectors import checksums as checksums_module
+
+ try:  # Py2 compatibility: input() evaluates its argument there
+     input = raw_input  # noqa: F821
+ except NameError:  # Py3: builtin input() already returns a plain string
+     pass
+
+
+ def ensure_environment():
+     """Prepare environment: print shell PATH/alias instructions."""
+     script_dir = os.path.dirname(sys.executable)
+     shell_integration.ensure_path_and_alias(script_dir)
+
+
+ def _pkg_dir(cfg, pkg_id):
+     root = os.path.expanduser(cfg.get("pkg_release_root", ""))
+     if not root:
+         raise RuntimeError("pkg_release_root missing in config")
+     return os.path.join(root, str(pkg_id))
+
+
+ def _pkg_state_dir(pkg_id):
+     return os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id))
+
+
+ def _pkg_state_path(pkg_id):
+     return os.path.join(_pkg_state_dir(pkg_id), "state.json")
+
+
+ def _pkg_summary_path():
+     return os.path.join(config.DEFAULT_STATE_DIR, "pkg-summary.json")
+
+
+ def _pkg_release_history_dir(pkg_id):
+     return os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id), "release")
+
+
+ def _timestamp():
+     return time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
+
+
+ def _load_pkg_state(pkg_id):
+     path = _pkg_state_path(pkg_id)
+     if not os.path.exists(path):
+         return None
+     try:
+         with open(path, "r") as f:
+             return json.load(f)
+     except Exception:
+         return None
+
+
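+ # State transitions: "open" keeps the first opened_at and clears any stale
+ # closed_at; "closed" stamps closed_at. Callers may merge extra keys in.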
+ def _write_pkg_state(pkg_id, status, extra=None):
+     state_dir = _pkg_state_dir(pkg_id)
+     if not os.path.exists(state_dir):
+         os.makedirs(state_dir)
+     now = _timestamp()
+     existing = _load_pkg_state(pkg_id) or {}
+     state = {
+         "pkg_id": str(pkg_id),
+         "status": status,
+         "opened_at": existing.get("opened_at"),
+         "updated_at": now,
+     }
+     if status == "open":
+         state["opened_at"] = state["opened_at"] or now
+         state.pop("closed_at", None)
+     if status == "closed":
+         state["closed_at"] = now
+     if extra:
+         state.update(extra)
+     with open(_pkg_state_path(pkg_id), "w") as f:
+         json.dump(state, f, ensure_ascii=False, indent=2, sort_keys=True)
+     return state
+
+
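+ # Two timestamp shapes are accepted: the human-readable form written to
+ # state files and the compact form used in update-/release- filenames.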
+ def _parse_ts(value):
+     if not value:
+         return 0
+     for fmt in ("%Y-%m-%dT%H:%M:%S", "%Y%m%dT%H%M%S"):
+         try:
+             return int(time.mktime(time.strptime(str(value), fmt)))
+         except Exception:
+             continue
+     return 0
+
+
+ def _load_pkg_summary():
+     path = _pkg_summary_path()
+     if not os.path.exists(path):
+         return {"generated_at": _timestamp(), "pkgs": []}
+     try:
+         with open(path, "r") as f:
+             data = json.load(f)
+             if isinstance(data, dict) and isinstance(data.get("pkgs"), list):
+                 return data
+     except Exception:
+         pass
+     return {"generated_at": _timestamp(), "pkgs": []}
+
+
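+ # Scan the per-pkg updates dir for update-<ts>.json and pick the newest by
+ # parsed timestamp rather than by lexicographic filename order.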
+ def _find_latest_update(pkg_id):
+     updates_dir = os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id), "updates")
+     if not os.path.isdir(updates_dir):
+         return None, None
+     candidates = []
+     for name in os.listdir(updates_dir):
+         if not name.startswith("update-") or not name.endswith(".json"):
+             continue
+         ts = name[len("update-"):-len(".json")]
+         candidates.append((ts, name))
+     if not candidates:
+         return None, None
+     candidates.sort(key=lambda item: _parse_ts(item[0]))
+     latest_ts, latest_name = candidates[-1]
+     return os.path.join(updates_dir, latest_name), latest_ts
+
+
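+ # Condense one pkg's state plus its latest update record into the flat
+ # entry stored in pkg-summary.json.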
+ def _build_pkg_summary_entry(pkg_id):
+     state = _load_pkg_state(pkg_id) or {}
+     update_path, update_ts = _find_latest_update(pkg_id)
+     update_data = {}
+     if update_path:
+         try:
+             with open(update_path, "r") as f:
+                 update_data = json.load(f) or {}
+         except Exception:
+             update_data = {}
+     git_info = update_data.get("git") or {}
+     release_info = update_data.get("release") or []
+     checksums = update_data.get("checksums") or {}
+     git_files = checksums.get("git_files") or {}
+     release_files = checksums.get("release_files") or {}
+
+     entry = {
+         "pkg_id": str(pkg_id),
+         "status": state.get("status") or "unknown",
+         "opened_at": state.get("opened_at"),
+         "updated_at": state.get("updated_at"),
+         "closed_at": state.get("closed_at"),
+         "last_update_id": os.path.basename(update_path) if update_path else None,
+         "last_update_at": update_ts,
+         "git": {
+             "keywords": git_info.get("keywords") or [],
+             "commit_count": len(git_info.get("commits") or []),
+         },
+         "release": {
+             "bundle_count": len(release_info),
+             "roots": sorted({b.get("root") for b in release_info if b.get("root")}),
+             "names": sorted({b.get("release_name") for b in release_info if b.get("release_name")}),
+         },
+         "artifacts": {
+             "git_files": len(git_files),
+             "release_files": len(release_files),
+         },
+     }
+     return entry
+
+
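+ # Rebuild this pkg's summary entry, then reorder the whole summary:
+ # open pkgs first, most recently touched first within each group.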
+ def _update_pkg_summary(pkg_id):
+     data = _load_pkg_summary()
+     pkgs = data.get("pkgs") or []
+     by_id = {p.get("pkg_id"): p for p in pkgs if isinstance(p, dict)}
+     entry = _build_pkg_summary_entry(pkg_id)
+     by_id[entry["pkg_id"]] = entry
+
+     def _sort_key(item):
+         status = item.get("status") or ""
+         updated_ts = max(_parse_ts(item.get("updated_at")), _parse_ts(item.get("last_update_at")))
+         return (0 if status == "open" else 1, -updated_ts)
+
+     ordered = sorted(by_id.values(), key=_sort_key)
+     data = {
+         "generated_at": _timestamp(),
+         "pkgs": ordered,
+     }
+     path = _pkg_summary_path()
+     if not os.path.exists(os.path.dirname(path)):
+         os.makedirs(os.path.dirname(path))
+     with open(path, "w") as f:
+         json.dump(data, f, ensure_ascii=False, indent=2, sort_keys=True)
+
+
+ def _write_release_history(pkg_id, run_at, bundles):
+     if not bundles:
+         return None
+     rel_dir = _pkg_release_history_dir(pkg_id)
+     if not os.path.exists(rel_dir):
+         os.makedirs(rel_dir)
+     payload = {
+         "pkg_id": str(pkg_id),
+         "run_at": run_at,
+         "generated_at": _timestamp(),
+         "bundles": bundles,
+     }
+     out_path = os.path.join(rel_dir, "release-%s.json" % run_at)
+     with open(out_path, "w") as f:
+         json.dump(payload, f, ensure_ascii=False, indent=2, sort_keys=True)
+     return out_path
+
+
+ def pkg_is_closed(pkg_id):
+     state = _load_pkg_state(pkg_id)
+     return bool(state and state.get("status") == "closed")
+
+
+ def pkg_state(pkg_id):
+     return _load_pkg_state(pkg_id)
+
+
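+ # create-pkg flow: make the pkg dir, write pkg.yaml (prompting before an
+ # overwrite on a TTY), ensure a baseline snapshot, then mark the pkg open.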
+ def create_pkg(cfg, pkg_id):
+     """Create pkg directory and write pkg.yaml template."""
+     dest = _pkg_dir(cfg, pkg_id)
+     if not os.path.exists(dest):
+         os.makedirs(dest)
+     template_path = os.path.join(dest, "pkg.yaml")
+
+     def _should_write_template(path):
+         if not os.path.exists(path):
+             return True
+         prompt = "[create-pkg] pkg.yaml already exists at %s; overwrite? [y/N]: " % path
+         if not sys.stdin.isatty():
+             print(prompt + "non-tty -> keeping existing")
+             return False
+         ans = input(prompt).strip().lower()  # raw_input under Py2 via the shim above
+         return ans in ("y", "yes")
+
+     if not _should_write_template(template_path):
+         print("[create-pkg] kept existing pkg.yaml; no changes made")
+         return
+
+     git_cfg = cfg.get("git") or {}
+     collectors_enabled = (cfg.get("collectors") or {}).get("enabled") or ["checksums"]
+     config.write_pkg_template(
+         template_path,
+         pkg_id=pkg_id,
+         pkg_root=dest,
+         include_releases=[],
+         git_cfg=git_cfg,
+         collectors_enabled=collectors_enabled,
+     )
+     # initial snapshot placeholder (only if no baseline exists yet)
+     baseline_path = os.path.join(config.DEFAULT_STATE_DIR, "baseline.json")
+     if not os.path.exists(baseline_path):
+         snapshot.create_baseline(cfg)
+     else:
+         print("[create-pkg] baseline already exists; skipping baseline creation")
+     _write_pkg_state(pkg_id, "open")
+     _update_pkg_summary(pkg_id)
+     print("[create-pkg] prepared %s" % dest)
+
+
+ def close_pkg(cfg, pkg_id):
+     """Mark pkg closed: write a .closed marker and update state/summary."""
+     dest = _pkg_dir(cfg, pkg_id)
+     if not os.path.exists(dest):
+         print("[close-pkg] pkg dir not found, nothing to close: %s" % dest)
+         return
+     marker = os.path.join(dest, ".closed")
+     with open(marker, "w") as f:
+         f.write("closed\n")
+     _write_pkg_state(pkg_id, "closed")
+     _update_pkg_summary(pkg_id)
+     print("[close-pkg] marked closed: %s" % dest)
+
+
+ def collect_for_pkg(cfg, pkg_id, collectors=None):
+     """Run collector hooks (stub)."""
+     if pkg_id and pkg_is_closed(pkg_id):
+         print("[collect] pkg=%s is closed; skipping collectors" % pkg_id)
+         return
+     print(
+         "[collect] pkg=%s collectors=%s (stub; wire to collectors.checksums etc.)"
+         % (pkg_id, collectors or "default")
+     )
+
+
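+ # Actions config shape: actions.<name> is a command string, a dict with
+ # cmd/cwd/env keys, or a list of either. Extra CLI args are shell-quoted
+ # and appended to every command of the action.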
+ def run_actions(cfg, names, extra_args=None, config_path=None):
+     """Run configured actions by name. Returns result list."""
+     actions = cfg.get("actions", {}) or {}
+     if not names:
+         print("[actions] no action names provided")
+         return []
+     extra_args = extra_args or []
+     extra_suffix = ""
+     if extra_args:
+         quoted = [shlex.quote(str(arg)) for arg in extra_args]
+         extra_suffix = " " + " ".join(quoted)
+     results = []
+     for name in names:
+         entries = actions.get(name)
+         if not entries:
+             print("[actions] unknown action: %s" % name)
+             results.append({"name": name, "status": "missing", "rc": None})
+             continue
+         if isinstance(entries, dict):
+             entries = [entries]
+         if not isinstance(entries, (list, tuple)):
+             print("[actions] invalid action format for %s" % name)
+             results.append({"name": name, "status": "invalid", "rc": None})
+             continue
+         print("[actions] running %s (%d command(s))" % (name, len(entries)))
+         for idx, entry in enumerate(entries):
+             cmd, cwd, env = _parse_action_entry(entry)
+             if not cmd:
+                 print("[actions] skip empty cmd for %s #%d" % (name, idx + 1))
+                 continue
+             if config_path:
+                 env = dict(env or {})
+                 env.setdefault("PKGMGR_CONFIG", config_path)
+             if extra_suffix:
+                 cmd = "%s%s" % (cmd, extra_suffix)
+             rc = _run_cmd(cmd, cwd=cwd, env=env, label="%s #%d" % (name, idx + 1))
+             results.append(
+                 {
+                     "name": name,
+                     "status": "ok" if rc == 0 else "failed",
+                     "rc": rc,
+                 }
+             )
+     return results
+
+
+ def _parse_action_entry(entry):
+     if isinstance(entry, dict):
+         cmd = entry.get("cmd")
+         cwd = entry.get("cwd")
+         env = entry.get("env")
+         return cmd, cwd, env
+     return entry, None, None
+
+
+ def _run_cmd(cmd, cwd=None, env=None, label=None):
+     merged_env = os.environ.copy()
+     if env and isinstance(env, dict):
+         for k, v in env.items():
+             if v is None:
+                 continue
+             merged_env[str(k)] = str(v)
+     try:
+         p = subprocess.Popen(cmd, shell=True, cwd=cwd, env=merged_env)
+         rc = p.wait()
+         prefix = "[actions]"
+         tag = " (%s)" % label if label else ""
+         if rc == 0:
+             print("%s command ok%s" % (prefix, tag))
+         else:
+             print("%s command failed%s (rc=%s)" % (prefix, tag, rc))
+     except Exception as e:
+         prefix = "[actions]"
+         tag = " (%s)" % label if label else ""
+         print("%s error%s: %s" % (prefix, tag, str(e)))
+         return 1
+     return rc
+
+
+ def create_point(cfg, pkg_id, label=None, actions_run=None, actions_result=None, snapshot_data=None):
+     """Create a checkpoint for a package (snapshot + meta)."""
+     return points.create_point(
+         cfg, pkg_id, label=label, actions_run=actions_run, actions_result=actions_result, snapshot_data=snapshot_data
+     )
+
+
+ def list_points(cfg, pkg_id):
+     """List checkpoints for a package."""
+     return points.list_points(pkg_id)
+
+
+ def _git_repo_root(pkg_root, git_cfg):
+     # Prefer explicit repo_root from pkg config; if relative, resolve from pkg_root.
+     repo_root = (git_cfg or {}).get("repo_root")
+     if repo_root:
+         repo_root = os.path.expanduser(repo_root)
+         if not os.path.isabs(repo_root):
+             repo_root = os.path.abspath(os.path.join(pkg_root, repo_root))
+         else:
+             repo_root = os.path.abspath(repo_root)
+         if os.path.isdir(repo_root):
+             return repo_root
+         print("[git] repo_root %s not found; falling back to git rev-parse" % repo_root)
+     try:
+         out = subprocess.check_output(
+             ["git", "rev-parse", "--show-toplevel"],
+             cwd=pkg_root,  # resolve relative to the package dir, not the caller's CWD
+             stderr=subprocess.STDOUT,
+             universal_newlines=True,
+         )
+         return out.strip()
+     except Exception:
+         print("[git] not a git repo or git unavailable; skipping git collection")
+         return None
+
+
+ def _text_type():
+     try:
+         return unicode  # type: ignore[name-defined]
+     except Exception:
+         return str
+
+
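+ # git output may be bytes in a repo-specific encoding. Try the configured
+ # encoding first, then utf-8/euc-kr/cp949, and keep the decoding that
+ # produces the fewest U+FFFD replacement characters.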
+ def _decode_git_output(raw, encodings):
+     if raw is None:
+         return ""
+     text_type = _text_type()
+     if isinstance(raw, text_type):
+         return raw
+     if not encodings:
+         encodings = []
+     candidates = [e for e in encodings if e]
+     candidates.extend(["utf-8", "euc-kr", "cp949"])
+     best_text = None
+     best_score = None
+     for enc in candidates:
+         try:
+             text = raw.decode(enc, errors="replace")
+         except Exception:
+             continue
+         score = text.count(u"\ufffd")
+         if best_score is None or score < best_score:
+             best_score = score
+             best_text = text
+         if score == 0:
+             break
+     if best_text is not None:
+         return best_text
+     try:
+         return raw.decode("utf-8", errors="replace")
+     except Exception:
+         return str(raw)
+
+
+ def _git_output_encoding(repo_root):
+     for key in ("i18n.logOutputEncoding", "i18n.commitEncoding"):
+         try:
+             out = subprocess.check_output(
+                 ["git", "config", "--get", key],
+                 cwd=repo_root,
+                 stderr=subprocess.STDOUT,
+                 universal_newlines=True,
+             ).strip()
+         except Exception:
+             out = ""
+         if out:
+             return out
+     return "utf-8"
+
+
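+ # One `git log --name-only --grep=<kw>` pass per keyword. Output parsing:
+ # "<hash>\t<subject>" header lines open a commit entry; bare lines are the
+ # file paths touched by that commit.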
+ def _collect_git_hits(pkg_cfg, pkg_root):
+     git_cfg = pkg_cfg.get("git") or {}
+     keywords = [str(k) for k in (git_cfg.get("keywords") or []) if str(k).strip()]
+     result = {"keywords": keywords, "commits": []}
+     files = set()
+     if not keywords:
+         return result, files
+
+     repo_root = _git_repo_root(pkg_root, git_cfg)
+     if not repo_root:
+         return result, files
+
+     since = git_cfg.get("since")
+     until = git_cfg.get("until")
+     commits = {}
+     current = None
+
+     output_encoding = _git_output_encoding(repo_root)
+     for kw in keywords:
+         cmd = [
+             "git",
+             "--no-pager",
+             "log",
+             "--name-only",
+             "--pretty=format:%H\t%s",
+             "--grep=%s" % kw,
+             "--regexp-ignore-case",
+             "--all",
+         ]
+         if since:
+             cmd.append("--since=%s" % since)
+         if until:
+             cmd.append("--until=%s" % until)
+         # The pathspec separator must come last; appending options after "--"
+         # would make git treat them as paths.
+         cmd.append("--")
+         try:
+             out_raw = subprocess.check_output(
+                 cmd,
+                 cwd=repo_root,
+                 stderr=subprocess.STDOUT,
+                 universal_newlines=False,
+             )
+             out = _decode_git_output(out_raw, [output_encoding])
+         except Exception as e:
+             print("[git] log failed for keyword %s: %s" % (kw, str(e)))
+             continue
+
+         for line in out.splitlines():
+             line = line.strip()
+             if not line:
+                 continue
+             if "\t" in line:
+                 parts = line.split("\t", 1)
+                 commit_hash, subject = parts[0], parts[1]
+                 current = commits.setdefault(
+                     commit_hash, {"hash": commit_hash, "subject": subject, "keywords": set(), "files": set()}
+                 )
+                 current["keywords"].add(kw)
+                 continue
+             if current:
+                 current["files"].add(line)
+                 files.add(os.path.join(repo_root, line))
+
+     for c in commits.values():
+         c["files"] = sorted(c["files"])
+         c["keywords"] = sorted(c["keywords"])
+         # Provide stable, user-facing aliases.
+         c["commit"] = c.get("hash")
+         # fetch author and full commit message body for richer context
+         try:
+             info_raw = subprocess.check_output(
+                 ["git", "show", "-s", "--format=%an\t%ae\t%ad%n%s%n%b", c["hash"]],
+                 cwd=repo_root,
+                 stderr=subprocess.STDOUT,
+                 universal_newlines=False,
+             )
+             info = _decode_git_output(info_raw, [output_encoding])
+             header, _, body = info.partition("\n")
+             parts = header.split("\t")
+             c["author_name"] = parts[0] if len(parts) > 0 else ""
+             c["author_email"] = parts[1] if len(parts) > 1 else ""
+             c["authored_at"] = parts[2] if len(parts) > 2 else ""
+             c["message"] = body.rstrip("\n")
+         except Exception as e:
+             print("[git] show failed for %s: %s" % (c["hash"], str(e)))
+             c["message"] = c.get("subject", "")
+         if c.get("author_name") or c.get("author_email"):
+             if c.get("author_email"):
+                 c["author"] = "%s <%s>" % (c.get("author_name", ""), c.get("author_email", ""))
+             else:
+                 c["author"] = c.get("author_name", "")
+         c["date"] = c.get("authored_at", "")
+         result["commits"].append(c)
+     result["commits"] = sorted(result["commits"], key=lambda c: c["hash"])
+     return result, files
+
+
+ def _collect_release_files(pkg_root, pkg_cfg):
+     include_cfg = pkg_cfg.get("include") or {}
+     releases = include_cfg.get("releases") or []
+     files = []
+     for rel in releases:
+         target = os.path.abspath(os.path.join(pkg_root, str(rel)))
+         if not os.path.exists(target):
+             print("[update-pkg] skip missing release dir: %s" % target)
+             continue
+         for base, _, names in os.walk(target):
+             for name in names:
+                 files.append(os.path.join(base, name))
+     return files
+
+
+ def _hash_paths(paths):
+     checksums = {}
+     for path in sorted(set(paths)):
+         if not os.path.isfile(path):
+             continue
+         try:
+             checksums[path] = checksums_module.sha256_of_file(path)
+         except Exception as e:
+             print("[update-pkg] failed to hash %s: %s" % (path, str(e)))
+     return checksums
+
+
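+ # Release bundles live under <pkg>/release/<root>/release.vX.Y.Z; finalized
+ # bundles move to <root>/HISTORY and the baseline sits in HISTORY/BASELINE.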
+ _REL_VER_RE = re.compile(r"release\.v(\d+)\.(\d+)\.(\d+)$")
+ _PKG_NOTE_NAME = "PKG_NOTE"
+ _PKG_LIST_NAME = "PKG_LIST"
+
+
+ def _list_release_versions(base_dir, include_history=False):
+     """Return list of ((major, minor, patch), path) under base_dir (optionally HISTORY too)."""
+     versions = []
+     if not os.path.isdir(base_dir):
+         return versions
+     scan_dirs = [base_dir]
+     if include_history:
+         scan_dirs.append(os.path.join(base_dir, "HISTORY"))
+     for scan_dir in scan_dirs:
+         if not os.path.isdir(scan_dir):
+             continue
+         for name in os.listdir(scan_dir):
+             m = _REL_VER_RE.match(name)
+             if not m:
+                 continue
+             ver = tuple(int(x) for x in m.groups())
+             versions.append((ver, os.path.join(scan_dir, name)))
+     versions.sort()
+     return versions
+
+
+ def _next_release_version(base_dir):
+     versions = _list_release_versions(base_dir, include_history=True)
+     if not versions:
+         return (0, 0, 1), None
+     latest_ver, latest_path = versions[-1]
+     next_ver = (latest_ver[0], latest_ver[1], latest_ver[2] + 1)
+     return next_ver, latest_path
+
+
+ def _format_version(ver_tuple):
+     return "release.v%d.%d.%d" % ver_tuple
+
+
+ def _relpath_from_pkg(pkg_dir, path):
+     try:
+         rel = os.path.relpath(path, pkg_dir)
+         if rel.startswith(".."):
+             return os.path.basename(path)
+         return rel
+     except Exception:
+         return os.path.basename(path)
+
+
+ def _collect_release_sources(pkg_dir, pkg_cfg):
+     include_cfg = pkg_cfg.get("include") or {}
+     releases = include_cfg.get("releases") or []
+     files = []
+     for rel in releases:
+         rel_str = str(rel)
+         target = rel_str
+         if not os.path.isabs(target):
+             target = os.path.join(pkg_dir, rel_str)
+         target = os.path.abspath(os.path.expanduser(target))
+         if not os.path.exists(target):
+             print("[update-pkg] skip missing release source: %s" % target)
+             continue
+         if os.path.isfile(target):
+             files.append((target, _relpath_from_pkg(pkg_dir, target)))
+             continue
+         for base, _, names in os.walk(target):
+             for name in names:
+                 abspath = os.path.join(base, name)
+                 relpath = _relpath_from_pkg(pkg_dir, abspath)
+                 files.append((abspath, relpath))
+     return files
+
+
+ def _load_prev_hashes(prev_release_dir):
+     hashes = {}
+     for base, _, names in os.walk(prev_release_dir):
+         for name in names:
+             abspath = os.path.join(base, name)
+             if not os.path.isfile(abspath):
+                 continue
+             rel = os.path.relpath(abspath, prev_release_dir)
+             try:
+                 hashes[rel] = checksums_module.sha256_of_file(abspath)
+             except Exception:
+                 continue
+     return hashes
+
+
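+ # Per-file decision: matches the baseline hash -> skipped; matches the hash
+ # already in the active bundle -> left alone; otherwise copied (counted as
+ # added or updated). Files present in the bundle but no longer expected, or
+ # back to their baseline content, are removed from the bundle.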
+ def _prepare_release(pkg_dir, pkg_cfg):
+     """
+     Build release bundles grouped by top-level include root.
+     Layout: <pkg_dir>/release/<root>/release.vX.Y.Z/<files-under-root>
+     Returns list of bundle metadata per root.
+     """
+     release_root = os.path.join(pkg_dir, "release")
+     bundles = []
+     source_files = _collect_release_sources(pkg_dir, pkg_cfg)
+
+     # group files by top-level root name
+     grouped = {}
+     for src, rel in source_files:
+         parts = rel.split("/", 1)
+         if len(parts) == 2:
+             root, subrel = parts[0], parts[1]
+         else:
+             root, subrel = "root", rel
+         grouped.setdefault(root, []).append((src, subrel))
+
+     for root, entries in grouped.items():
+         root_dir = os.path.join(release_root, root)
+         active_versions = _list_release_versions(root_dir, include_history=False)
+         history_dir = os.path.join(root_dir, "HISTORY")
+         history_versions = _list_release_versions(history_dir, include_history=False)
+         baseline_dir = os.path.join(history_dir, "BASELINE")
+         reuse_active = False
+
+         if active_versions:
+             latest_ver, latest_path = active_versions[-1]
+             release_name = _format_version(latest_ver)
+             release_dir = latest_path
+             prev_dir = latest_path
+             reuse_active = True
+             base_label = os.path.basename(history_versions[-1][1]) if history_versions else "none"
+         else:
+             next_ver, prev_dir = _next_release_version(root_dir)
+             release_name = _format_version(next_ver)
+             release_dir = os.path.join(root_dir, release_name)
+             base_label = os.path.basename(prev_dir) if prev_dir else "none"
+
+         has_baseline = os.path.isdir(baseline_dir)
+         baseline_hashes = _load_prev_hashes(baseline_dir) if has_baseline else {}
+         release_hashes = _load_prev_hashes(release_dir) if reuse_active and os.path.isdir(release_dir) else {}
+         copied = []
+         added = []
+         updated = []
+         skipped = []
+         to_copy = []
+         expected = set(rel for _, rel in entries)
+         existing = set()
+         if reuse_active and os.path.isdir(release_dir):
+             for base, _, names in os.walk(release_dir):
+                 for name in names:
+                     if name in (_PKG_NOTE_NAME, _PKG_LIST_NAME):
+                         continue
+                     abspath = os.path.join(base, name)
+                     if not os.path.isfile(abspath):
+                         continue
+                     rel = os.path.relpath(abspath, release_dir)
+                     existing.add(rel)
+         curr_hashes = {}
+         removed = set()
+
+         for src, rel in entries:
+             baseline_hash = baseline_hashes.get(rel)
+             try:
+                 curr_hash = checksums_module.sha256_of_file(src)
+                 curr_hashes[rel] = curr_hash
+             except Exception as e:
+                 print("[update-pkg] failed to hash %s: %s" % (src, str(e)))
+                 continue
+             if baseline_hash and baseline_hash == curr_hash:
+                 skipped.append(rel)
+                 continue
+             release_hash = release_hashes.get(rel)
+             if release_hash and release_hash == curr_hash:
+                 continue
+             copied.append(rel)
+             if release_hash:
+                 updated.append(rel)
+             else:
+                 added.append(rel)
+             to_copy.append((src, rel))
+
+         if reuse_active:
+             for rel in existing:
+                 if rel not in expected:
+                     removed.add(rel)
+                     continue
+                 baseline_hash = baseline_hashes.get(rel)
+                 curr_hash = curr_hashes.get(rel)
+                 if baseline_hash and curr_hash and baseline_hash == curr_hash:
+                     removed.add(rel)
+         elif not has_baseline:
+             removed = existing - expected
+
+         if (has_baseline or reuse_active) and not copied and not removed:
+             print("[update-pkg] no changes for %s; skipping release" % root)
+             continue
+
+         note_payload = None
+         if reuse_active and os.path.exists(release_dir):
+             existing_note = os.path.join(release_dir, _PKG_NOTE_NAME)
+             if os.path.exists(existing_note):
+                 try:
+                     with open(existing_note, "r") as f:
+                         note_payload = f.read()
+                 except Exception:
+                     note_payload = None
+         if not os.path.exists(release_dir):
+             os.makedirs(release_dir)
+
+         for src, rel in to_copy:
+             dest = os.path.join(release_dir, rel)
+             dest_parent = os.path.dirname(dest)
+             if dest_parent and not os.path.exists(dest_parent):
+                 os.makedirs(dest_parent)
+             shutil.copy2(src, dest)
+         for rel in sorted(removed):
+             abspath = os.path.join(release_dir, rel)
+             if os.path.isfile(abspath):
+                 os.remove(abspath)
+         for base, _, _ in os.walk(release_dir, topdown=False):
+             # use listdir, not the walk's pre-computed lists, so parents
+             # emptied by the removals above are pruned as well
+             if base != release_dir and not os.listdir(base):
+                 os.rmdir(base)
+
+         ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+         note_path = os.path.join(release_dir, _PKG_NOTE_NAME)
+         if note_payload is not None:
+             with open(note_path, "w") as f:
+                 f.write(note_payload)
+         elif not os.path.exists(note_path):
+             with open(note_path, "w") as f:
+                 f.write(
+                     "\n".join(
+                         [
+                             "Release root: %s" % root,
+                             "Release: %s" % release_name,
+                             "Created at: %s" % ts,
+                             "",
+                             "[ package note ]",
+                             "",
+                             "Refer to PKG_LIST for detailed PKG entries.",
+                             "",
+                         ]
+                     )
+                 )
+
+         all_files = []
+         for base, _, names in os.walk(release_dir):
+             for name in names:
+                 if name in (_PKG_NOTE_NAME, _PKG_LIST_NAME):
+                     continue
+                 abspath = os.path.join(base, name)
+                 if not os.path.isfile(abspath):
+                     continue
+                 rel = os.path.relpath(abspath, release_dir)
+                 all_files.append(rel)
+         all_files.sort()
+
+         change_parts = []
+         if added:
+             change_parts.append("+%d" % len(added))
+         if updated:
+             change_parts.append("~%d" % len(updated))
+         if removed:
+             change_parts.append("-%d" % len(removed))
+         change_label = " ".join(change_parts) or "no changes"
+
+         pkg_list_lines = [
+             "Release root: %s" % root,
+             "Release: %s" % release_name,
+             "Created at: %s" % ts,
+             "Base version: %s" % base_label,
+             "Files changed: %s (skipped unchanged: %d)" % (change_label, len(skipped)),
+             "",
+             "Included files:",
+         ]
+         pkg_list_lines.extend([" - %s" % f for f in all_files] or [" (none)"])
+         pkg_list_lines.append("")
+         pkg_list_lines.append("Note: check PKG_NOTE for detailed PKG information.")
+
+         pkg_list_path = os.path.join(release_dir, _PKG_LIST_NAME)
+         with open(pkg_list_path, "w") as f:
+             f.write("\n".join(pkg_list_lines))
+
+         print(
+             "[update-pkg] prepared %s (%s skipped=%d)"
+             % (release_dir, change_label, len(skipped))
+         )
+         bundles.append(
+             {
+                 "root": root,
+                 "release_dir": release_dir,
+                 "release_name": release_name,
+                 "created_at": ts,
+                 "files": all_files,
+                 "copied": copied,
+                 "skipped": skipped,
+                 "added": added,
+                 "updated": updated,
+                 "removed": sorted(removed),
+                 "prev_release": prev_dir,
+             }
+         )
+
+     return bundles
+
+
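+ # Mirror the current source files into HISTORY/BASELINE so the next
+ # update-pkg run can tell changed files from unchanged ones.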
+ def _sync_baseline_root(root_dir, entries):
+     baseline_dir = os.path.join(root_dir, "HISTORY", "BASELINE")
+     if not os.path.exists(baseline_dir):
+         os.makedirs(baseline_dir)
+     expected = set(rel for _, rel in entries)
+
+     for src, rel in entries:
+         dest = os.path.join(baseline_dir, rel)
+         dest_parent = os.path.dirname(dest)
+         if dest_parent and not os.path.exists(dest_parent):
+             os.makedirs(dest_parent)
+         shutil.copy2(src, dest)
+
+     for base, _, names in os.walk(baseline_dir):
+         for name in names:
+             abspath = os.path.join(base, name)
+             if not os.path.isfile(abspath):
+                 continue
+             rel = os.path.relpath(abspath, baseline_dir)
+             if rel not in expected:
+                 os.remove(abspath)
+     for base, _, _ in os.walk(baseline_dir, topdown=False):
+         # prune directories left empty by the removals above
+         if base != baseline_dir and not os.listdir(base):
+             os.rmdir(base)
+
+
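+ # Finalization: tar the newest active release.vX.Y.Z dir, then move it
+ # into HISTORY so the next update starts a fresh bundle.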
+ def _finalize_release_root(root_dir):
+     versions = _list_release_versions(root_dir, include_history=False)
+     if not versions:
+         print("[update-pkg] no active release dir under %s" % root_dir)
+         return None
+     latest_ver, latest_path = versions[-1]
+     release_name = _format_version(latest_ver)
+     tar_path = os.path.join(root_dir, "%s.tar" % release_name)
+
+     with tarfile.open(tar_path, "w") as tar:
+         tar.add(latest_path, arcname=release_name)
+
+     history_dir = os.path.join(root_dir, "HISTORY")
+     if not os.path.exists(history_dir):
+         os.makedirs(history_dir)
+     history_target = os.path.join(history_dir, release_name)
+     if os.path.exists(history_target):
+         print("[update-pkg] history already contains %s; skipping move" % history_target)
+     else:
+         shutil.move(latest_path, history_target)
+     print("[update-pkg] finalized %s (tar=%s)" % (history_target, tar_path))
+     return tar_path
+
+
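+ # Refuses to run without an active bundle; after finalizing each root it
+ # re-syncs the baseline, and a missing baseline is synced even when there
+ # was nothing to finalize.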
+ def finalize_pkg_release(cfg, pkg_id):
+     """Finalize latest release bundle by moving to HISTORY and creating tar."""
+     pkg_dir = _pkg_dir(cfg, pkg_id)
+     if not os.path.exists(pkg_dir):
+         raise RuntimeError("pkg dir not found: %s" % pkg_dir)
+     pkg_cfg_path = os.path.join(pkg_dir, "pkg.yaml")
+     pkg_cfg = config.load_pkg_config(pkg_cfg_path)
+     release_root = os.path.join(pkg_dir, "release")
+     active_release_found = False
+     if os.path.isdir(release_root):
+         for name in os.listdir(release_root):
+             root_dir = os.path.join(release_root, name)
+             if not os.path.isdir(root_dir) or name == "HISTORY":
+                 continue
+             if _list_release_versions(root_dir, include_history=False):
+                 active_release_found = True
+                 break
+     if not active_release_found:
+         print("[update-pkg] no active release; run `pkgmgr update-pkg %s` first" % pkg_id)
+         return []
+     source_files = _collect_release_sources(pkg_dir, pkg_cfg)
+     grouped = {}
+     for src, rel in source_files:
+         parts = rel.split("/", 1)
+         if len(parts) == 2:
+             root, subrel = parts[0], parts[1]
+         else:
+             root, subrel = "root", rel
+         grouped.setdefault(root, []).append((src, subrel))
+     finalized = []
+
+     if not os.path.isdir(release_root):
+         print("[update-pkg] release root missing: %s" % release_root)
+         return finalized
+
+     for name in sorted(os.listdir(release_root)):
+         root_dir = os.path.join(release_root, name)
+         if not os.path.isdir(root_dir):
+             continue
+         if name == "HISTORY":
+             continue
+         tar_path = _finalize_release_root(root_dir)
+         if tar_path:
+             _sync_baseline_root(root_dir, grouped.get(name, []))
+             finalized.append(tar_path)
+
+     baseline_synced = False
+     if not finalized:
+         for name in sorted(os.listdir(release_root)):
+             root_dir = os.path.join(release_root, name)
+             if not os.path.isdir(root_dir):
+                 continue
+             if name == "HISTORY":
+                 continue
+             baseline_dir = os.path.join(root_dir, "HISTORY", "BASELINE")
+             if os.path.isdir(baseline_dir):
+                 continue
+             _sync_baseline_root(root_dir, grouped.get(name, []))
+             baseline_synced = True
+             print("[update-pkg] baseline synced for %s" % root_dir)
+
+     if not finalized and not baseline_synced:
+         print("[update-pkg] no release bundles finalized")
+     return finalized
+
+
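+ # update-pkg pipeline: git keyword hits -> release file listing -> bundle
+ # prep -> write updates/update-<ts>.json -> release history -> summary.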
+ def update_pkg(cfg, pkg_id):
+     """Collect git keyword hits and release checksums into a timestamped history."""
+     pkg_dir = _pkg_dir(cfg, pkg_id)
+     if not os.path.exists(pkg_dir):
+         raise RuntimeError("pkg dir not found: %s" % pkg_dir)
+     pkg_cfg_path = os.path.join(pkg_dir, "pkg.yaml")
+     pkg_cfg = config.load_pkg_config(pkg_cfg_path)
+
+     ts = time.strftime("%Y%m%dT%H%M%S", time.localtime())
+     updates_dir = os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id), "updates")
+     if not os.path.exists(updates_dir):
+         os.makedirs(updates_dir)
+
+     git_info, git_files = _collect_git_hits(pkg_cfg, pkg_dir)
+     release_files = _collect_release_files(pkg_dir, pkg_cfg)
+
+     release_bundle = _prepare_release(pkg_dir, pkg_cfg)
+
+     data = {
+         "pkg_id": str(pkg_id),
+         "run_at": ts,
+         "git": git_info,
+         "checksums": {
+             "git_files": _hash_paths(git_files),
+             "release_files": _hash_paths(release_files),
+         },
+         "release": release_bundle,
+     }
+
+     out_path = os.path.join(updates_dir, "update-%s.json" % ts)
+     with open(out_path, "w") as f:
+         json.dump(data, f, ensure_ascii=False, indent=2, sort_keys=True)
+     print("[update-pkg] wrote %s" % out_path)
+     _write_release_history(pkg_id, ts, release_bundle)
+     _update_pkg_summary(pkg_id)
+     return out_path