pkgmgr-kunrunic 0.1.1.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pkgmgr/__init__.py +16 -0
- pkgmgr/__main__.py +5 -0
- pkgmgr/cli.py +320 -0
- pkgmgr/collectors/__init__.py +5 -0
- pkgmgr/collectors/base.py +15 -0
- pkgmgr/collectors/checksums.py +35 -0
- pkgmgr/config.py +408 -0
- pkgmgr/points.py +98 -0
- pkgmgr/release.py +1031 -0
- pkgmgr/shell_integration.py +120 -0
- pkgmgr/snapshot.py +306 -0
- pkgmgr/templates/pkg.yaml.sample +16 -0
- pkgmgr/templates/pkgmgr.yaml.sample +51 -0
- pkgmgr/watch.py +79 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/METADATA +159 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/RECORD +24 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/WHEEL +5 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/entry_points.txt +2 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/licenses/LICENSE +21 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/top_level.txt +3 -0
- plugin/export_cksum.py +354 -0
- plugin/export_pkgstore.py +117 -0
- plugin/export_source_review.py +499 -0
- tools/echo_args.py +15 -0
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
from __future__ import print_function
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import sys
|
|
7
|
+
import socket
|
|
8
|
+
import subprocess
|
|
9
|
+
import tempfile
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _default_src():
|
|
13
|
+
home = os.path.expanduser("~")
|
|
14
|
+
return os.path.join(home, "pkgmgr", "local", "state")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _copy_tree(src, dest):
|
|
18
|
+
for base, dirs, files in os.walk(src):
|
|
19
|
+
rel = os.path.relpath(base, src)
|
|
20
|
+
dest_dir = dest if rel == "." else os.path.join(dest, rel)
|
|
21
|
+
if not os.path.exists(dest_dir):
|
|
22
|
+
os.makedirs(dest_dir)
|
|
23
|
+
for name in files:
|
|
24
|
+
s = os.path.join(base, name)
|
|
25
|
+
d = os.path.join(dest_dir, name)
|
|
26
|
+
shutil.copy2(s, d)
|
|
27
|
+
for name in dirs:
|
|
28
|
+
d = os.path.join(dest_dir, name)
|
|
29
|
+
if not os.path.exists(d):
|
|
30
|
+
os.makedirs(d)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _default_release_root():
|
|
34
|
+
home = os.path.expanduser("~")
|
|
35
|
+
return os.path.join(home, "PKG", "RELEASE")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _copy_export_dirs(release_root, dest_state_root):
    """Mirror each ``<release_root>/<pkg>/export`` into ``<dest>/pkg/<pkg>/export``.

    Silently does nothing when *release_root* does not exist; packages
    without an ``export`` directory are skipped.
    """
    if not os.path.isdir(release_root):
        return
    pkg_root = os.path.join(dest_state_root, "pkg")
    if not os.path.exists(pkg_root):
        os.makedirs(pkg_root)
    for entry in os.listdir(release_root):
        entry_dir = os.path.join(release_root, entry)
        if not os.path.isdir(entry_dir):
            continue
        export_src = os.path.join(entry_dir, "export")
        if not os.path.isdir(export_src):
            continue
        _copy_tree(export_src, os.path.join(pkg_root, entry, "export"))
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def export_pkgstore(src, dest, clean=False, release_root=None):
    """Copy the pkgmgr state tree at *src* into *dest*.

    Raises RuntimeError when *src* is not a directory.  With ``clean=True``
    the destination is removed first; when *release_root* is given, each
    package's ``export/`` directory is merged in afterwards.
    """
    if not os.path.isdir(src):
        raise RuntimeError("source not found: %s" % src)
    if clean and os.path.exists(dest):
        shutil.rmtree(dest)
    if not os.path.exists(dest):
        os.makedirs(dest)
    _copy_tree(src, dest)
    if release_root:
        _copy_export_dirs(release_root, dest)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def main(argv=None):
    """CLI entry point: export local pkgmgr state into a pkgstore layout.

    Copies the state tree (plus per-package ``export/`` dirs from the
    release root) into ``<dest>/state[/systems/<system>]``, then optionally
    rsyncs the result to a remote host.  Returns 0 on success; rsync
    failures propagate as ``subprocess.CalledProcessError``.
    """
    parser = argparse.ArgumentParser(description="Export pkgmgr state into a pkgstore directory.")
    parser.add_argument("--src", default=_default_src(), help="pkgmgr state root (default: ~/pkgmgr/local/state)")
    parser.add_argument("--dest", help="pkgstore destination root (will create /state)")
    parser.add_argument("--clean", action="store_true", help="clean destination state before export")
    parser.add_argument("--release-root", default=_default_release_root(), help="PKG/RELEASE root (default: ~/PKG/RELEASE)")
    parser.add_argument("--system", help="system identifier (writes to pkgstore/state/systems/<system>)")
    parser.add_argument("--push", help="rsync target like user@host (pushes to remote)")
    parser.add_argument("--remote-dest", default="~/data/pkgstore", help="remote pkgstore root (default: ~/data/pkgstore)")
    parser.add_argument("--identity", help="ssh private key path for rsync (optional)")
    args = parser.parse_args(argv)
    if not args.dest and not args.push:
        parser.error("--dest is required when --push is not set")

    src = os.path.abspath(os.path.expanduser(args.src))
    tmp_root = None
    if args.dest:
        dest_root = os.path.abspath(os.path.expanduser(args.dest))
    else:
        # No explicit destination: stage into a temp dir before pushing.
        tmp_root = tempfile.mkdtemp(prefix="pkgstore_")
        dest_root = tmp_root
    try:
        system_name = args.system or socket.gethostname()
        if system_name:
            dest_state = os.path.join(dest_root, "state", "systems", system_name)
        else:
            dest_state = os.path.join(dest_root, "state")
        release_root = os.path.abspath(os.path.expanduser(args.release_root)) if args.release_root else None

        export_pkgstore(src, dest_state, clean=args.clean, release_root=release_root)
        print("[export_pkgstore] synced %s -> %s" % (src, dest_state))

        if args.push:
            remote_root = args.remote_dest.rstrip("/")
            if system_name:
                remote_state = "%s/state/systems/%s" % (remote_root, system_name)
            else:
                remote_state = "%s/state" % remote_root
            src_dir = dest_state.rstrip("/") + "/"
            rsync_cmd = ["rsync", "-avzc", "--delete"]
            if args.identity:
                rsync_cmd.extend(["-e", "ssh -i %s" % args.identity])
            rsync_cmd.extend([src_dir, "%s:%s" % (args.push, remote_state)])
            print("[export_pkgstore] rsync -> %s" % (remote_state,))
            subprocess.check_call(rsync_cmd)
    finally:
        # Fix: previously the staging dir was only removed on the success
        # path, so a failing export or rsync leaked the temp directory.
        if tmp_root:
            shutil.rmtree(tmp_root, ignore_errors=True)
    return 0
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
# Allow running this module directly as a script; exit with main()'s code.
if __name__ == "__main__":
    sys.exit(main())
|
|
@@ -0,0 +1,499 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
import argparse
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
import fnmatch
|
|
10
|
+
|
|
11
|
+
from pkgmgr import config
|
|
12
|
+
|
|
13
|
+
try:
|
|
14
|
+
from docx import Document
|
|
15
|
+
from docx.shared import Pt, RGBColor
|
|
16
|
+
except Exception:
|
|
17
|
+
Document = None
|
|
18
|
+
Pt = None
|
|
19
|
+
RGBColor = None
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _load_pkg_yaml(pkg_dir, pkg_yaml):
|
|
23
|
+
if pkg_yaml:
|
|
24
|
+
return pkg_yaml
|
|
25
|
+
if not pkg_dir:
|
|
26
|
+
return None
|
|
27
|
+
return os.path.join(pkg_dir, "pkg.yaml")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _read_update_json(path):
|
|
31
|
+
with open(path, "rb") as f:
|
|
32
|
+
raw = f.read()
|
|
33
|
+
for encoding in ("utf-8", "euc-kr", "cp949"):
|
|
34
|
+
try:
|
|
35
|
+
return json.loads(raw.decode(encoding))
|
|
36
|
+
except Exception:
|
|
37
|
+
continue
|
|
38
|
+
return json.loads(raw.decode("utf-8", errors="replace"))
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _find_latest_update(pkg_id):
    """Path of the latest ``update-*.json`` for *pkg_id*, or None.

    "Latest" means the lexically greatest matching filename under the
    package's ``updates`` directory in the pkgmgr state tree.
    """
    updates_dir = os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(pkg_id), "updates")
    if not os.path.isdir(updates_dir):
        return None
    names = sorted(
        n for n in os.listdir(updates_dir)
        if n.startswith("update-") and n.endswith(".json")
    )
    if not names:
        return None
    return os.path.join(updates_dir, names[-1])
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _decode_output(raw):
|
|
53
|
+
if isinstance(raw, str):
|
|
54
|
+
return raw.strip()
|
|
55
|
+
for encoding in ("utf-8", "euc-kr", "cp949"):
|
|
56
|
+
try:
|
|
57
|
+
return raw.decode(encoding).strip()
|
|
58
|
+
except Exception:
|
|
59
|
+
continue
|
|
60
|
+
return raw.decode("utf-8", errors="replace").strip()
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _git_rev_parse(repo_root, args):
    """Run ``git rev-parse <args>`` in *repo_root*; stripped stdout or None.

    Any failure (missing git, bad ref, bad cwd) yields None.
    """
    cmd = ["git", "rev-parse"] + args
    try:
        out = subprocess.check_output(cmd, cwd=repo_root, stderr=subprocess.STDOUT)
    except Exception:
        return None
    return _decode_output(out)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _git_log_first_commit(repo_root, keyword):
    """Hash of the oldest commit on any ref whose message matches *keyword*.

    *keyword* is passed to ``git log --grep`` as a case-insensitive regex;
    ``--reverse`` puts the oldest match first.  Returns None when git
    fails or nothing matches.
    """
    cmd = [
        "git",
        "--no-pager",
        "log",
        "--reverse",
        "--format=%H",
        "--grep=%s" % keyword,
        "--regexp-ignore-case",
        "--all",
        "--",
    ]
    try:
        raw = subprocess.check_output(cmd, cwd=repo_root, stderr=subprocess.STDOUT)
    except Exception:
        return None
    hashes = _decode_output(raw).splitlines()
    return hashes[0].strip() if hashes else None
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _resolve_repo_root(pkg_cfg, pkg_dir, update_data):
    """Locate the git repository root to diff against.

    Resolution order:
      1. ``pkg_cfg["git"]["repo_root"]`` — expanded; relative paths are
         resolved against *pkg_dir* (or the CWD) — when it is a directory;
      2. ``git rev-parse --show-toplevel`` run from the parent directory of
         each path recorded under ``checksums.git_files`` in the update JSON;
      3. ``git rev-parse --show-toplevel`` from the current directory.

    Returns the root path, or None when no repository can be found.
    """
    repo_root = (pkg_cfg.get("git") or {}).get("repo_root") if pkg_cfg else None
    if repo_root:
        repo_root = os.path.expanduser(repo_root)
        if not os.path.isabs(repo_root):
            repo_root = os.path.abspath(os.path.join(pkg_dir or os.getcwd(), repo_root))
        if os.path.isdir(repo_root):
            return repo_root
    # Fallback: any file recorded in the update JSON should live inside
    # the repo, so ask git from each file's directory.
    git_files = list(((update_data.get("checksums") or {}).get("git_files") or {}).keys())
    for path in git_files:
        base = os.path.dirname(path)
        root = _git_rev_parse(base, ["--show-toplevel"])
        if root:
            return root
    root = _git_rev_parse(os.getcwd(), ["--show-toplevel"])
    return root
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _resolve_pkg_output_dir(pkg_id, pkg_cfg, config_path=None):
    """Directory where export artifacts for *pkg_id* should be written.

    Prefers ``pkg_cfg["pkg"]["root"]`` when available; otherwise falls back
    to ``<pkg_release_root>/<pkg_id>`` from the pkgmgr main config, loaded
    non-interactively.  Returns None when neither source yields a path.
    """
    if pkg_cfg:
        pkg_block = pkg_cfg.get("pkg") or {}
        root = pkg_block.get("root")
        if root:
            return os.path.abspath(os.path.expanduser(str(root)))
    # Load the main config without prompting; any failure means "unknown"
    # rather than an error, so callers can fall back further.
    if config_path:
        try:
            main_cfg = config.load_main(path=config_path, allow_interactive=False)
        except Exception:
            main_cfg = None
    else:
        try:
            main_cfg = config.load_main(allow_interactive=False)
        except Exception:
            main_cfg = None
    if not main_cfg:
        return None
    release_root = main_cfg.get("pkg_release_root")
    if not release_root:
        return None
    return os.path.abspath(os.path.expanduser(os.path.join(release_root, str(pkg_id))))
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def _collect_keywords(git_info):
|
|
138
|
+
keywords = set()
|
|
139
|
+
for kw in git_info.get("keywords") or []:
|
|
140
|
+
if kw:
|
|
141
|
+
keywords.add(str(kw))
|
|
142
|
+
for commit in git_info.get("commits") or []:
|
|
143
|
+
for kw in commit.get("keywords") or []:
|
|
144
|
+
if kw:
|
|
145
|
+
keywords.add(str(kw))
|
|
146
|
+
return sorted(keywords)
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def _select_keyword(git_info):
    """Choose a keyword from *git_info*.

    Returns ``(keyword, ambiguous)`` — the first keyword in sorted order,
    with ``ambiguous=True`` when more than one distinct keyword exists.
    ``(None, False)`` when there are none.
    """
    keywords = _collect_keywords(git_info)
    if not keywords:
        return None, False
    return keywords[0], len(keywords) > 1
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def _parse_commit_time(value):
|
|
159
|
+
if not value:
|
|
160
|
+
return None
|
|
161
|
+
try:
|
|
162
|
+
return time.strptime(value, "%a %b %d %H:%M:%S %Y %z")
|
|
163
|
+
except Exception:
|
|
164
|
+
return None
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _find_first_commit(commits, keyword):
    """Earliest commit (by parseable author date) tagged with *keyword*.

    Commits without a parseable ``authored_at``/``date`` are excluded from
    the date ordering; when none parse, the lexically smallest hash wins
    as a deterministic fallback.  Returns None when nothing matches.
    """
    matches = [c for c in commits if keyword in (c.get("keywords") or [])]
    if not matches:
        return None
    dated = []
    for commit in matches:
        stamp = _parse_commit_time(commit.get("authored_at") or commit.get("date"))
        if stamp is not None:
            dated.append((stamp, commit))
    if dated:
        return min(dated, key=lambda pair: pair[0])[1]
    return sorted(matches, key=lambda c: c.get("hash") or "")[0]
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def _collect_files(commits, keyword):
|
|
182
|
+
files = set()
|
|
183
|
+
for commit in commits:
|
|
184
|
+
if keyword not in (commit.get("keywords") or []):
|
|
185
|
+
continue
|
|
186
|
+
for path in commit.get("files") or []:
|
|
187
|
+
if path:
|
|
188
|
+
files.add(path)
|
|
189
|
+
return sorted(files)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _parse_ignore_patterns(values):
|
|
193
|
+
patterns = []
|
|
194
|
+
for raw in values or []:
|
|
195
|
+
if raw is None:
|
|
196
|
+
continue
|
|
197
|
+
if isinstance(raw, (list, tuple)):
|
|
198
|
+
items = raw
|
|
199
|
+
else:
|
|
200
|
+
items = str(raw).replace("\n", ",").replace(";", ",").split(",")
|
|
201
|
+
for item in items:
|
|
202
|
+
item = item.strip()
|
|
203
|
+
if item:
|
|
204
|
+
patterns.append(item)
|
|
205
|
+
return patterns
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def _is_ignored(path, repo_root, patterns):
|
|
209
|
+
if not patterns:
|
|
210
|
+
return False
|
|
211
|
+
candidates = [path]
|
|
212
|
+
basename = os.path.basename(path)
|
|
213
|
+
if basename:
|
|
214
|
+
candidates.append(basename)
|
|
215
|
+
if repo_root and not os.path.isabs(path):
|
|
216
|
+
candidates.append(os.path.join(repo_root, path))
|
|
217
|
+
if repo_root and os.path.isabs(path):
|
|
218
|
+
try:
|
|
219
|
+
candidates.append(os.path.relpath(path, repo_root))
|
|
220
|
+
except Exception:
|
|
221
|
+
pass
|
|
222
|
+
for candidate in candidates:
|
|
223
|
+
for pattern in patterns:
|
|
224
|
+
if fnmatch.fnmatch(candidate, pattern):
|
|
225
|
+
return True
|
|
226
|
+
return False
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def _git_parent(repo_root, commit_hash):
    """First parent of *commit_hash*; the commit itself for a root commit."""
    return _git_rev_parse(repo_root, ["%s^" % commit_hash]) or commit_hash
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def _git_file_exists(repo_root, commit_hash, path):
    """True when *path* exists in the tree of *commit_hash* (via ``git cat-file -e``)."""
    spec = "%s:%s" % (commit_hash, path)
    proc = subprocess.run(
        ["git", "cat-file", "-e", spec],
        cwd=repo_root,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    return proc.returncode == 0
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def _git_diff_no_index(repo_root, abs_path):
    """Diff *abs_path* against /dev/null, rendering the whole file as added."""
    proc = subprocess.run(
        ["git", "--no-pager", "diff", "--no-index", "/dev/null", abs_path],
        cwd=repo_root,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    return _decode_output(proc.stdout)
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def _git_log_name_status(repo_root, path):
    """Raw ``git log --follow --name-status --format=%H`` output for *path*.

    Used to trace renames/deletes of a file through history; errors are
    folded into stdout (stderr redirected) and decoded best-effort.
    """
    proc = subprocess.run(
        [
            "git",
            "--no-pager",
            "log",
            "--follow",
            "--name-status",
            "--format=%H",
            "--",
            path,
        ],
        cwd=repo_root,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    return _decode_output(proc.stdout)
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def _resolve_paths_with_history(repo_root, path):
    """Follow renames/deletes of *path* through ``git log --follow --name-status``.

    Returns ``(paths, last_commit_with_file)`` where *paths* is a unique,
    ordered list of names this file has had (the given name first), built
    from rename (``Rxxx``) and delete (``D``) entries in the log.
    """
    output = _git_log_name_status(repo_root, path)
    if not output:
        return [path], None
    current = None
    paths = [path]
    last_commit_with_file = None
    for line in output.splitlines():
        line = line.strip()
        if not line:
            continue
        # Heuristic: a bare %H line has neither tab nor space, unlike the
        # tab-separated name-status lines.  Each hash line overwrites the
        # variable, so after the loop it holds the final hash printed —
        # the oldest commit in git log's default newest-first order.
        # NOTE(review): the name suggests "most recent commit containing
        # the file" was intended — confirm against callers.
        if "\t" not in line and " " not in line:
            last_commit_with_file = line
            continue
        parts = line.split("\t")
        status = parts[0]
        if status.startswith("R") and len(parts) >= 3:
            # Rename entry: "<Rxxx>\t<old>\t<new>".
            old_path, new_path = parts[1], parts[2]
            current = new_path
            paths.append(new_path)
            continue
        if status.startswith("D") and len(parts) >= 2:
            # Delete entry: remember the name the file had when removed.
            current = parts[1]
            paths.append(current)
    if current:
        paths.append(current)
    # keep unique order
    seen = set()
    result = []
    for item in paths:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result, last_commit_with_file
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def _pick_head_path(repo_root, path):
    """Resolve *path* to whatever name it has in HEAD, following renames.

    Returns the given path when HEAD already has it, the first historical
    spelling that HEAD contains otherwise, or None when the file is gone.
    """
    if _git_file_exists(repo_root, "HEAD", path):
        return path
    candidates, _ = _resolve_paths_with_history(repo_root, path)
    for name in candidates:
        if _git_file_exists(repo_root, "HEAD", name):
            return name
    return None
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def _git_diff(repo_root, start_commit, path):
    """Diff *path* between *start_commit* and HEAD, with fallbacks.

    When the plain ``git diff start..HEAD -- path`` is empty:
      * file present in HEAD but not in start_commit -> full-file diff
        against /dev/null (renders the file as all-added);
      * file absent in HEAD -> retry recursively under each historical
        name, then, if a commit still containing the file is known, diff
        ``start_commit..that commit`` instead.

    Returns the diff text (possibly empty), or an error message string
    when git itself fails.
    """
    cmd = [
        "git",
        "--no-pager",
        "diff",
        "-U3",
        start_commit,
        "HEAD",
        "-M",
        "-C",
        "--",
        path,
    ]
    try:
        out = subprocess.check_output(cmd, cwd=repo_root, stderr=subprocess.STDOUT)
        diff_text = _decode_output(out)
        if diff_text.strip():
            return diff_text
        # Empty diff: determine why before giving up.
        head_has = _git_file_exists(repo_root, "HEAD", path)
        start_has = _git_file_exists(repo_root, start_commit, path)
        if head_has and not start_has:
            # Added after start_commit: show the whole file as new.
            abs_path = os.path.join(repo_root, path)
            if os.path.exists(abs_path):
                return _git_diff_no_index(repo_root, abs_path)
        if not head_has:
            # Renamed or deleted: try every other name it has had.
            candidates, last_commit = _resolve_paths_with_history(repo_root, path)
            for candidate in candidates:
                if candidate == path:
                    continue
                diff_retry = _git_diff(repo_root, start_commit, candidate)
                if diff_retry.strip():
                    return diff_retry
            if last_commit and _git_file_exists(repo_root, last_commit, path):
                # Deleted file: diff up to the last commit that had it.
                cmd_deleted = [
                    "git",
                    "--no-pager",
                    "diff",
                    "-U3",
                    start_commit,
                    last_commit,
                    "-M",
                    "-C",
                    "--",
                    path,
                ]
                out_deleted = subprocess.check_output(
                    cmd_deleted, cwd=repo_root, stderr=subprocess.STDOUT
                )
                return _decode_output(out_deleted)
        return diff_text
    except Exception as exc:
        # Surface the failure inline so the document still gets an entry.
        return "[export_source_review] git diff failed for %s: %s" % (path, str(exc))
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
def _add_diff_table(doc, file_path, diff_text):
    """Append a 2-row, 1-column table to *doc*: header = file path, body = diff.

    Diff lines are rendered in 9pt Courier New when ``Pt`` is available;
    "+" lines (but not the "+++" file header) are colored red and "-"
    lines (not "---") gray when ``RGBColor`` is available.  All lines go
    into one paragraph separated by soft line breaks.
    """
    table = doc.add_table(rows=2, cols=1)
    try:
        # Not every document template ships the "Table Grid" style.
        table.style = "Table Grid"
    except Exception:
        pass
    header = table.cell(0, 0)
    header.text = file_path
    if header.paragraphs and header.paragraphs[0].runs:
        header.paragraphs[0].runs[0].bold = True
    body = table.cell(1, 0)
    body.text = ""
    paragraph = body.paragraphs[0]
    text = diff_text or "No changes in range."
    # splitlines() on a non-empty, newline-free string still yields one
    # element; the fallback covers the empty-string edge case.
    lines = text.splitlines() or [text]
    for idx, line in enumerate(lines):
        run = paragraph.add_run(line)
        if Pt is not None:
            run.font.name = "Courier New"
            run.font.size = Pt(9)
        if RGBColor is not None:
            if line.startswith("+") and not line.startswith("+++"):
                run.font.color.rgb = RGBColor(0xFF, 0x00, 0x00)
            elif line.startswith("-") and not line.startswith("---"):
                run.font.color.rgb = RGBColor(0x80, 0x80, 0x80)
        if idx < len(lines) - 1:
            run.add_break()
    # Spacer paragraph between consecutive tables.
    doc.add_paragraph("")
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def main(argv=None):
    """CLI entry point: export per-file diff history for a keyword to .docx.

    Pipeline: locate the latest update JSON for --pkg-id, pick a keyword
    from its git metadata, resolve the repo root, diff every file the
    keyword's commits touched (from the keyword's first commit's parent to
    HEAD), and write one table per file into a Word document.
    Returns 0 on success, 1 on any unrecoverable lookup failure.
    """
    argv = argv if argv is not None else sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="Export per-file diff history for a keyword into a Word document."
    )
    parser.add_argument("--config", help="pkgmgr main config path")
    parser.add_argument("--pkg-id", required=True, help="pkg id (used to locate latest update JSON)")
    parser.add_argument("--docx", required=True, help="output docx path")
    parser.add_argument("--ignore", action="append", help="glob patterns to ignore")
    args = parser.parse_args(argv)

    # python-docx is an optional dependency; bail out early if absent.
    if Document is None:
        print("[export_source_review] python-docx is required (pip install python-docx)")
        return 1

    # NOTE(review): these three stay None throughout — apparently
    # placeholders for future pkg.yaml wiring; confirm before removing.
    pkg_yaml = None
    pkg_dir = None
    pkg_cfg = None

    update_path = _find_latest_update(args.pkg_id)
    if not update_path:
        print("[export_source_review] update json not found for pkg: %s" % args.pkg_id)
        return 1
    update_path = os.path.abspath(os.path.expanduser(update_path))
    if not os.path.exists(update_path):
        print("[export_source_review] update json not found: %s" % update_path)
        return 1

    data = _read_update_json(update_path)
    git_info = data.get("git") or {}
    commits = git_info.get("commits") or []
    keyword, multi_keywords = _select_keyword(git_info)
    if not keyword:
        print("[export_source_review] keyword not found in update json for pkg: %s" % args.pkg_id)
        return 1
    if multi_keywords:
        print("[export_source_review] multiple keywords found; using %s" % keyword)

    repo_root = _resolve_repo_root(pkg_cfg, pkg_dir, data)
    if not repo_root:
        print("[export_source_review] repo root not found for pkg: %s" % args.pkg_id)
        return 1

    # Prefer asking git for the first commit; fall back to the metadata
    # recorded in the update JSON when git can't find the keyword.
    commit_hash = _git_log_first_commit(repo_root, keyword)
    if not commit_hash:
        first_commit = _find_first_commit(commits, keyword)
        if not first_commit:
            print("[export_source_review] no commits found for keyword: %s" % keyword)
            return 1
        commit_hash = first_commit.get("hash") or first_commit.get("commit")
    if not commit_hash:
        print("[export_source_review] commit hash missing for keyword: %s" % keyword)
        return 1
    # Diff from the parent so the first commit's own changes are included.
    start_commit = _git_parent(repo_root, commit_hash)
    file_list = _collect_files(commits, keyword)
    if not file_list:
        # Fallback: derive repo-relative paths from the checksum records.
        git_files = list(((data.get("checksums") or {}).get("git_files") or {}).keys())
        for path in git_files:
            if path.startswith(repo_root):
                file_list.append(os.path.relpath(path, repo_root))
        file_list = sorted(set(file_list))

    if not file_list:
        print("[export_source_review] no files found for keyword: %s" % keyword)
        return 1

    # CLI --ignore patterns plus the PKGMGR_REVIEW_IGNORE env var.
    ignore_patterns = _parse_ignore_patterns([args.ignore, os.environ.get("PKGMGR_REVIEW_IGNORE")])

    doc = Document()
    doc.add_paragraph("Source Review Export")
    doc.add_paragraph("Keyword: %s" % keyword)
    doc.add_paragraph("Range: %s..HEAD" % start_commit)
    doc.add_paragraph("Update JSON: %s" % update_path)
    doc.add_paragraph("")

    for path in file_list:
        if _is_ignored(path, repo_root, ignore_patterns):
            print("[export_source_review] skip (ignored): %s" % path)
            continue
        head_path = _pick_head_path(repo_root, path)
        if not head_path:
            print("[export_source_review] skip (file deleted or untracked in HEAD): %s" % path)
            continue
        # Re-check after rename resolution: the HEAD name may be ignored
        # even when the historical name was not.
        if _is_ignored(head_path, repo_root, ignore_patterns):
            print("[export_source_review] skip (ignored): %s" % head_path)
            continue
        diff_text = _git_diff(repo_root, start_commit, head_path)
        _add_diff_table(doc, head_path, diff_text)

    out_path = args.docx
    if not out_path.lower().endswith(".docx"):
        out_path = out_path + ".docx"
    config_path = args.config or os.environ.get("PKGMGR_CONFIG")
    # A bare filename (no path separator) is placed under the package's
    # export directory; an explicit path is used as-is.
    if os.sep not in out_path:
        base_dir = _resolve_pkg_output_dir(args.pkg_id, pkg_cfg, config_path=config_path)
        if not base_dir:
            base_dir = pkg_dir or os.path.join(config.DEFAULT_STATE_DIR, "pkg", str(args.pkg_id))
        export_dir = os.path.join(base_dir, "export")
        out_path = os.path.join(export_dir, out_path)
    out_dir = os.path.dirname(os.path.abspath(out_path))
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir)
    doc.save(out_path)
    print("[export_source_review] wrote %s" % out_path)
    return 0
|
|
496
|
+
|
|
497
|
+
|
|
498
|
+
# Allow running this module directly as a script; exit with main()'s code.
if __name__ == "__main__":
    sys.exit(main())
|