pkgmgr-kunrunic 0.1.1.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pkgmgr/__init__.py +16 -0
- pkgmgr/__main__.py +5 -0
- pkgmgr/cli.py +320 -0
- pkgmgr/collectors/__init__.py +5 -0
- pkgmgr/collectors/base.py +15 -0
- pkgmgr/collectors/checksums.py +35 -0
- pkgmgr/config.py +408 -0
- pkgmgr/points.py +98 -0
- pkgmgr/release.py +1031 -0
- pkgmgr/shell_integration.py +120 -0
- pkgmgr/snapshot.py +306 -0
- pkgmgr/templates/pkg.yaml.sample +16 -0
- pkgmgr/templates/pkgmgr.yaml.sample +51 -0
- pkgmgr/watch.py +79 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/METADATA +159 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/RECORD +24 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/WHEEL +5 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/entry_points.txt +2 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/licenses/LICENSE +21 -0
- pkgmgr_kunrunic-0.1.1.dev4.dist-info/top_level.txt +3 -0
- plugin/export_cksum.py +354 -0
- plugin/export_pkgstore.py +117 -0
- plugin/export_source_review.py +499 -0
- tools/echo_args.py +15 -0
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
from __future__ import print_function
|
|
2
|
+
"""Shell integration helpers: print PATH/alias instructions per shell."""
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
|
|
6
|
+
from . import config
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def ensure_path_and_alias(script_dir, alias_name="pkg", command="pkgmgr"):
    """
    Print PATH/alias instructions for the current shell.

    script_dir: directory where the pkgmgr console script lives (e.g. venv/bin).
    alias_name: alias to suggest in the rc snippet (default "pkg").
    command: command the alias should invoke (default "pkgmgr").

    Best-effort: never raises; prints a diagnostic and returns on any failure.
    """
    if not script_dir:
        print("[install] script_dir not provided; skip shell integration")
        return
    shell = os.environ.get("SHELL", "")
    shell_name = os.path.basename(shell) if shell else ""

    try:
        lines = _instructions_for_shell(shell_name, script_dir, alias_name, command)
    except Exception as e:
        print("[install] shell integration failed for %s: %s" % (shell_name, str(e)))
        return

    if not lines:
        print("[install] unknown shell '%s'; skipping rc update" % (shell_name or ""))
        return

    if not _path_contains_dir(script_dir):
        print("[install] PATH missing: %s" % script_dir)
        print("[install] add to PATH, for example:")
        for line in _path_only_instructions(shell_name, script_dir):
            print(" " + line)

    header = "[install] To use pkgmgr, add these lines to your shell rc:"
    # Build the header+instructions list once; it is both printed and saved
    # (the original built it twice).
    emitted = _emit_lines_with_header(header, lines)
    for line in emitted:
        print(line)
    readme_path = _write_readme(emitted)
    if readme_path:
        print("[install] Reference saved to: %s" % readme_path)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _instructions_for_shell(shell_name, script_dir, alias_name, command):
|
|
45
|
+
if shell_name == "bash":
|
|
46
|
+
lines = [
|
|
47
|
+
'export PATH="%s:$PATH"' % script_dir,
|
|
48
|
+
'alias %s="%s"' % (alias_name, command),
|
|
49
|
+
]
|
|
50
|
+
return lines
|
|
51
|
+
if shell_name == "zsh":
|
|
52
|
+
lines = [
|
|
53
|
+
'export PATH="%s:$PATH"' % script_dir,
|
|
54
|
+
'alias %s="%s"' % (alias_name, command),
|
|
55
|
+
]
|
|
56
|
+
return lines
|
|
57
|
+
if shell_name in ("csh", "tcsh"):
|
|
58
|
+
lines = [
|
|
59
|
+
"set path = (%s $path)" % script_dir,
|
|
60
|
+
"alias %s %s" % (alias_name, command),
|
|
61
|
+
]
|
|
62
|
+
return lines
|
|
63
|
+
if shell_name == "fish":
|
|
64
|
+
lines = [
|
|
65
|
+
"set -U fish_user_paths %s $fish_user_paths" % script_dir,
|
|
66
|
+
"alias %s %s" % (alias_name, command),
|
|
67
|
+
]
|
|
68
|
+
return lines
|
|
69
|
+
return None
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _path_only_instructions(shell_name, script_dir):
|
|
73
|
+
if shell_name == "bash":
|
|
74
|
+
return ['export PATH="%s:$PATH"' % script_dir]
|
|
75
|
+
if shell_name == "zsh":
|
|
76
|
+
return ['export PATH="%s:$PATH"' % script_dir]
|
|
77
|
+
if shell_name in ("csh", "tcsh"):
|
|
78
|
+
return ["set path = (%s $path)" % script_dir]
|
|
79
|
+
if shell_name == "fish":
|
|
80
|
+
return ["set -U fish_user_paths %s $fish_user_paths" % script_dir]
|
|
81
|
+
return ['export PATH="%s:$PATH"' % script_dir]
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _path_contains_dir(path):
|
|
85
|
+
if not path:
|
|
86
|
+
return False
|
|
87
|
+
try:
|
|
88
|
+
target = os.path.realpath(os.path.abspath(os.path.expanduser(path)))
|
|
89
|
+
except Exception:
|
|
90
|
+
target = path
|
|
91
|
+
for entry in os.environ.get("PATH", "").split(os.pathsep):
|
|
92
|
+
if not entry:
|
|
93
|
+
continue
|
|
94
|
+
try:
|
|
95
|
+
entry_path = os.path.realpath(os.path.abspath(os.path.expanduser(entry)))
|
|
96
|
+
except Exception:
|
|
97
|
+
entry_path = entry
|
|
98
|
+
if entry_path == target:
|
|
99
|
+
return True
|
|
100
|
+
return False
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _emit_lines_with_header(header, lines):
|
|
104
|
+
out = [header]
|
|
105
|
+
out.extend(lines)
|
|
106
|
+
return out
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _write_readme(lines):
    """Write *lines* to BASE_DIR/README.txt; return its path, or None on failure."""
    readme_path = os.path.join(config.BASE_DIR, "README.txt")
    try:
        parent = os.path.dirname(readme_path)
        if parent and not os.path.exists(parent):
            os.makedirs(parent)
        with open(readme_path, "w") as fh:
            for entry in lines:
                fh.write(entry + "\n")
        return readme_path
    except Exception:
        # Best-effort: failing to persist the reference file is not fatal.
        return None
|
pkgmgr/snapshot.py
ADDED
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
from __future__ import print_function
|
|
2
|
+
"""Snapshot utilities: hashing and state persistence."""
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import fnmatch
|
|
8
|
+
import time
|
|
9
|
+
import sys
|
|
10
|
+
|
|
11
|
+
from . import config
|
|
12
|
+
|
|
13
|
+
STATE_DIR = config.DEFAULT_STATE_DIR
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class DuplicateBaselineError(RuntimeError):
    # Raised from the baseline-overwrite guard when the user declines the
    # overwrite, or when stdin is non-interactive; callers treat it as
    # "keep the existing baseline".
    """Raised when attempting to create a baseline that already exists."""
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ProgressReporter(object):
    """
    TTY-friendly one-line progress reporter.

    Every public method is a no-op when stdout is not a TTY, so the reporter
    is safe to use unconditionally (pipes, cron, CI).
    """

    def __init__(self, prefix):
        # prefix: short tag rendered in brackets at the start of the line.
        self.prefix = prefix
        self._is_tty = sys.stdout.isatty()
        self._last_len = 0  # length of previous render, used to pad/overwrite
        self._label = None
        self._total = 0
        self._current = 0

    def start(self, label, total):
        """Begin a new progress section expecting *total* steps."""
        if not self._is_tty:
            return
        self._label = label
        self._total = int(total or 0)
        self._current = 0
        self._render()

    def advance(self, step=1):
        """Advance the counter by *step*, clamped to the total."""
        if not self._is_tty:
            return
        self._current += int(step or 0)
        if self._current > self._total:
            self._current = self._total
        self._render()

    def finish(self):
        """Render the final state and move to the next line."""
        if not self._is_tty:
            return
        # current == total also covers total == 0 (both become 0);
        # _render(final=True) then forces the 100% display.
        self._current = self._total
        self._render(final=True)
        sys.stdout.write("\n")
        sys.stdout.flush()

    def _render(self, final=False):
        total = self._total
        current = self._current
        denom = total if total > 0 else 1
        frac = float(current) / float(denom)
        if total == 0 and final:
            # Nothing to count: report a complete run. Fix: previously the
            # percent was forced to 100 while the bar stayed empty.
            frac = 1.0
        pct = int(frac * 100)
        bar_len = 30
        filled = int(frac * bar_len)
        bar = "#" * filled + "-" * (bar_len - filled)
        label = self._label or ""
        line = "[%s] %s %d/%d %3d%% [%s]" % (
            self.prefix,
            label,
            current,
            total,
            pct,
            bar,
        )
        # Pad with spaces so a shorter line fully overwrites the previous one.
        pad = " " * max(0, self._last_len - len(line))
        sys.stdout.write("\r" + line + pad)
        sys.stdout.flush()
        self._last_len = len(line)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _ensure_state_dir():
    """Create the state directory if it does not yet exist (race-safe)."""
    # exist_ok avoids the TOCTOU race of the old exists()/makedirs() pair
    # when two pkgmgr processes start concurrently.
    os.makedirs(STATE_DIR, exist_ok=True)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def _sha256(path, chunk=1024 * 1024):
|
|
89
|
+
h = hashlib.sha256()
|
|
90
|
+
f = open(path, "rb")
|
|
91
|
+
try:
|
|
92
|
+
while True:
|
|
93
|
+
b = f.read(chunk)
|
|
94
|
+
if not b:
|
|
95
|
+
break
|
|
96
|
+
h.update(b)
|
|
97
|
+
finally:
|
|
98
|
+
f.close()
|
|
99
|
+
return h.hexdigest()
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def _should_skip(relpath, patterns):
|
|
103
|
+
for p in patterns or []:
|
|
104
|
+
if fnmatch.fnmatch(relpath, p):
|
|
105
|
+
return True
|
|
106
|
+
return False
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _count_files(root_abs, exclude):
    """Count the files under *root_abs* that survive the exclude patterns."""
    matched = 0
    for base, _, names in os.walk(root_abs):
        for name in names:
            rel = os.path.relpath(os.path.join(base, name), root_abs).replace("\\", "/")
            if not _should_skip(rel, exclude):
                matched += 1
    return matched
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def _scan(root, exclude, progress=None, label=None):
    """
    Hash every file under *root*, returning {relpath: {hash, size, mtime}}.

    root: directory to walk (~ is expanded); a missing root yields {}.
    exclude: glob patterns matched against the forward-slash relative path.
    progress/label: when both are given, drives a ProgressReporter section.
    Unreadable files are warned about and skipped; the scan never raises.
    """
    res = {}
    root_abs = os.path.abspath(os.path.expanduser(root))
    if not os.path.exists(root_abs):
        print("[snap] skip missing root: %s" % root_abs)
        return res
    if progress and label:
        # Pre-count files so the progress bar has a meaningful total.
        total = _count_files(root_abs, exclude)
        progress.start(label, total)
    for base, _, files in os.walk(root_abs):
        for name in files:
            abspath = os.path.join(base, name)
            # Normalize to forward slashes so exclude globs are portable.
            rel = os.path.relpath(abspath, root_abs).replace("\\", "/")
            if _should_skip(rel, exclude):
                continue
            try:
                st = os.stat(abspath)
                res[rel] = {
                    "hash": _sha256(abspath),
                    "size": int(st.st_size),
                    "mtime": int(st.st_mtime),
                }
            except Exception as e:
                # Best-effort: files that vanish or are unreadable are skipped.
                print("[snap] warn skip %s: %s" % (abspath, str(e)))
            if progress:
                progress.advance()
    if progress and label:
        progress.finish()
    return res
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def _maybe_keep_existing_baseline(path, prompt_overwrite):
|
|
153
|
+
"""
|
|
154
|
+
When prompt_overwrite=True and baseline exists, ask user (if tty) whether to overwrite.
|
|
155
|
+
Raises DuplicateBaselineError when overwrite is declined or non-interactive.
|
|
156
|
+
"""
|
|
157
|
+
if not prompt_overwrite:
|
|
158
|
+
return None
|
|
159
|
+
if not os.path.exists(path):
|
|
160
|
+
return None
|
|
161
|
+
|
|
162
|
+
if not sys.stdin.isatty():
|
|
163
|
+
msg = "[baseline] existing baseline at %s; non-tty -> refusing overwrite" % path
|
|
164
|
+
raise DuplicateBaselineError(msg)
|
|
165
|
+
|
|
166
|
+
ans = input("[baseline] existing baseline at %s; overwrite? [y/N]: " % path).strip().lower()
|
|
167
|
+
if ans not in ("y", "yes"):
|
|
168
|
+
msg = "[baseline] keeping existing baseline; skipped overwrite"
|
|
169
|
+
raise DuplicateBaselineError(msg)
|
|
170
|
+
return None
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _scan_artifacts(cfg, progress=None):
    """
    Scan configured artifact targets, mirroring how sources are scanned.

    Config keys (all optional):
      artifacts.root    - base directory for relative targets
      artifacts.targets - list of names or absolute paths
      artifacts.exclude - glob patterns to skip
    Returns {absolute_target_path: {relpath: file_meta}}.
    """
    art_cfg = cfg.get("artifacts") or {}
    root = art_cfg.get("root")
    excludes = art_cfg.get("exclude") or []
    base = os.path.abspath(os.path.expanduser(root)) if root else None

    scanned = {}
    for target in (art_cfg.get("targets") or []):
        candidate = str(target)
        # Relative targets resolve against artifacts.root when configured.
        if base and not os.path.isabs(candidate):
            candidate = os.path.join(base, candidate)
        full = os.path.abspath(os.path.expanduser(candidate))
        scanned[full] = _scan(full, excludes, progress=progress, label="artifact %s" % full)
    return scanned
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def create_baseline(cfg, prompt_overwrite=False, progress=None):
    """
    Collect and persist the initial baseline snapshot.

    Scans every configured source root plus artifacts (if configured) and
    writes the result to STATE_DIR/baseline.json. May raise
    DuplicateBaselineError via the overwrite guard.
    """
    _ensure_state_dir()

    data = {
        "meta": {
            "ts": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime()),
            "type": "baseline",
        },
        "sources": {},
        "artifacts": {},
    }

    excludes = (cfg.get("source") or {}).get("exclude", []) or []
    for root in (cfg.get("sources", []) or []):
        data["sources"][root] = _scan(
            root, excludes, progress=progress, label="source %s" % root
        )
    data["artifacts"] = _scan_artifacts(cfg, progress=progress)

    path = os.path.join(STATE_DIR, "baseline.json")
    existing = _maybe_keep_existing_baseline(path, prompt_overwrite)
    if existing is not None:
        return existing

    with open(path, "w") as fh:
        json.dump(data, fh, ensure_ascii=False, indent=2, sort_keys=True)
    print("[baseline] saved to %s" % path)
    return data
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def create_snapshot(cfg, progress=None):
    """
    Collect and persist a fresh snapshot (used for updates/watching).

    Scans every configured source root plus artifacts (if configured) and
    writes the result to STATE_DIR/snapshot.json.
    """
    _ensure_state_dir()

    data = {
        "meta": {
            "ts": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime()),
            "type": "snapshot",
        },
        "sources": {},
        "artifacts": {},
    }

    excludes = (cfg.get("source") or {}).get("exclude", []) or []
    for root in (cfg.get("sources", []) or []):
        data["sources"][root] = _scan(
            root, excludes, progress=progress, label="source %s" % root
        )
    data["artifacts"] = _scan_artifacts(cfg, progress=progress)

    path = os.path.join(STATE_DIR, "snapshot.json")
    with open(path, "w") as fh:
        json.dump(data, fh, ensure_ascii=False, indent=2, sort_keys=True)
    print("[snap] snapshot saved to %s" % path)
    return data
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def diff_snapshots(base, latest):
    """
    Compare two snapshot dicts and report sorted added/modified/deleted keys.

    Keys are "<root>/<relpath>" flattened across both the sources and
    artifacts sections. A file counts as modified when its recorded hash
    differs between the two snapshots.
    """
    def _flatten(snap):
        flat = {}
        for section in ("sources", "artifacts"):
            for root, entries in (snap or {}).get(section, {}).items():
                for rel, meta in (entries or {}).items():
                    flat[root + "/" + rel] = meta
        return flat

    before = _flatten(base)
    after = _flatten(latest)
    before_keys = set(before)
    after_keys = set(after)

    added = sorted(after_keys - before_keys)
    deleted = sorted(before_keys - after_keys)
    modified = sorted(
        k for k in before_keys & after_keys
        if before[k].get("hash") != after[k].get("hash")
    )
    return {"added": added, "modified": modified, "deleted": deleted}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
pkg:
|
|
2
|
+
id: "<pkg-id>"
|
|
3
|
+
root: "/path/to/release/<pkg-id>"
|
|
4
|
+
status: "open"
|
|
5
|
+
|
|
6
|
+
include:
|
|
7
|
+
releases: []
|
|
8
|
+
|
|
9
|
+
git:
|
|
10
|
+
repo_root: null # optional override; default is git rev-parse from cwd
|
|
11
|
+
keywords: []
|
|
12
|
+
since: null
|
|
13
|
+
until: null
|
|
14
|
+
|
|
15
|
+
collectors:
|
|
16
|
+
enabled: ["checksums"]
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
pkg_release_root: ~/PKG/RELEASE
|
|
2
|
+
sources:
|
|
3
|
+
- /path/to/source-A
|
|
4
|
+
- /path/to/source-B
|
|
5
|
+
|
|
6
|
+
source:
|
|
7
|
+
# 소스 스캔 제외 (glob 지원)
|
|
8
|
+
exclude:
|
|
9
|
+
- "**/build/**"
|
|
10
|
+
- "**/*.tmp"
|
|
11
|
+
- "**/bk"
|
|
12
|
+
- "**/*.sc*"
|
|
13
|
+
- "**/unit_test/**"
|
|
14
|
+
- "Jamrules*"
|
|
15
|
+
- "Jamfile*"
|
|
16
|
+
- "**/Jamrules*"
|
|
17
|
+
- "**/Jamfile*"
|
|
18
|
+
|
|
19
|
+
artifacts:
|
|
20
|
+
root: ~/HOME
|
|
21
|
+
targets: [bin, lib, data]
|
|
22
|
+
# 배포/설치 영역 제외 (glob/패턴)
|
|
23
|
+
exclude:
|
|
24
|
+
- log # 정확히 log 디렉터리
|
|
25
|
+
- tmp/** # tmp 이하 전체
|
|
26
|
+
- "*.bak" # 확장자 패턴
|
|
27
|
+
- "**/*.tmp" # 모든 하위에서 .tmp
|
|
28
|
+
|
|
29
|
+
watch:
|
|
30
|
+
interval_sec: 60
|
|
31
|
+
on_change: [] # 변경 발생 시 실행할 action 이름 리스트
|
|
32
|
+
|
|
33
|
+
collectors:
|
|
34
|
+
enabled: ["checksums"]
|
|
35
|
+
|
|
36
|
+
actions:
|
|
37
|
+
# 각 액션은 하나 이상의 커맨드를 가질 수 있음.
|
|
38
|
+
# 필드: cmd (필수, 보통 cwd 기준 경로), cwd(선택), env(선택; 이 커맨드에만 적용)
|
|
39
|
+
export_cksum:
|
|
40
|
+
- cmd: python export_cksum.py --pkg-dir /path/to/pkg --excel /path/to/template.xlsx
|
|
41
|
+
cwd: /app/script # 선택: 작업 디렉터리
|
|
42
|
+
env: { APP_ENV: dev } # 선택: 이 명령에만 적용할 환경변수
|
|
43
|
+
export_world_dev:
|
|
44
|
+
- cmd: python dev_world.py
|
|
45
|
+
cwd: /app/script
|
|
46
|
+
export_world_security:
|
|
47
|
+
- cmd: python security_world.py
|
|
48
|
+
cwd: /app/script
|
|
49
|
+
noti_email:
|
|
50
|
+
- cmd: sh noti_email.sh
|
|
51
|
+
cwd: /app/script
|
pkgmgr/watch.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
from __future__ import print_function
|
|
2
|
+
"""Watcher/daemon scaffold."""
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import time
|
|
7
|
+
|
|
8
|
+
from . import snapshot, release, points
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def run(cfg, run_once=False, pkg_id=None, auto_point=False, point_label=None):
    """
    Basic poller:
    - loads last point snapshot (if pkg_id provided) or baseline
    - takes a new snapshot and diffs it
    - runs watch.on_change actions when a diff exists
    - optionally creates a point after the actions
    Runs forever unless run_once is True.
    """
    interval = cfg.get("watch", {}).get("interval_sec", 60)
    print("[watch] starting poller interval=%ss once=%s pkg=%s auto_point=%s" % (interval, run_once, pkg_id, auto_point))
    while True:
        _tick(cfg, pkg_id=pkg_id, auto_point=auto_point, point_label=point_label)
        if run_once:
            return
        time.sleep(interval)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _load_json(path):
|
|
30
|
+
try:
|
|
31
|
+
with open(path, "r") as f:
|
|
32
|
+
return json.load(f)
|
|
33
|
+
except Exception:
|
|
34
|
+
return None
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _previous_snapshot(pkg_id):
    """
    Return the snapshot to diff against: the latest point snapshot for
    *pkg_id* when one exists, otherwise the baseline (None when missing).
    """
    if pkg_id:
        _, point_snap = points.load_latest_point(pkg_id)
        if point_snap:
            return point_snap
    return _load_json(os.path.join(snapshot.STATE_DIR, "baseline.json"))
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _tick(cfg, pkg_id=None, auto_point=False, point_label=None):
    """One poll iteration: diff against previous state, run actions, optionally create a point."""
    # Closed packages are never polled.
    if pkg_id and release.pkg_is_closed(pkg_id):
        print("[watch] pkg=%s is closed; skipping poll" % pkg_id)
        return

    prev_snap = _previous_snapshot(pkg_id)
    current_snap = snapshot.create_snapshot(cfg)

    if not prev_snap:
        # First run: nothing to diff, but actions/point still proceed below.
        print("[watch] no previous snapshot; treating as initial run")
    else:
        diff = snapshot.diff_snapshots(prev_snap, current_snap)
        if not any(diff.values()):
            print("[watch] no changes since last point/baseline")
            return
        print("[watch] changes detected: added=%d modified=%d deleted=%d" % (len(diff["added"]), len(diff["modified"]), len(diff["deleted"])))

    actions_to_run = (cfg.get("watch") or {}).get("on_change", []) or []
    results = []
    if actions_to_run:
        results = release.run_actions(cfg, actions_to_run)
    else:
        print("[watch] no watch.on_change actions configured")

    if auto_point and pkg_id:
        release.create_point(
            cfg,
            pkg_id,
            label=point_label or "watch-auto",
            actions_run=actions_to_run,
            actions_result=results,
            snapshot_data=current_snap,
        )
|