@misterhuydo/sentinel 1.4.66 → 1.4.68
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +21 -21
- package/python/sentinel/dependency_manager.py +206 -0
- package/python/sentinel/git_manager.py +113 -0
- package/python/sentinel/main.py +32 -4
- package/python/sentinel/notify.py +376 -331
- package/python/sentinel/sentinel_boss.py +306 -1
package/package.json
CHANGED
|
@@ -1,21 +1,21 @@
|
|
|
1
|
-
{
|
|
2
|
-
"name": "@misterhuydo/sentinel",
|
|
3
|
-
"version": "1.4.66",
|
|
4
|
-
"description": "Sentinel — Autonomous DevOps Agent installer and manager",
|
|
5
|
-
"bin": {
|
|
6
|
-
"sentinel": "./bin/sentinel.js"
|
|
7
|
-
},
|
|
8
|
-
"scripts": {
|
|
9
|
-
"prepublishOnly": "node scripts/test-scripts.js && node scripts/bundle.js"
|
|
10
|
-
},
|
|
11
|
-
"dependencies": {
|
|
12
|
-
"chalk": "^4.1.2",
|
|
13
|
-
"fs-extra": "^11.2.0",
|
|
14
|
-
"prompts": "^2.4.2"
|
|
15
|
-
},
|
|
16
|
-
"engines": {
|
|
17
|
-
"node": ">=16"
|
|
18
|
-
},
|
|
19
|
-
"author": "misterhuydo",
|
|
20
|
-
"license": "MIT"
|
|
21
|
-
}
|
|
1
|
+
{
|
|
2
|
+
"name": "@misterhuydo/sentinel",
|
|
3
|
+
"version": "1.4.68",
|
|
4
|
+
"description": "Sentinel — Autonomous DevOps Agent installer and manager",
|
|
5
|
+
"bin": {
|
|
6
|
+
"sentinel": "./bin/sentinel.js"
|
|
7
|
+
},
|
|
8
|
+
"scripts": {
|
|
9
|
+
"prepublishOnly": "node scripts/test-scripts.js && node scripts/bundle.js"
|
|
10
|
+
},
|
|
11
|
+
"dependencies": {
|
|
12
|
+
"chalk": "^4.1.2",
|
|
13
|
+
"fs-extra": "^11.2.0",
|
|
14
|
+
"prompts": "^2.4.2"
|
|
15
|
+
},
|
|
16
|
+
"engines": {
|
|
17
|
+
"node": ">=16"
|
|
18
|
+
},
|
|
19
|
+
"author": "misterhuydo",
|
|
20
|
+
"license": "MIT"
|
|
21
|
+
}
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
"""
|
|
2
|
+
dependency_manager.py — Maven pom.xml dependency cascade updates.
|
|
3
|
+
|
|
4
|
+
Handles:
|
|
5
|
+
- Parsing pom.xml for artifact ID and version
|
|
6
|
+
- Finding repos that depend on a given artifact
|
|
7
|
+
- Updating dependency versions in pom.xml
|
|
8
|
+
- Full cascade: update all dependents, commit, push / open PR
|
|
9
|
+
"""
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import logging
|
|
13
|
+
import re
|
|
14
|
+
from dataclasses import dataclass, field
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
|
|
17
|
+
from .config_loader import RepoConfig, SentinelConfig
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
class CascadeResult:
    """Outcome of updating one dependent repo during a dependency cascade."""

    repo_name: str  # name of the dependent repo that was processed
    success: bool  # True when the pom edit, commit, and push/PR all succeeded
    old_version: str = ""  # dependency version found in the pom before the bump
    new_version: str = ""  # version the dependency was bumped to
    branch: str = ""  # branch the change was pushed to (main or a sentinel/dep-* branch)
    pr_url: str = ""  # URL of the opened PR; "" when pushed directly or PR creation failed
    error: str = ""  # human-readable failure description when success is False
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
# ── pom.xml helpers ────────────────────────────────────────────────────────────
|
|
34
|
+
|
|
35
|
+
def get_artifact_id(local_path: str) -> str:
    """Return the project-level <artifactId> from pom.xml.

    The <parent> block (which also contains an <artifactId>) is removed
    before searching, so a pom that inherits from a parent resolves to its
    OWN artifact id rather than the parent's — the original first-match
    search returned the parent's id whenever a <parent> block preceded the
    project coordinates (the common Maven layout).

    Returns "" when pom.xml is missing or contains no artifactId.
    """
    pom = Path(local_path) / "pom.xml"
    if not pom.exists():
        return ""
    text = pom.read_text(encoding="utf-8", errors="ignore")
    # Drop the <parent> section so its artifactId cannot shadow the project's.
    text = re.sub(r"<parent>.*?</parent>", "", text, flags=re.DOTALL)
    m = re.search(r"<artifactId>([^<]+)</artifactId>", text)
    return m.group(1).strip() if m else ""
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def get_pom_version(local_path: str) -> str:
    """Return the project's current version from pom.xml (may be a -SNAPSHOT).

    The <parent> block is stripped before searching: it carries its own
    <version>, and the original first-match search returned the PARENT's
    version whenever a <parent> block preceded the project coordinates.
    NOTE(review): a pom that omits its own <version> (inheriting it from the
    parent) will now fall through to the next <version> tag in the file —
    callers are assumed to pass repos that declare their own version.

    Returns "" when pom.xml is missing or no version tag is found.
    """
    pom = Path(local_path) / "pom.xml"
    if not pom.exists():
        return ""
    text = pom.read_text(encoding="utf-8", errors="ignore")
    # Drop the <parent> section so its version cannot shadow the project's.
    text = re.sub(r"<parent>.*?</parent>", "", text, flags=re.DOTALL)
    m = re.search(r"<version>([^<]+)</version>", text)
    return m.group(1).strip() if m else ""
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def get_release_version(local_path: str) -> str:
    """Return the last release version: the pom version with "-SNAPSHOT" removed."""
    current = get_pom_version(local_path)
    return current.replace("-SNAPSHOT", "")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def find_dependents(
    artifact_id: str,
    repos: dict,
    skip_repo: str = "",
) -> list[tuple[RepoConfig, str]]:
    """
    Scan all repo pom.xml files for a <dependency> on artifact_id.
    Returns list of (RepoConfig, current_version) — sorted by repo_name.
    skip_repo: repo_name to exclude (the source repo itself).

    The tempered dot `(?:(?!</dependency>).)*?` keeps the match inside a
    SINGLE <dependency> block.  A bare `.*?` (the previous pattern) could
    anchor at an *earlier* <dependency> tag and swallow intervening
    </dependency> closers, so the extracted <version> belonged to the wrong
    dependency.
    """
    block_re = re.compile(
        rf"<dependency>(?:(?!</dependency>).)*?"
        rf"<artifactId>{re.escape(artifact_id)}</artifactId>"
        rf"(?:(?!</dependency>).)*?</dependency>",
        re.DOTALL,
    )
    results = []
    for repo in repos.values():
        if repo.repo_name == skip_repo or not repo.local_path:
            continue
        pom = Path(repo.local_path) / "pom.xml"
        if not pom.exists():
            continue
        text = pom.read_text(encoding="utf-8", errors="ignore")
        block_m = block_re.search(text)
        if block_m:
            # First <version> inside the (now correctly bounded) block is the
            # dependency's own version.
            ver_m = re.search(r"<version>([^<]+)</version>", block_m.group(0))
            current_ver = ver_m.group(1).strip() if ver_m else "?"
            results.append((repo, current_ver))
    return sorted(results, key=lambda t: t[0].repo_name)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def update_dependency(local_path: str, artifact_id: str, new_version: str) -> bool:
    """
    Update the <version> inside the <dependency> block for artifact_id in pom.xml.
    Returns True if the file was changed.

    The tempered dot `(?:(?!</dependency>).)*?` keeps the match inside a
    SINGLE <dependency> block.  The previous bare `.*?` pattern could anchor
    at an *earlier* <dependency> tag, so the "first <version> in the match"
    replacement rewrote a DIFFERENT dependency's version.
    """
    pom = Path(local_path) / "pom.xml"
    if not pom.exists():
        return False
    text = pom.read_text(encoding="utf-8", errors="ignore")

    def _replace_version(match: re.Match) -> str:
        # The block is bounded to one dependency, so its first <version> is
        # the one belonging to artifact_id.
        block = match.group(0)
        return re.sub(r"<version>[^<]+</version>", f"<version>{new_version}</version>", block, count=1)

    pattern = (
        rf"<dependency>(?:(?!</dependency>).)*?"
        rf"<artifactId>{re.escape(artifact_id)}</artifactId>"
        rf"(?:(?!</dependency>).)*?</dependency>"
    )
    new_text, n = re.subn(pattern, _replace_version, text, flags=re.DOTALL)
    if n == 0 or new_text == text:
        # No matching dependency, or it is already at new_version.
        return False
    pom.write_text(new_text, encoding="utf-8")
    return True
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
# ── Cascade planning & execution ───────────────────────────────────────────────
|
|
110
|
+
|
|
111
|
+
def plan_cascade(
    source_repo_name: str,
    repos: dict,
    target_repo_names: list[str] | None = None,
) -> dict:
    """
    Build a cascade plan without executing anything.
    Returns a dict suitable for presenting to the admin for confirmation.
    On failure the dict contains a single "error" key.
    """
    source = repos.get(source_repo_name)
    if source is None:
        return {"error": f"Repo '{source_repo_name}' not found in config"}

    artifact_id = get_artifact_id(source.local_path)
    if not artifact_id:
        return {"error": f"Could not read artifactId from {source_repo_name}/pom.xml"}

    new_version = get_release_version(source.local_path)
    if not new_version:
        return {"error": f"Could not determine release version from {source_repo_name}/pom.xml"}

    dependents = find_dependents(artifact_id, repos, skip_repo=source_repo_name)
    if target_repo_names:
        # Keep only the requested repos that actually depend on this artifact.
        wanted = set(target_repo_names)
        dependents = [(r, v) for r, v in dependents if r.repo_name in wanted]

    plan_entries = []
    for dep_repo, current_ver in dependents:
        plan_entries.append({
            "repo": dep_repo.repo_name,
            "current_version": current_ver,
            "auto_publish": dep_repo.auto_publish,
            "action": "push to main" if dep_repo.auto_publish else "open PR",
        })

    return {
        "source_repo": source_repo_name,
        "artifact_id": artifact_id,
        "new_version": new_version,
        "dependents": plan_entries,
    }
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def execute_cascade(
    source_repo_name: str,
    new_version: str,
    artifact_id: str,
    repos: dict,
    cfg: SentinelConfig,
    target_repo_names: list[str] | None = None,
) -> list[CascadeResult]:
    """
    Update the dependency in each dependent repo, commit, and push/PR.

    Args:
        source_repo_name: repo whose release triggered the cascade (excluded
            from the dependent scan).
        new_version: version to write into each dependent's pom.xml.
        artifact_id: Maven artifactId of the released library.
        repos: mapping of repo_name -> RepoConfig to scan.
        cfg: global Sentinel config (used for branch naming / PR creation).
        target_repo_names: optional whitelist of dependent repo names.

    Returns one CascadeResult per dependent repo attempted; a per-repo
    exception is recorded in that repo's result rather than aborting the
    whole cascade.
    """
    # Import here to avoid circular imports.  Only the helpers actually
    # called are imported (open_dep_pr is invoked by push_dep_update itself;
    # the previous version also imported open_dep_pr and _git_env unused).
    from .git_manager import commit_file_change, push_dep_update

    all_dependents = find_dependents(artifact_id, repos, skip_repo=source_repo_name)
    if target_repo_names:
        all_dependents = [(r, v) for r, v in all_dependents if r.repo_name in target_repo_names]

    results: list[CascadeResult] = []

    for repo, old_version in all_dependents:
        result = CascadeResult(
            repo_name=repo.repo_name,
            success=False,
            old_version=old_version,
            new_version=new_version,
        )
        try:
            if not update_dependency(repo.local_path, artifact_id, new_version):
                result.error = "pom.xml not changed (already up to date?)"
                results.append(result)
                continue

            commit_msg = (
                f"chore(deps): update {artifact_id} to {new_version} [sentinel]\n\n"
                f"Automated dependency bump by Sentinel after {source_repo_name} release."
            )
            status, commit_hash = commit_file_change(repo, ["pom.xml"], commit_msg)
            if status != "committed":
                result.error = "git commit failed"
                results.append(result)
                continue

            branch, pr_url = push_dep_update(repo, cfg, artifact_id, new_version, commit_hash)
            result.success = True
            result.branch = branch
            result.pr_url = pr_url
            logger.info("Cascade updated %s → %s=%s (branch: %s)", repo.repo_name, artifact_id, new_version, branch)

        except Exception as exc:
            result.error = str(exc)
            logger.error("Cascade failed for %s: %s", repo.repo_name, exc)

        results.append(result)

    return results
|
|
@@ -9,6 +9,7 @@ from __future__ import annotations
|
|
|
9
9
|
|
|
10
10
|
import logging
|
|
11
11
|
import os
|
|
12
|
+
import re
|
|
12
13
|
import subprocess
|
|
13
14
|
from pathlib import Path
|
|
14
15
|
|
|
@@ -157,6 +158,118 @@ def _append_changelog(repo: RepoConfig, event: ErrorEvent, commit_hash: str):
|
|
|
157
158
|
_git(["commit", "--amend", "--no-edit"], cwd=repo.local_path, env=env)
|
|
158
159
|
|
|
159
160
|
|
|
161
|
+
def commit_file_change(
    repo: RepoConfig,
    files: list[str],
    commit_msg: str,
) -> tuple[str, str]:
    """
    Pull, stage the given files, and commit.
    Returns (status, commit_hash) — status: "committed" | "failed".
    Does NOT run tests (dependency bumps don't need them).
    """
    git_env = _git_env(repo)
    workdir = repo.local_path

    pull = _git(["pull", "--rebase", "origin", repo.branch], cwd=workdir, env=git_env)
    if pull.returncode != 0:
        logger.error("git pull failed for %s:\n%s", repo.repo_name, pull.stderr)
        return "failed", ""

    _git(["add", *files], cwd=workdir, env=git_env)
    commit = _git(["commit", "-m", commit_msg], cwd=workdir, env=git_env)
    if commit.returncode != 0:
        logger.error("git commit failed for %s:\n%s", repo.repo_name, commit.stderr)
        # Roll back the staged-but-uncommitted working-tree changes.
        _git(["checkout", "."], cwd=workdir, env=git_env)
        return "failed", ""

    commit_hash = _git(["rev-parse", "HEAD"], cwd=workdir, env=git_env).stdout.strip()
    logger.info("Committed %s for %s", commit_hash[:8], repo.repo_name)
    return "committed", commit_hash
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def push_dep_update(
    repo: RepoConfig,
    cfg: SentinelConfig,
    artifact_id: str,
    new_version: str,
    commit_hash: str,
) -> tuple[str, str]:
    """
    Push a dependency update commit. AUTO_PUBLISH=true → main branch.
    AUTO_PUBLISH=false → sentinel/dep-<artifact>-<version> branch + open PR.
    Returns (branch, pr_url).
    """
    git_env = _git_env(repo)
    workdir = repo.local_path

    if repo.auto_publish:
        # Direct push to the repo's configured branch; no PR is opened.
        push = _git(["push", "origin", repo.branch], cwd=workdir, env=git_env)
        if push.returncode != 0:
            logger.error("git push failed for %s:\n%s", repo.repo_name, push.stderr)
        return repo.branch, ""

    # Review flow: push a dedicated branch, then open a PR against repo.branch.
    # Sanitize artifact/version so they are valid in a git ref name.
    safe_artifact = re.sub(r"[^a-zA-Z0-9_-]", "-", artifact_id)
    safe_version = re.sub(r"[^a-zA-Z0-9._-]", "-", new_version)
    branch = f"{cfg.project_name or 'sentinel'}/dep-{safe_artifact}-{safe_version}"
    _git(["checkout", "-B", branch], cwd=workdir, env=git_env)
    push = _git(["push", "-u", "origin", branch], cwd=workdir, env=git_env)
    if push.returncode != 0:
        logger.error("git push branch failed for %s:\n%s", repo.repo_name, push.stderr)
        _git(["checkout", repo.branch], cwd=workdir, env=git_env)
        return branch, ""
    # Return to the base branch before opening the PR.
    _git(["checkout", repo.branch], cwd=workdir, env=git_env)
    pr_url = open_dep_pr(repo, cfg, branch, artifact_id, new_version, commit_hash)
    return branch, pr_url
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def open_dep_pr(
    repo: RepoConfig,
    cfg: SentinelConfig,
    branch: str,
    artifact_id: str,
    new_version: str,
    commit_hash: str,
) -> str:
    """Open a GitHub PR for a dependency version bump."""
    if not cfg.github_token:
        logger.warning("GITHUB_TOKEN not set — cannot open PR")
        return ""

    # Derive "owner/repo" from either an SSH ("git@host:owner/repo.git") or
    # an HTTPS ("https://host/owner/repo[.git]") remote URL.
    repo_url = repo.repo_url
    if repo_url.startswith("git@"):
        owner_repo = repo_url.split(":")[-1].removesuffix(".git")
    else:
        owner_repo = "/".join(repo_url.rstrip("/").split("/")[-2:]).removesuffix(".git")

    title = f"[Sentinel] chore(deps): update {artifact_id} to {new_version}"
    body = (
        f"## Automated dependency update by Sentinel\n\n"
        f"| | |\n|---|---|\n"
        f"| **Dependency** | `{artifact_id}` |\n"
        f"| **New version** | `{new_version}` |\n"
        f"| **Commit** | `{commit_hash[:8]}` |\n\n"
        f"This PR was opened automatically after a Sentinel-triggered release of `{artifact_id}`.\n\n"
        f"---\n_To apply: merge this PR. To reject: close it._"
    )
    # NOTE(review): relies on a module-level `requests` import — confirm
    # git_manager.py imports it (only `import re` is visible in this diff).
    response = requests.post(
        f"https://api.github.com/repos/{owner_repo}/pulls",
        json={"title": title, "body": body, "head": branch, "base": repo.branch},
        headers={
            "Authorization": f"Bearer {cfg.github_token}",
            "Accept": "application/vnd.github+json",
        },
        timeout=30,
    )
    if response.status_code != 201:
        logger.error("Failed to open dep PR (%s): %s", response.status_code, response.text[:300])
        return ""
    pr_url = response.json().get("html_url", "")
    logger.info("Dep PR opened: %s", pr_url)
    return pr_url
|
|
271
|
+
|
|
272
|
+
|
|
160
273
|
def remote_fix_exists(repo: RepoConfig, fingerprint: str, cfg: SentinelConfig) -> bool:
|
|
161
274
|
"""Return True if any Sentinel instance already pushed a fix branch for this fingerprint."""
|
|
162
275
|
pattern = f"refs/heads/*/fix-{fingerprint[:8]}"
|
package/python/sentinel/main.py
CHANGED
|
@@ -29,7 +29,7 @@ from .log_parser import parse_all, scan_all_for_markers, ErrorEvent
|
|
|
29
29
|
from .issue_watcher import scan_issues, mark_done, IssueEvent
|
|
30
30
|
from .repo_router import route
|
|
31
31
|
from .reporter import build_and_send, send_fix_notification, send_failure_notification, send_confirmed_notification, send_regression_notification, send_startup_notification, send_upgrade_notification
|
|
32
|
-
from .notify import notify_fix_blocked, notify_fix_applied, notify_missing_tool, notify_tool_installing
|
|
32
|
+
from .notify import notify_fix_blocked, notify_fix_applied, notify_missing_tool, notify_tool_installing, notify_cascade_started, notify_cascade_result
|
|
33
33
|
from .health_checker import evaluate_repos
|
|
34
34
|
from .state_store import StateStore
|
|
35
35
|
|
|
@@ -147,6 +147,28 @@ def _auto_install_if_safe(
|
|
|
147
147
|
|
|
148
148
|
# ── Fix pipeline ──────────────────────────────────────────────────────────────
|
|
149
149
|
|
|
150
|
+
|
|
151
|
+
def _run_cascade(repo, sentinel, cfg_loader):
    """After a jenkins_release succeeds, cascade the version bump to all dependent repos.

    Args:
        repo: RepoConfig of the just-released repo (source of the cascade).
        sentinel: SentinelConfig, forwarded to notifications and execute_cascade.
        cfg_loader: ConfigLoader providing the full repos mapping.
    """
    from .dependency_manager import get_artifact_id, get_release_version, plan_cascade, execute_cascade
    artifact_id = get_artifact_id(repo.local_path)
    new_version = get_release_version(repo.local_path)
    if not artifact_id or not new_version:
        logger.warning("Cascade skipped for %s — could not read artifact/version from pom.xml", repo.repo_name)
        return
    cascade_plan = plan_cascade(repo.repo_name, cfg_loader.repos)
    # plan_cascade returns {"error": ...} on failure; without this check the
    # old code fell through and logged a misleading "no dependents" message.
    if cascade_plan.get("error"):
        logger.warning("Cascade skipped for %s — %s", repo.repo_name, cascade_plan["error"])
        return
    target_repo_names = [d["repo"] for d in cascade_plan.get("dependents", [])]
    if not target_repo_names:
        logger.info("Cascade: no dependents found for %s", artifact_id)
        return
    logger.info("Cascade: updating %s in %d repo(s): %s", artifact_id, len(target_repo_names), target_repo_names)
    notify_cascade_started(sentinel, artifact_id, new_version, target_repo_names)
    # Pin execution to the exact repo list that was just announced, so the
    # result notification cannot diverge from the started notification.
    results = execute_cascade(repo.repo_name, new_version, artifact_id, cfg_loader.repos, sentinel,
                              target_repo_names=target_repo_names)
    notify_cascade_result(sentinel, artifact_id, new_version, results)
    ok = sum(1 for r in results if r.success)
    logger.info("Cascade complete: %d/%d repos updated for %s=%s", ok, len(results), artifact_id, new_version)
|
|
170
|
+
|
|
171
|
+
|
|
150
172
|
async def _handle_error(event: ErrorEvent, cfg_loader: ConfigLoader, store: StateStore):
|
|
151
173
|
sentinel = cfg_loader.sentinel
|
|
152
174
|
|
|
@@ -248,7 +270,9 @@ async def _handle_error(event: ErrorEvent, cfg_loader: ConfigLoader, store: Stat
|
|
|
248
270
|
})
|
|
249
271
|
|
|
250
272
|
if repo.auto_publish:
|
|
251
|
-
cicd_trigger(repo, store, event.fingerprint)
|
|
273
|
+
ok = cicd_trigger(repo, store, event.fingerprint)
|
|
274
|
+
if ok and repo.cicd_type.lower() in ("jenkins_release", "jenkins-release"):
|
|
275
|
+
_run_cascade(repo, sentinel, cfg_loader)
|
|
252
276
|
|
|
253
277
|
|
|
254
278
|
# ── Poll cycle ────────────────────────────────────────────────────────────────
|
|
@@ -344,7 +368,9 @@ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: Stat
|
|
|
344
368
|
mark_done(event.issue_file)
|
|
345
369
|
|
|
346
370
|
if repo.auto_publish:
|
|
347
|
-
cicd_trigger(repo, store, event.fingerprint)
|
|
371
|
+
ok = cicd_trigger(repo, store, event.fingerprint)
|
|
372
|
+
if ok and repo.cicd_type.lower() in ("jenkins_release", "jenkins-release"):
|
|
373
|
+
_run_cascade(repo, sentinel, cfg_loader)
|
|
348
374
|
|
|
349
375
|
except MissingToolError as e:
|
|
350
376
|
logger.warning("Missing tool for %s: %s", event.source, e)
|
|
@@ -390,7 +416,9 @@ async def _handle_issue(event: IssueEvent, cfg_loader: ConfigLoader, store: Stat
|
|
|
390
416
|
submitter_user_id=submitter_uid)
|
|
391
417
|
mark_done(event.issue_file)
|
|
392
418
|
if repo.auto_publish:
|
|
393
|
-
cicd_trigger(repo, store, event.fingerprint)
|
|
419
|
+
ok = cicd_trigger(repo, store, event.fingerprint)
|
|
420
|
+
if ok and repo.cicd_type.lower() in ("jenkins_release", "jenkins-release"):
|
|
421
|
+
_run_cascade(repo, sentinel, cfg_loader)
|
|
394
422
|
|
|
395
423
|
except Exception:
|
|
396
424
|
logger.exception("Unexpected error processing issue %s — archiving to prevent retry loop", event.source)
|