agent-control-plane 0.4.9 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/README.md +109 -13
  2. package/npm/bin/agent-control-plane.js +1 -1
  3. package/package.json +39 -33
  4. package/tools/bin/debug-session.sh +106 -0
  5. package/tools/bin/flow-config-lib.sh +13 -3508
  6. package/tools/bin/flow-execution-lib.sh +243 -0
  7. package/tools/bin/flow-forge-lib.sh +1770 -0
  8. package/tools/bin/flow-profile-lib.sh +335 -0
  9. package/tools/bin/flow-provider-lib.sh +981 -0
  10. package/tools/bin/flow-runtime-doctor-linux.sh +136 -0
  11. package/tools/bin/flow-runtime-doctor.sh +5 -1
  12. package/tools/bin/flow-session-lib.sh +317 -0
  13. package/tools/bin/install-project-systemd.sh +255 -0
  14. package/tools/bin/project-runtimectl.sh +45 -0
  15. package/tools/bin/project-systemd-bootstrap.sh +74 -0
  16. package/tools/bin/uninstall-project-systemd.sh +87 -0
  17. package/tools/dashboard/app.js +238 -8
  18. package/tools/dashboard/issue_queue_state.py +101 -0
  19. package/tools/dashboard/requirements.txt +3 -0
  20. package/tools/dashboard/server.py +250 -30
  21. package/tools/dashboard/styles.css +526 -455
  22. package/tools/bin/agent-cleanup-worktree +0 -247
  23. package/tools/bin/agent-github-update-labels +0 -105
  24. package/tools/bin/agent-init-worktree +0 -216
  25. package/tools/bin/agent-project-archive-run +0 -52
  26. package/tools/bin/agent-project-capture-worker +0 -46
  27. package/tools/bin/agent-project-catch-up-issue-pr-links +0 -118
  28. package/tools/bin/agent-project-catch-up-merged-prs +0 -195
  29. package/tools/bin/agent-project-catch-up-scheduled-issue-retries +0 -123
  30. package/tools/bin/agent-project-cleanup-session +0 -513
  31. package/tools/bin/agent-project-detached-launch +0 -127
  32. package/tools/bin/agent-project-heartbeat-loop +0 -1029
  33. package/tools/bin/agent-project-open-issue-worktree +0 -89
  34. package/tools/bin/agent-project-open-pr-worktree +0 -80
  35. package/tools/bin/agent-project-publish-issue-pr +0 -468
  36. package/tools/bin/agent-project-reconcile-issue-session +0 -1409
  37. package/tools/bin/agent-project-reconcile-pr-session +0 -1288
  38. package/tools/bin/agent-project-retry-state +0 -158
  39. package/tools/bin/agent-project-run-claude-session +0 -805
  40. package/tools/bin/agent-project-run-codex-resilient +0 -963
  41. package/tools/bin/agent-project-run-codex-session +0 -435
  42. package/tools/bin/agent-project-run-kilo-session +0 -369
  43. package/tools/bin/agent-project-run-ollama-session +0 -658
  44. package/tools/bin/agent-project-run-openclaw-session +0 -1309
  45. package/tools/bin/agent-project-run-opencode-session +0 -377
  46. package/tools/bin/agent-project-run-pi-session +0 -479
  47. package/tools/bin/agent-project-sync-anchor-repo +0 -139
  48. package/tools/bin/agent-project-sync-source-repo-main +0 -163
  49. package/tools/bin/agent-project-worker-status +0 -188
  50. package/tools/bin/branch-verification-guard.sh +0 -364
  51. package/tools/bin/capture-worker.sh +0 -18
  52. package/tools/bin/cleanup-worktree.sh +0 -52
  53. package/tools/bin/codex-quota +0 -31
  54. package/tools/bin/create-follow-up-issue.sh +0 -114
  55. package/tools/bin/dashboard-launchd-bootstrap.sh +0 -50
  56. package/tools/bin/issue-publish-localization-guard.sh +0 -142
  57. package/tools/bin/issue-publish-scope-guard.sh +0 -242
  58. package/tools/bin/issue-requires-local-workspace-install.sh +0 -31
  59. package/tools/bin/issue-resource-class.sh +0 -12
  60. package/tools/bin/kick-scheduler.sh +0 -75
  61. package/tools/bin/label-follow-up-issues.sh +0 -14
  62. package/tools/bin/new-pr-worktree.sh +0 -50
  63. package/tools/bin/new-worktree.sh +0 -49
  64. package/tools/bin/pr-risk.sh +0 -12
  65. package/tools/bin/prepare-worktree.sh +0 -142
  66. package/tools/bin/provider-cooldown-state.sh +0 -204
  67. package/tools/bin/publish-issue-worker.sh +0 -31
  68. package/tools/bin/reconcile-bootstrap-lib.sh +0 -113
  69. package/tools/bin/reconcile-issue-worker.sh +0 -34
  70. package/tools/bin/reconcile-pr-worker.sh +0 -34
  71. package/tools/bin/record-verification.sh +0 -71
  72. package/tools/bin/render-flow-config.sh +0 -98
  73. package/tools/bin/resident-issue-controller-lib.sh +0 -448
  74. package/tools/bin/retry-state.sh +0 -31
  75. package/tools/bin/reuse-issue-worktree.sh +0 -121
  76. package/tools/bin/run-codex-bypass.sh +0 -3
  77. package/tools/bin/run-codex-safe.sh +0 -3
  78. package/tools/bin/run-codex-task.sh +0 -280
  79. package/tools/bin/serve-dashboard.sh +0 -5
  80. package/tools/bin/start-issue-worker.sh +0 -943
  81. package/tools/bin/start-pr-fix-worker.sh +0 -528
  82. package/tools/bin/start-pr-merge-repair-worker.sh +0 -8
  83. package/tools/bin/start-pr-review-worker.sh +0 -261
  84. package/tools/bin/start-resident-issue-loop.sh +0 -499
  85. package/tools/bin/update-github-labels.sh +0 -14
  86. package/tools/bin/worker-status.sh +0 -19
  87. package/tools/bin/workflow-catalog.sh +0 -77
@@ -1,364 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- usage() {
5
- cat <<'EOF'
6
- Usage:
7
- branch-verification-guard.sh --worktree <path> --base-ref <git-ref> --run-dir <path>
8
-
9
- Fail fast when a branch update is about to be pushed without sufficient local
10
- verification evidence for the touched surface.
11
- EOF
12
- }
13
-
14
- worktree=""
15
- base_ref=""
16
- run_dir=""
17
-
18
- while [[ $# -gt 0 ]]; do
19
- case "$1" in
20
- --worktree) worktree="${2:-}"; shift 2 ;;
21
- --base-ref) base_ref="${2:-}"; shift 2 ;;
22
- --run-dir) run_dir="${2:-}"; shift 2 ;;
23
- --help|-h) usage; exit 0 ;;
24
- *)
25
- echo "Unknown argument: $1" >&2
26
- usage >&2
27
- exit 1
28
- ;;
29
- esac
30
- done
31
-
32
- if [[ -z "$worktree" || -z "$base_ref" || -z "$run_dir" ]]; then
33
- usage >&2
34
- exit 1
35
- fi
36
-
37
- if [[ ! -d "$worktree" ]]; then
38
- echo "missing worktree: $worktree" >&2
39
- exit 1
40
- fi
41
-
42
- changed_files="$(
43
- {
44
- git -C "$worktree" diff --name-only --diff-filter=ACMR "${base_ref}...HEAD"
45
- git -C "$worktree" diff --name-only --diff-filter=ACMR
46
- git -C "$worktree" diff --cached --name-only --diff-filter=ACMR
47
- git -C "$worktree" ls-files --others --exclude-standard 2>/dev/null || true
48
- } | awk '
49
- NF == 0 { next }
50
- !seen[$0]++ { print $0 }
51
- '
52
- )"
53
-
54
- verification_file="${run_dir}/verification.jsonl"
55
-
56
- CHANGED_FILES="$changed_files" VERIFICATION_FILE="$verification_file" node <<'EOF'
57
- const fs = require('fs');
58
- const path = require('path');
59
-
60
- const files = String(process.env.CHANGED_FILES || '')
61
- .split('\n')
62
- .map((file) => file.trim())
63
- .filter(Boolean);
64
- const normalizePath = (file) => String(file || '').replace(/\\/g, '/').toLowerCase();
65
- const stripCodeExtension = (file) => normalizePath(file).replace(/\.[cm]?[jt]sx?$/i, '');
66
- const stripTestSuffix = (file) => stripCodeExtension(file).replace(/\.(spec|test)$/i, '');
67
- const lastPathSegments = (file, count = 2) => {
68
- const parts = normalizePath(file).split('/').filter(Boolean);
69
- return parts.slice(-count).join('/');
70
- };
71
- const unique = (values) => [...new Set(values.filter(Boolean))];
72
-
73
- const verificationFile = String(process.env.VERIFICATION_FILE || '');
74
- const isDoc = (file) =>
75
- /^openspec\//.test(file) ||
76
- /^docs\//.test(file) ||
77
- /^scripts\/README\.md$/.test(file) ||
78
- /^AGENTS\.md$/.test(file) ||
79
- /^openspec\/AGENT_RULES\.md$/.test(file) ||
80
- /\.md$/i.test(file);
81
-
82
- const isTest = (file) =>
83
- /(?:^|\/)__tests__\//.test(file) ||
84
- /(?:^|\/)e2e\//.test(file) ||
85
- /\.(?:spec|test)\.[cm]?[jt]sx?$/.test(file);
86
-
87
- const isLocaleResource = (file) =>
88
- /^packages\/i18n\/src\/resources\/[^/]+\.json$/.test(file);
89
- const isAgentGeneratedArtifact = (file) =>
90
- /^\.agent-session\.env$/i.test(file) ||
91
- /^(?:\.openclaw-artifacts|\.openclaw)(?:\/|$)/i.test(file) ||
92
- /^(?:SOUL|TOOLS|IDENTITY|USER|HEARTBEAT|BOOTSTRAP)\.md$/i.test(file);
93
- const isDependencyLockfile = (file) =>
94
- /(?:^|\/)(?:pnpm-lock\.yaml|package-lock\.json|yarn\.lock|bun\.lockb|npm-shrinkwrap\.json)$/i.test(file);
95
- const isDependencyManifest = (file) =>
96
- /(?:^|\/)package\.json$/i.test(file) ||
97
- /(?:^|\/)pnpm-workspace\.yaml$/i.test(file) ||
98
- /(?:^|\/)\.npmrc$/i.test(file) ||
99
- /(?:^|\/)\.yarnrc(?:\.yml)?$/i.test(file) ||
100
- /(?:^|\/)bunfig\.toml$/i.test(file);
101
-
102
- const productNonTestFiles = files.filter(
103
- (file) => !isDoc(file) && !isTest(file) && !isLocaleResource(file),
104
- );
105
- const generatedArtifacts = unique(files.filter(isAgentGeneratedArtifact));
106
- const dependencyLockfiles = unique(files.filter(isDependencyLockfile));
107
- const dependencyInputsChanged = files.some(isDependencyManifest);
108
- const apiTouched = productNonTestFiles.some((file) => /^apps\/api\//.test(file));
109
- const webTouched = productNonTestFiles.some((file) => /^apps\/web\//.test(file));
110
- const mobileTouched = productNonTestFiles.some((file) => /^apps\/mobile\//.test(file));
111
- const apiProductFiles = productNonTestFiles.filter((file) => /^apps\/api\//.test(file));
112
- const packageNames = [
113
- ...new Set(
114
- productNonTestFiles
115
- .filter((file) => /^packages\//.test(file))
116
- .map((file) => file.split('/')[1])
117
- .filter(Boolean),
118
- ),
119
- ];
120
- const changedTestFiles = files.filter(isTest);
121
- const localeTouched = files.some(isLocaleResource);
122
-
123
- if (productNonTestFiles.length === 0 && !localeTouched && changedTestFiles.length === 0) {
124
- process.stdout.write('VERIFICATION_GUARD_STATUS=ok\n');
125
- process.stdout.write('VERIFICATION_REASON=docs-or-spec-only\n');
126
- process.exit(0);
127
- }
128
-
129
- let entries = [];
130
- if (verificationFile && fs.existsSync(verificationFile)) {
131
- const raw = fs.readFileSync(verificationFile, 'utf8');
132
- entries = raw
133
- .split('\n')
134
- .map((line) => line.trim())
135
- .filter(Boolean)
136
- .flatMap((line) => {
137
- try {
138
- return [JSON.parse(line)];
139
- } catch (error) {
140
- return [];
141
- }
142
- });
143
- }
144
-
145
- const passedCommands = entries
146
- .filter((entry) => entry && entry.status === 'pass' && typeof entry.command === 'string')
147
- .map((entry) => entry.command.trim())
148
- .filter(Boolean);
149
- const passedLower = passedCommands.map((command) => command.toLowerCase());
150
-
151
- const escapeRegex = (value) => String(value || '').replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
152
- const hasCommand = (...patterns) =>
153
- passedLower.some((command) => patterns.some((pattern) => pattern.test(command)));
154
- const hasScopedCommand = (scopePatterns, ...actionPatterns) =>
155
- passedLower.some(
156
- (command) =>
157
- scopePatterns.some((pattern) => pattern.test(command)) &&
158
- actionPatterns.some((pattern) => pattern.test(command)),
159
- );
160
- const workspaceScopePatterns = (workspace) => {
161
- const name = escapeRegex(workspace);
162
- return [
163
- new RegExp(`--filter(?:=|\\s+)(?:@[^/\\s]+/)?${name}\\b`),
164
- new RegExp(`(?:\\bapps/${name}\\b|(?:--dir|-C)\\s+apps/${name}\\b)`),
165
- ];
166
- };
167
- const packageScopePatterns = (pkg) => {
168
- const name = escapeRegex(pkg);
169
- return [
170
- new RegExp(`--filter(?:=|\\s+)(?:@[^/\\s]+/)?${name}\\b`),
171
- new RegExp(`@[^/\\s]+/${name}\\b`),
172
- new RegExp(`(?:\\bpackages/${name}\\b|(?:--dir|-C)\\s+packages/${name}\\b)`),
173
- ];
174
- };
175
- const apiScopePattern = workspaceScopePatterns('api');
176
- const webScopePattern = workspaceScopePatterns('web');
177
- const mobileScopePattern = workspaceScopePatterns('mobile');
178
- const targetedAnchorsForFile = (file) =>
179
- unique([
180
- stripCodeExtension(lastPathSegments(file, 2)),
181
- stripCodeExtension(path.basename(file)),
182
- path.basename(stripTestSuffix(file)),
183
- ]);
184
- const hasScopedRunnerCoverage = (file, command) => {
185
- if (/(?:^|\/)e2e\//.test(file)) {
186
- return /\bplaywright\b/.test(command);
187
- }
188
- if (/^apps\/mobile\//.test(file)) {
189
- return /\b(?:detox|maestro)\b/.test(command);
190
- }
191
- return false;
192
- };
193
- const changedTestCoverage = changedTestFiles.map((file) => {
194
- const anchors = targetedAnchorsForFile(file);
195
- const covered = passedLower.some(
196
- (command) =>
197
- anchors.some((anchor) => anchor && command.includes(anchor)) ||
198
- hasScopedRunnerCoverage(file, command),
199
- );
200
- return { file, anchors, covered };
201
- });
202
- const missingChangedTestFiles = changedTestCoverage.filter(({ covered }) => !covered);
203
- const apiChangedTests = changedTestCoverage.filter(({ file }) => /^apps\/api\//.test(file));
204
-
205
- const rootTypecheck = hasCommand(/\bpnpm (?:run )?typecheck\b/, /\bturbo\b.*\btypecheck\b/);
206
- const rootBuild = hasCommand(/\bpnpm (?:run )?build\b/, /\bturbo\b.*\bbuild\b/);
207
- const rootLint = hasCommand(/\bpnpm (?:run )?lint\b/, /\bturbo\b.*\blint\b/);
208
- const rootTest = hasCommand(/\bpnpm (?:run )?test\b/, /\bturbo\b.*\btest\b/);
209
- const scopedApiTypecheck =
210
- hasScopedCommand(apiScopePattern, /\btypecheck\b/, /\btsc --noemit\b/, /\btsc --noemit\b/);
211
- const scopedApiConfidence =
212
- hasScopedCommand(apiScopePattern, /\blint\b/, /\bbuild\b/, /\btest\b/, /\bjest\b/, /\bvitest\b/);
213
- const apiNarrowSliceTargetedCoverage =
214
- apiProductFiles.length > 0 &&
215
- apiProductFiles.length <= 2 &&
216
- apiProductFiles.every((file) => /(?:^|\/)(?:services?|utils?|helpers?|policies?)\/|(?:\.service|\.util|\.helper|\.policy)\.[cm]?[jt]s$/.test(file)) &&
217
- apiChangedTests.length > 0 &&
218
- apiChangedTests.every(({ covered }) => covered) &&
219
- scopedApiConfidence;
220
-
221
- const reasons = [];
222
- if (passedCommands.length === 0) {
223
- reasons.push('missing verification journal or no successful verification commands were recorded');
224
- }
225
-
226
- if (generatedArtifacts.length > 0) {
227
- reasons.push('generated agent/session artifacts were included in the branch diff');
228
- }
229
-
230
- if (dependencyLockfiles.length > 0 && !dependencyInputsChanged) {
231
- reasons.push('lockfile changes were introduced without dependency manifest changes');
232
- }
233
-
234
- if (localeTouched) {
235
- if (
236
- !hasCommand(
237
- /(?:@[^/\s]+\/i18n\b|--filter(?:=|\s+)(?:@[^/\s]+\/i18n|i18n)\b).*?\bvalidate\b/,
238
- /\bi18n-validation\b/,
239
- /\bi18n-gate\b/,
240
- /\b(?:i18n|locale|translation|translations)\b.*\bvalidate\b/,
241
- )
242
- ) {
243
- reasons.push('missing i18n validate command for locale resource changes');
244
- }
245
- if (
246
- !hasCommand(
247
- /(?:@[^/\s]+\/i18n\b|--filter(?:=|\s+)(?:@[^/\s]+\/i18n|i18n)\b).*?\b(?:scan-hardcoded|i18n:scan)\b/,
248
- /\bi18n-checks\b/,
249
- /\bi18n-gate\b/,
250
- /\b(?:i18n|locale|translation|translations)\b.*\b(?:scan-hardcoded|i18n:scan|scan)\b/,
251
- )
252
- ) {
253
- reasons.push('missing i18n scan-hardcoded command for locale resource changes');
254
- }
255
- }
256
-
257
- if (apiTouched) {
258
- if (!(scopedApiTypecheck || rootTypecheck || apiNarrowSliceTargetedCoverage)) {
259
- reasons.push('missing API typecheck or repo typecheck for API changes');
260
- }
261
- if (!(scopedApiConfidence || rootBuild || rootLint || rootTest)) {
262
- reasons.push('missing API confidence verification (lint, build, or test) for API changes');
263
- }
264
- }
265
-
266
- if (webTouched) {
267
- if (!(hasScopedCommand(webScopePattern, /\blint\b/, /\btypecheck\b/, /\bbuild\b/, /\btest\b/, /\bjest\b/, /\bvitest\b/) || rootTypecheck || rootBuild || rootLint || rootTest)) {
268
- reasons.push('missing Web verification command for web changes');
269
- }
270
- }
271
-
272
- if (mobileTouched) {
273
- if (!(hasScopedCommand(mobileScopePattern, /\blint\b/, /\btypecheck\b/, /\bbuild\b/, /\btest\b/, /\bjest\b/, /\bvitest\b/, /\bdetox\b/, /\bmaestro\b/) || rootTypecheck || rootBuild || rootLint || rootTest || hasCommand(/\bdetox\b/, /\bmaestro\b/))) {
274
- reasons.push('missing Mobile verification command for mobile changes');
275
- }
276
- }
277
-
278
- if (!apiTouched && !webTouched && !mobileTouched && packageNames.length > 0) {
279
- for (const pkg of packageNames) {
280
- if (
281
- !(hasScopedCommand(
282
- packageScopePatterns(pkg),
283
- /\blint\b/,
284
- /\btypecheck\b/,
285
- /\bbuild\b/,
286
- /\btest\b/,
287
- /\bjest\b/,
288
- /\bvitest\b/,
289
- ) || rootTypecheck || rootBuild || rootLint || rootTest)
290
- ) {
291
- reasons.push(`missing shared package verification for packages/${pkg}`);
292
- }
293
- }
294
- }
295
-
296
- if (missingChangedTestFiles.length > 0) {
297
- reasons.push('changed test files were not covered by a targeted test command');
298
- }
299
-
300
- if (reasons.length === 0) {
301
- process.stdout.write('VERIFICATION_GUARD_STATUS=ok\n');
302
- process.stdout.write(`VERIFICATION_COMMAND_COUNT=${passedCommands.length}\n`);
303
- process.exit(0);
304
- }
305
-
306
- const lines = [
307
- 'Verification guard blocked branch publication.',
308
- '',
309
- 'Why it was blocked:',
310
- ...reasons.map((reason) => `- ${reason}`),
311
- ];
312
-
313
- if (productNonTestFiles.length > 0) {
314
- lines.push('', 'Changed product files:');
315
- for (const file of productNonTestFiles.slice(0, 15)) {
316
- lines.push(`- ${file}`);
317
- }
318
- }
319
-
320
- if (generatedArtifacts.length > 0) {
321
- lines.push('', 'Generated artifacts that must be removed before publish:');
322
- for (const file of generatedArtifacts.slice(0, 15)) {
323
- lines.push(`- ${file}`);
324
- }
325
- }
326
-
327
- if (dependencyLockfiles.length > 0 && !dependencyInputsChanged) {
328
- lines.push('', 'Lockfiles changed without a matching dependency manifest update:');
329
- for (const file of dependencyLockfiles.slice(0, 15)) {
330
- lines.push(`- ${file}`);
331
- }
332
- }
333
-
334
- if (missingChangedTestFiles.length > 0) {
335
- lines.push('', 'Changed test files still missing explicit coverage:');
336
- for (const { file, anchors } of missingChangedTestFiles.slice(0, 15)) {
337
- const acceptedAnchors = anchors.map((anchor) => `\`${anchor}\``);
338
- if (/(?:^|\/)e2e\//.test(file)) {
339
- acceptedAnchors.push('scoped `playwright` command');
340
- } else if (/^apps\/mobile\//.test(file)) {
341
- acceptedAnchors.push('scoped `detox` or `maestro` command');
342
- }
343
- lines.push(`- ${file} | accepted anchors: ${acceptedAnchors.join(', ')}`);
344
- }
345
- }
346
-
347
- if (passedCommands.length > 0) {
348
- lines.push('', 'Recorded verification commands:');
349
- for (const command of passedCommands.slice(0, 20)) {
350
- lines.push(`- ${command}`);
351
- }
352
- } else {
353
- lines.push('', `Verification journal file: ${verificationFile || '(missing)'}`);
354
- }
355
-
356
- lines.push(
357
- '',
358
- 'Required next step:',
359
- '- rerun the narrowest relevant local verification, record each successful command into verification.jsonl, then publish again',
360
- );
361
-
362
- process.stderr.write(`${lines.join('\n')}\n`);
363
- process.exit(43);
364
- EOF
@@ -1,18 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
5
- # shellcheck source=/dev/null
6
- source "${SCRIPT_DIR}/flow-config-lib.sh"
7
-
8
- SESSION="${1:?usage: capture-worker.sh SESSION [LINES]}"
9
- LINES="${2:-80}"
10
- CONFIG_YAML="$(resolve_flow_config_yaml "${BASH_SOURCE[0]}")"
11
- RUNS_ROOT="$(flow_resolve_runs_root "${CONFIG_YAML}")"
12
- FLOW_SKILL_DIR="$(resolve_flow_skill_dir "${BASH_SOURCE[0]}")"
13
- FLOW_TOOLS_DIR="${FLOW_SKILL_DIR}/tools/bin"
14
-
15
- exec bash "${FLOW_TOOLS_DIR}/agent-project-capture-worker" \
16
- --runs-root "$RUNS_ROOT" \
17
- --session "$SESSION" \
18
- --lines "$LINES"
@@ -1,52 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
5
- # shellcheck source=/dev/null
6
- source "${SCRIPT_DIR}/flow-config-lib.sh"
7
-
8
- FLOW_SKILL_DIR="$(resolve_flow_skill_dir "${BASH_SOURCE[0]}")"
9
- FLOW_TOOLS_DIR="${FLOW_SKILL_DIR}/tools/bin"
10
- CONFIG_YAML="$(resolve_flow_config_yaml "${BASH_SOURCE[0]}")"
11
- AGENT_REPO_ROOT="$(flow_resolve_agent_repo_root "${CONFIG_YAML}")"
12
- AGENT_ROOT="$(flow_resolve_agent_root "${CONFIG_YAML}")"
13
- RUNS_ROOT="$(flow_resolve_runs_root "${CONFIG_YAML}")"
14
- HISTORY_ROOT="$(flow_resolve_history_root "${CONFIG_YAML}")"
15
- WORKTREE_ROOT="$(flow_resolve_worktree_root "${CONFIG_YAML}")"
16
- RETAINED_REPO_ROOT="$(flow_resolve_retained_repo_root "${CONFIG_YAML}")"
17
- VSCODE_WORKSPACE_FILE="$(flow_resolve_vscode_workspace_file "${CONFIG_YAML}")"
18
- ISSUE_SESSION_PREFIX="$(flow_resolve_issue_session_prefix "${CONFIG_YAML}")"
19
- PR_SESSION_PREFIX="$(flow_resolve_pr_session_prefix "${CONFIG_YAML}")"
20
- WORKTREE="${1-}"
21
- SESSION="${2:-}"
22
- MODE="generic"
23
- ARGS=(
24
- --repo-root "$AGENT_REPO_ROOT"
25
- --runs-root "$RUNS_ROOT"
26
- --history-root "$HISTORY_ROOT"
27
- --worktree "${WORKTREE:-}"
28
- )
29
-
30
- case "$SESSION" in
31
- "${ISSUE_SESSION_PREFIX}"*) MODE="issue" ;;
32
- "${PR_SESSION_PREFIX}"*) MODE="pr" ;;
33
- esac
34
-
35
- ARGS+=(--mode "$MODE")
36
- if [[ -n "$SESSION" ]]; then
37
- ARGS+=(--session "$SESSION")
38
- fi
39
-
40
- cleanup_exit=0
41
- AGENT_PROJECT_WORKTREE_ROOT="$WORKTREE_ROOT" \
42
- F_LOSNING_WORKTREE_ROOT="$WORKTREE_ROOT" \
43
- bash "${FLOW_TOOLS_DIR}/agent-project-cleanup-session" "${ARGS[@]}" >/dev/null || cleanup_exit=$?
44
-
45
- F_LOSNING_AGENT_REPO_ROOT="$AGENT_REPO_ROOT" \
46
- F_LOSNING_RETAINED_REPO_ROOT="$RETAINED_REPO_ROOT" \
47
- F_LOSNING_VSCODE_WORKSPACE_FILE="$VSCODE_WORKSPACE_FILE" \
48
- "${FLOW_TOOLS_DIR}/sync-vscode-workspace.sh" >/dev/null 2>&1 || true
49
-
50
- if [[ "$cleanup_exit" -ne 0 ]]; then
51
- exit "$cleanup_exit"
52
- fi
@@ -1,31 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
5
- QUOTA_ENTRY="${SCRIPT_DIR}/../vendor/codex-quota/codex-quota.js"
6
- NODE_BIN="${ACP_CODEX_QUOTA_NODE_BIN:-${F_LOSNING_CODEX_QUOTA_NODE_BIN:-}}"
7
-
8
- if [[ -z "${NODE_BIN}" ]]; then
9
- NODE_BIN="$(command -v node 2>/dev/null || true)"
10
- fi
11
-
12
- if [[ -z "${NODE_BIN}" ]]; then
13
- export NVM_DIR="${NVM_DIR:-$HOME/.nvm}"
14
- if [[ -s "${NVM_DIR}/nvm.sh" ]]; then
15
- # shellcheck source=/dev/null
16
- . "${NVM_DIR}/nvm.sh"
17
- NODE_BIN="$(command -v node 2>/dev/null || true)"
18
- fi
19
- fi
20
-
21
- if [[ -z "${NODE_BIN}" ]]; then
22
- echo "node is required to run the bundled codex-quota tool" >&2
23
- exit 1
24
- fi
25
-
26
- if [[ ! -f "${QUOTA_ENTRY}" ]]; then
27
- echo "bundled codex-quota entrypoint not found: ${QUOTA_ENTRY}" >&2
28
- exit 1
29
- fi
30
-
31
- exec "${NODE_BIN}" "${QUOTA_ENTRY}" "$@"
@@ -1,114 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
5
- # shellcheck source=/dev/null
6
- source "${SCRIPT_DIR}/flow-config-lib.sh"
7
-
8
- usage() {
9
- cat <<'EOF'
10
- Usage:
11
- create-follow-up-issue.sh --parent ISSUE_ID --title "Title" [--body "text" | --body-file path] [--label LABEL ...]
12
-
13
- Create a focused follow-up issue linked back to the umbrella issue. By default the
14
- new issue is left unlabeled so the scheduler can pick it up normally.
15
- EOF
16
- }
17
-
18
- FLOW_TOOLS_DIR="${SCRIPT_DIR}"
19
- CONFIG_YAML="$(resolve_flow_config_yaml "${BASH_SOURCE[0]}")"
20
- REPO_SLUG="$(flow_resolve_repo_slug "${CONFIG_YAML}")"
21
- UPDATE_LABELS_BIN="${UPDATE_LABELS_BIN:-${FLOW_TOOLS_DIR}/agent-github-update-labels}"
22
-
23
- parent_issue=""
24
- title=""
25
- body=""
26
- body_file=""
27
- labels=()
28
-
29
- while [[ $# -gt 0 ]]; do
30
- case "$1" in
31
- --parent)
32
- parent_issue="${2:-}"
33
- shift 2
34
- ;;
35
- --title)
36
- title="${2:-}"
37
- shift 2
38
- ;;
39
- --body)
40
- body="${2:-}"
41
- shift 2
42
- ;;
43
- --body-file)
44
- body_file="${2:-}"
45
- shift 2
46
- ;;
47
- --label)
48
- labels+=("${2:-}")
49
- shift 2
50
- ;;
51
- --help|-h)
52
- usage
53
- exit 0
54
- ;;
55
- *)
56
- echo "Unknown argument: $1" >&2
57
- usage >&2
58
- exit 1
59
- ;;
60
- esac
61
- done
62
-
63
- if [[ -z "$parent_issue" || -z "$title" ]]; then
64
- usage >&2
65
- exit 1
66
- fi
67
-
68
- if [[ -n "$body" && -n "$body_file" ]]; then
69
- echo "Provide either --body or --body-file, not both." >&2
70
- exit 1
71
- fi
72
-
73
- tmp_body_file="$(mktemp)"
74
- cleanup() {
75
- rm -f "$tmp_body_file"
76
- }
77
- trap cleanup EXIT
78
-
79
- {
80
- printf 'Parent issue: #%s\n\n' "$parent_issue"
81
- if [[ -n "$body_file" ]]; then
82
- cat "$body_file"
83
- elif [[ -n "$body" ]]; then
84
- printf '%s\n' "$body"
85
- else
86
- printf 'Follow-up slice decomposed from umbrella issue #%s.\n' "$parent_issue"
87
- fi
88
- } >"$tmp_body_file"
89
-
90
- issue_url="$(flow_github_issue_create "$REPO_SLUG" "$title" "$tmp_body_file")"
91
- issue_url="$(printf '%s' "$issue_url" | tail -n 1)"
92
- issue_number="$(sed -nE 's#.*/issues/([0-9]+)$#\1#p' <<<"$issue_url" | tail -n 1)"
93
-
94
- if [[ -z "$issue_number" ]]; then
95
- echo "Unable to determine created issue number from gh output: $issue_url" >&2
96
- exit 1
97
- fi
98
-
99
- if [[ ${#labels[@]} -gt 0 ]]; then
100
- update_args=()
101
- for label in "${labels[@]}"; do
102
- [[ -n "$label" ]] || continue
103
- update_args+=(--add "$label")
104
- done
105
- if [[ ${#update_args[@]} -gt 0 ]]; then
106
- bash "${UPDATE_LABELS_BIN}" \
107
- --repo-slug "$REPO_SLUG" \
108
- --number "$issue_number" \
109
- "${update_args[@]}" >/dev/null || true
110
- fi
111
- fi
112
-
113
- printf 'ISSUE_NUMBER=%s\n' "$issue_number"
114
- printf 'ISSUE_URL=%s\n' "$issue_url"
@@ -1,50 +0,0 @@
1
- #!/usr/bin/env bash
2
- set -euo pipefail
3
-
4
- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
5
- FLOW_SKILL_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
6
- HOME_DIR="${ACP_DASHBOARD_HOME_DIR:-${HOME:-}}"
7
- SOURCE_HOME="${ACP_DASHBOARD_SOURCE_HOME:-}"
8
- RUNTIME_HOME="${ACP_DASHBOARD_RUNTIME_HOME:-${HOME_DIR}/.agent-runtime/runtime-home}"
9
- PROFILE_REGISTRY_ROOT="${ACP_DASHBOARD_PROFILE_REGISTRY_ROOT:-${ACP_PROFILE_REGISTRY_ROOT:-${HOME_DIR}/.agent-runtime/control-plane/profiles}}"
10
- HOST="${ACP_DASHBOARD_HOST:-127.0.0.1}"
11
- PORT="${ACP_DASHBOARD_PORT:-8765}"
12
- BASE_PATH="${ACP_DASHBOARD_PATH:-/opt/homebrew/bin:/usr/bin:/bin:/usr/sbin:/sbin}"
13
- SYNC_SCRIPT="${ACP_DASHBOARD_SYNC_SCRIPT:-${FLOW_SKILL_DIR}/tools/bin/sync-shared-agent-home.sh}"
14
- ENSURE_SYNC_SCRIPT="${ACP_DASHBOARD_ENSURE_SYNC_SCRIPT:-${FLOW_SKILL_DIR}/tools/bin/ensure-runtime-sync.sh}"
15
- RUNTIME_SERVE_SCRIPT="${ACP_DASHBOARD_RUNTIME_SERVE_SCRIPT:-${RUNTIME_HOME}/skills/openclaw/agent-control-plane/tools/bin/serve-dashboard.sh}"
16
-
17
- if [[ -z "${HOME_DIR}" ]]; then
18
- echo "dashboard launchd bootstrap requires HOME or ACP_DASHBOARD_HOME_DIR" >&2
19
- exit 64
20
- fi
21
-
22
- export HOME="${HOME_DIR}"
23
- export PATH="${BASE_PATH}"
24
- export ACP_PROFILE_REGISTRY_ROOT="${PROFILE_REGISTRY_ROOT}"
25
- export PYTHONDONTWRITEBYTECODE=1
26
-
27
- if [[ ! -x "${ENSURE_SYNC_SCRIPT}" && ! -x "${SYNC_SCRIPT}" ]]; then
28
- echo "dashboard launchd bootstrap missing sync helper: ${ENSURE_SYNC_SCRIPT}" >&2
29
- exit 65
30
- fi
31
-
32
- if [[ -x "${ENSURE_SYNC_SCRIPT}" ]]; then
33
- ensure_args=(--runtime-home "${RUNTIME_HOME}" --quiet)
34
- if [[ -n "${SOURCE_HOME}" ]]; then
35
- ensure_args=(--source-home "${SOURCE_HOME}" "${ensure_args[@]}")
36
- fi
37
- bash "${ENSURE_SYNC_SCRIPT}" "${ensure_args[@]}"
38
- else
39
- if [[ -z "${SOURCE_HOME}" ]]; then
40
- SOURCE_HOME="${FLOW_SKILL_DIR}"
41
- fi
42
- bash "${SYNC_SCRIPT}" "${SOURCE_HOME}" "${RUNTIME_HOME}" >/dev/null
43
- fi
44
-
45
- if [[ ! -x "${RUNTIME_SERVE_SCRIPT}" ]]; then
46
- echo "dashboard launchd bootstrap missing runtime serve script: ${RUNTIME_SERVE_SCRIPT}" >&2
47
- exit 66
48
- fi
49
-
50
- exec bash "${RUNTIME_SERVE_SCRIPT}" --host "${HOST}" --port "${PORT}"