@tekyzinc/gsd-t 3.13.16 → 3.16.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +44 -0
- package/README.md +1 -0
- package/bin/gsd-t-benchmark-orchestrator.js +437 -0
- package/bin/gsd-t-capture-lint.cjs +276 -0
- package/bin/gsd-t-completion-check.cjs +106 -0
- package/bin/gsd-t-orchestrator-config.cjs +64 -0
- package/bin/gsd-t-orchestrator-queue.cjs +180 -0
- package/bin/gsd-t-orchestrator-recover.cjs +231 -0
- package/bin/gsd-t-orchestrator-worker.cjs +219 -0
- package/bin/gsd-t-orchestrator.js +534 -0
- package/bin/gsd-t-stream-feed-client.cjs +151 -0
- package/bin/gsd-t-task-brief-compactor.cjs +89 -0
- package/bin/gsd-t-task-brief-template.cjs +96 -0
- package/bin/gsd-t-task-brief.js +249 -0
- package/bin/gsd-t-token-backfill.cjs +366 -0
- package/bin/gsd-t-token-capture.cjs +306 -0
- package/bin/gsd-t-token-dashboard.cjs +318 -0
- package/bin/gsd-t-token-regenerate-log.cjs +129 -0
- package/bin/gsd-t-transcript-tee.cjs +246 -0
- package/bin/gsd-t-unattended-heartbeat.cjs +188 -0
- package/bin/gsd-t-unattended-platform.cjs +191 -27
- package/bin/gsd-t-unattended-safety.cjs +8 -1
- package/bin/gsd-t-unattended.cjs +192 -31
- package/bin/gsd-t.js +329 -2
- package/bin/supervisor-pid-fingerprint.cjs +126 -0
- package/commands/gsd-t-debug.md +63 -51
- package/commands/gsd-t-design-decompose.md +2 -7
- package/commands/gsd-t-doc-ripple.md +20 -11
- package/commands/gsd-t-execute.md +82 -50
- package/commands/gsd-t-integrate.md +43 -16
- package/commands/gsd-t-plan.md +20 -7
- package/commands/gsd-t-prd.md +19 -12
- package/commands/gsd-t-quick.md +64 -29
- package/commands/gsd-t-resume.md +51 -4
- package/commands/gsd-t-unattended.md +19 -20
- package/commands/gsd-t-verify.md +48 -32
- package/commands/gsd-t-visualize.md +19 -17
- package/commands/gsd-t-wave.md +29 -27
- package/docs/architecture.md +16 -0
- package/docs/m40-benchmark-report.md +35 -0
- package/docs/requirements.md +20 -0
- package/package.json +1 -1
- package/scripts/gsd-t-dashboard-server.js +291 -4
- package/scripts/gsd-t-dashboard.html +31 -1
- package/scripts/gsd-t-design-review-server.js +3 -1
- package/scripts/gsd-t-stream-feed-server.js +428 -0
- package/scripts/gsd-t-stream-feed.html +1168 -0
- package/scripts/gsd-t-token-aggregator.js +373 -0
- package/scripts/gsd-t-transcript.html +422 -0
- package/scripts/hooks/gsd-t-in-session-probe.js +62 -0
- package/scripts/hooks/pre-commit-capture-lint +26 -0
- package/templates/CLAUDE-global.md +69 -0
- package/scripts/gsd-t-agent-dashboard-server.js +0 -424
- package/scripts/gsd-t-agent-dashboard.html +0 -1043
|
@@ -0,0 +1,276 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
/**
|
|
3
|
+
* GSD-T Capture Lint (M41 D5)
|
|
4
|
+
*
|
|
5
|
+
* Scans files for bare subagent spawn patterns that bypass the
|
|
6
|
+
* `bin/gsd-t-token-capture.cjs` wrapper. Flags:
|
|
7
|
+
* - `Task(` without captureSpawn/recordSpawnRow within ±20 lines
|
|
8
|
+
* - `claude -p` executable lines without wrapper
|
|
9
|
+
* - `spawn('claude', ...)` / `spawn("claude", ...)` without wrapper
|
|
10
|
+
*
|
|
11
|
+
* Whitelisted:
|
|
12
|
+
* - The wrapper module itself (bin/gsd-t-token-capture.cjs)
|
|
13
|
+
* - The linter (bin/gsd-t-capture-lint.cjs) — meta-self
|
|
14
|
+
* - Anything under test/
|
|
15
|
+
* - Comment-only lines (//, #, <!--)
|
|
16
|
+
* - Markdown prose lines (only matches inside ``` fences are executable)
|
|
17
|
+
* - Any line carrying the literal `GSD-T-CAPTURE-LINT: skip` on the same
|
|
18
|
+
* or adjacent line
|
|
19
|
+
*
|
|
20
|
+
* Zero external deps. `.cjs` for ESM/CJS compat.
|
|
21
|
+
*/
|
|
22
|
+
|
|
23
|
+
const fs = require('fs');
|
|
24
|
+
const path = require('path');
|
|
25
|
+
const { execSync } = require('child_process');
|
|
26
|
+
|
|
27
|
+
// Patterns match tool-invocation syntax, not prose. `Task(s)` in a markdown
// table and `claude -p` inside a help-text string are false positives and
// are excluded by the shape matchers below.
const SPAWN_PATTERNS = [
  // Task({...}) or Task(\n — the Task subagent tool invocation shape.
  // Excludes Task(s), Task(X), Task(foo) plain-identifier calls which
  // are almost always docs/markdown.
  { name: 'Task(', re: /\bTask\(\s*(?:\{|$)/ },
  // spawn('claude', ...) / spawn("claude", ...)
  { name: "spawn('claude'", re: /\bspawn\(\s*['"]claude['"]\s*,/ },
  // `claude -p` as an actual shell command — not inside string literals
  // that describe it. Require it to appear outside quotes/backticks. This
  // is heuristic; the skip marker and comment-line guard catch the rest.
  { name: 'claude -p', re: /(?:^|\s|[;&|`$(])claude\s+-p\b/ },
];

// A spawn match is considered "captured" when one of these wrapper calls
// appears within CONTEXT_RADIUS lines of it (see _hasWrapperNearby).
const WRAPPER_PATTERNS = /\bcaptureSpawn\s*\(|\brecordSpawnRow\s*\(/;
// Literal opt-out marker: suppresses findings on the same or an adjacent line.
const SKIP_MARKER = 'GSD-T-CAPTURE-LINT: skip';
// Number of lines searched above and below a match for a wrapper call.
const CONTEXT_RADIUS = 20;
|
|
46
|
+
|
|
47
|
+
/**
 * True when `relPath` is exempt from capture linting: the wrapper module
 * itself, this linter, the pure-prose help command doc, or anything under
 * a test/ directory. Windows separators are normalised first.
 */
function _isWhitelistedPath(relPath) {
  const norm = relPath.replace(/\\/g, '/');
  const exemptSuffixes = [
    // The wrapper module itself.
    'bin/gsd-t-token-capture.cjs',
    // The linter (meta-self).
    'bin/gsd-t-capture-lint.cjs',
    // Pure-prose help reference: every `claude -p` / `Task(...)` mention in
    // it is documentation, not a live spawn.
    'commands/gsd-t-help.md',
  ];
  if (exemptSuffixes.some((suffix) => norm.endsWith(suffix))) return true;
  return norm.startsWith('test/') || norm.includes('/test/');
}
|
|
57
|
+
|
|
58
|
+
/**
 * True for lines that carry no executable content: blank lines and lines
 * whose trimmed text starts with a comment leader (//, #, <!--, or the `*`
 * continuation of a JSDoc block).
 */
function _isCommentOnlyLine(line) {
  const trimmed = line.trim();
  if (trimmed === '') return true;
  const COMMENT_LEADERS = ['//', '#', '<!--', '*'];
  return COMMENT_LEADERS.some((leader) => trimmed.startsWith(leader));
}
|
|
67
|
+
|
|
68
|
+
/**
 * Check whether `match` starts inside a string literal (single, double, or
 * backtick-quoted) on `line`. Conservative: only flags balanced-quote cases.
 * This catches help-text strings like `log("... claude -p ...")` without
 * needing a full JS parser.
 */
function _matchInsideStringLiteral(line, match) {
  const idx = line.indexOf(match);
  if (idx < 0) return false;
  const prefix = line.slice(0, idx);
  // An odd number of unescaped quote characters before the match means the
  // match sits inside an open string of that quote style.
  for (const quote of ['"', "'", '`']) {
    let opens = 0;
    for (let i = 0; i < prefix.length; i++) {
      if (prefix[i] === quote && prefix[i - 1] !== '\\') opens += 1;
    }
    if (opens % 2 === 1) return true;
  }
  return false;
}
|
|
91
|
+
|
|
92
|
+
/**
 * Build a per-line "executable" map. For markdown files, only lines inside
 * ``` fenced code blocks count as executable (the fence delimiter lines
 * themselves do not). For non-markdown files every line is executable, so
 * fence tracking is a no-op.
 */
function _buildFenceMap(lines, isMarkdown) {
  if (!isMarkdown) return new Array(lines.length).fill(true);
  const executable = [];
  let insideFence = false;
  for (const line of lines) {
    if (line.trim().startsWith('```')) {
      // Fence delimiters toggle state and are never executable themselves.
      insideFence = !insideFence;
      executable.push(false);
    } else {
      executable.push(insideFence);
    }
  }
  return executable;
}
|
|
112
|
+
|
|
113
|
+
/**
 * True when a captureSpawn/recordSpawnRow wrapper call appears within
 * CONTEXT_RADIUS lines (in either direction) of `lines[idx]`.
 */
function _hasWrapperNearby(lines, idx) {
  const start = Math.max(0, idx - CONTEXT_RADIUS);
  const end = Math.min(lines.length - 1, idx + CONTEXT_RADIUS);
  return lines.slice(start, end + 1).some((candidate) => WRAPPER_PATTERNS.test(candidate));
}
|
|
121
|
+
|
|
122
|
+
/**
 * True when the literal skip marker appears on `lines[idx]` or on a line
 * immediately adjacent to it.
 */
function _hasSkipMarkerNearby(lines, idx) {
  const neighborhood = lines.slice(Math.max(0, idx - 1), Math.min(lines.length, idx + 2));
  return neighborhood.some((candidate) => candidate.includes(SKIP_MARKER));
}
|
|
130
|
+
|
|
131
|
+
/**
 * Lint a single file for bare subagent-spawn patterns.
 *
 * Skips whitelisted paths, unreadable files, markdown prose outside ```
 * fences, comment-only lines, and lines with the skip marker on the same
 * or an adjacent line. A matching line is a violation only when no
 * captureSpawn/recordSpawnRow call appears within CONTEXT_RADIUS lines.
 *
 * @param {string} absPath absolute path of the file to lint
 * @param {string} projectDir project root, used to derive the reported relative path
 * @returns {Array<{file, line, pattern, message}>} violations (line is 1-based)
 */
function lintFile(absPath, projectDir) {
  const relPath = path.relative(projectDir, absPath);
  if (_isWhitelistedPath(relPath)) return [];

  let src;
  try {
    src = fs.readFileSync(absPath, 'utf8');
  } catch (_) {
    // Unreadable (deleted, permissions, etc.): nothing to lint.
    return [];
  }

  const lines = src.split('\n');
  const isMarkdown = absPath.endsWith('.md');
  // Per-line executability: all-true for code files, fence-gated for markdown.
  const executable = _buildFenceMap(lines, isMarkdown);

  const violations = [];
  for (let i = 0; i < lines.length; i++) {
    if (!executable[i]) continue;
    const line = lines[i];
    if (_isCommentOnlyLine(line)) continue;
    if (_hasSkipMarkerNearby(lines, i)) continue;

    for (const { name, re } of SPAWN_PATTERNS) {
      const m = line.match(re);
      if (m) {
        // Skip prose mentions inside string literals (help text, log strings).
        // Markdown files already gate via fence map; only apply the quote
        // heuristic to JS/CJS source where help-text strings live.
        if (!isMarkdown && _matchInsideStringLiteral(line, m[0])) break;
        if (!_hasWrapperNearby(lines, i)) {
          violations.push({
            file: relPath,
            line: i + 1, // 1-based for editor-friendly reporting
            pattern: name,
            message: `bare ${name} spawn without captureSpawn/recordSpawnRow wrapper`,
          });
        }
        // At most one finding per line: stop at the first pattern that matches.
        break;
      }
    }
  }

  return violations;
}
|
|
181
|
+
|
|
182
|
+
/**
 * Lint a list of paths and aggregate the findings.
 * Missing paths and non-file entries (directories, sockets) are ignored.
 * @param {string[]} paths Absolute or relative to projectDir
 * @param {object} opts
 * @param {string} opts.projectDir
 * @returns {{violations: Array}}
 */
function lintFiles(paths, opts) {
  const projectDir = (opts && opts.projectDir) || process.cwd();
  const violations = [];
  for (const candidate of paths) {
    const abs = path.isAbsolute(candidate) ? candidate : path.join(projectDir, candidate);
    if (!fs.existsSync(abs)) continue;
    if (!fs.statSync(abs).isFile()) continue;
    for (const violation of lintFile(abs, projectDir)) {
      violations.push(violation);
    }
  }
  return { violations };
}
|
|
201
|
+
|
|
202
|
+
/**
 * List git-staged files under commands/, bin/, or scripts/.
 * Returns [] when git is unavailable or the command fails (e.g. not a repo).
 */
function _listStaged(projectDir) {
  let raw;
  try {
    raw = execSync('git diff --name-only --cached', { cwd: projectDir, encoding: 'utf8' });
  } catch (_) {
    // Best-effort: a missing repo just means nothing is staged.
    return [];
  }
  const staged = [];
  for (const line of raw.split('\n')) {
    const name = line.trim();
    if (name !== '' && /^(commands|bin|scripts)\//.test(name)) staged.push(name);
  }
  return staged;
}
|
|
215
|
+
|
|
216
|
+
/**
 * List every lintable file in the project: commands/*.md, bin/*.{js,cjs},
 * and scripts/*.{js,cjs,html}. Returned paths are relative to projectDir.
 * Entries that disappear between readdir and stat are skipped silently.
 */
function _listAll(projectDir) {
  const SCAN_TARGETS = [
    { dir: 'commands', exts: ['.md'] },
    { dir: 'bin', exts: ['.js', '.cjs'] },
    { dir: 'scripts', exts: ['.js', '.cjs', '.html'] },
  ];
  const found = [];
  for (const { dir, exts } of SCAN_TARGETS) {
    const absDir = path.join(projectDir, dir);
    if (!fs.existsSync(absDir)) continue;
    for (const entry of fs.readdirSync(absDir)) {
      const absEntry = path.join(absDir, entry);
      let stat;
      try {
        stat = fs.statSync(absEntry);
      } catch (_) {
        continue; // raced with a delete; ignore
      }
      if (!stat.isFile()) continue;
      if (exts.some((ext) => entry.endsWith(ext))) {
        found.push(path.join(dir, entry));
      }
    }
  }
  return found;
}
|
|
240
|
+
|
|
241
|
+
/**
 * CLI entry point.
 * Selects the file set ('staged' = git-staged files under commands/bin/scripts;
 * 'all' = full directory scan), lints them, and maps the outcome to an exit
 * code: 0 clean, 1 violations found, 2 file-listing error.
 * @param {object} opts
 * @param {string} opts.projectDir
 * @param {'staged'|'all'} opts.mode defaults to 'staged'
 * @returns {{exitCode: number, violations: Array, files: string[]}}
 */
function main(opts) {
  const projectDir = opts.projectDir || process.cwd();
  const mode = opts.mode || 'staged';
  let files;
  try {
    files = mode === 'all' ? _listAll(projectDir) : _listStaged(projectDir);
  } catch (e) {
    // Listing failure is an infrastructure error, not a lint failure —
    // distinct exit code 2 lets hooks tell the two apart.
    return { exitCode: 2, violations: [], files: [], error: e.message || String(e) };
  }
  const { violations } = lintFiles(files, { projectDir });
  return {
    exitCode: violations.length === 0 ? 0 : 1,
    violations,
    files,
  };
}
|
|
264
|
+
|
|
265
|
+
// Public API (lintFile / lintFiles / main) plus underscore-prefixed internals
// and the pattern table, exported so unit tests can exercise them directly.
module.exports = {
  lintFile,
  lintFiles,
  main,
  SPAWN_PATTERNS,
  _isWhitelistedPath,
  _isCommentOnlyLine,
  _buildFenceMap,
  _hasWrapperNearby,
  _hasSkipMarkerNearby,
  _matchInsideStringLiteral,
};
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const { execSync } = require('child_process');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
|
|
7
|
+
/**
 * Run a shell command synchronously and return its stdout as a UTF-8 string.
 * stdin is ignored; stdout/stderr are captured. Throws on non-zero exit.
 */
function run(cmd, cwd) {
  const options = { cwd, encoding: 'utf8', stdio: ['ignore', 'pipe', 'pipe'] };
  return execSync(cmd, options);
}
|
|
10
|
+
|
|
11
|
+
/**
 * Run a shell command without throwing.
 * @returns {{ok: boolean, out: string, code: number}} on failure, `out` is
 * the concatenated captured stdout+stderr and `code` is the exit status
 * (-1 when the process never produced one, e.g. spawn failure or signal).
 */
function tryRun(cmd, cwd) {
  try {
    const out = run(cmd, cwd);
    return { ok: true, out, code: 0 };
  } catch (err) {
    const out = (err.stdout || '') + (err.stderr || '');
    const code = err.status == null ? -1 : err.status;
    return { ok: false, out, code };
  }
}
|
|
18
|
+
|
|
19
|
+
/**
 * Compile a minimal glob into an anchored RegExp.
 * `**` matches across path separators, `*` matches within one segment,
 * `?` matches a single non-separator character; everything else is literal.
 */
function globToRegex(pattern) {
  const DOUBLE_STAR_TOKEN = '::DBLSTAR::';
  // Escape regex metacharacters first so only glob syntax stays special.
  let body = pattern.replace(/[.+^${}()|[\]\\]/g, '\\$&');
  // Protect `**` before expanding single `*`, then restore it as `.*`.
  body = body
    .replace(/\*\*/g, DOUBLE_STAR_TOKEN)
    .replace(/\*/g, '[^/]*')
    .replace(new RegExp(DOUBLE_STAR_TOKEN, 'g'), '.*')
    .replace(/\?/g, '[^/]');
  return new RegExp('^' + body + '$');
}
|
|
24
|
+
|
|
25
|
+
/**
 * True when filePath matches at least one glob pattern.
 * An empty/missing pattern list matches everything (vacuous ownership).
 */
function matchesAny(filePath, patterns) {
  if (!patterns || patterns.length === 0) return true;
  for (const pattern of patterns) {
    if (globToRegex(pattern).test(filePath)) return true;
  }
  return false;
}
|
|
29
|
+
|
|
30
|
+
/**
 * Verify that a worker actually completed its task: a matching commit exists
 * on the expected branch since taskStart, a progress.md entry was written,
 * `npm test` passes (unless skipTest), and no owned files are left dirty.
 *
 * @param {object} opts
 * @param {string} opts.taskId task identifier expected to prefix commit subjects
 * @param {string} opts.projectDir repo root the git/npm commands run in
 * @param {string} opts.expectedBranch branch whose log is searched for the commit
 * @param {string} opts.taskStart timestamp passed to `git log --since`
 * @param {boolean} [opts.skipTest=false] skip the `npm test` gate
 * @param {string[]} [opts.ownedPatterns=[]] globs owned by the task; empty means all files
 * @returns {{ok: boolean, missing: string[], details: object}}
 * @throws {Error} when a required option is absent
 */
function assertCompletion(opts) {
  const {
    taskId,
    projectDir,
    expectedBranch,
    taskStart,
    skipTest = false,
    ownedPatterns = []
  } = opts || {};

  if (!taskId || !projectDir || !expectedBranch || !taskStart) {
    throw new Error('assertCompletion requires taskId, projectDir, expectedBranch, taskStart');
  }

  // Accumulators: `missing` collects failed-check tags, `details` the evidence.
  const missing = [];
  const details = {};

  // NOTE(review): currentBranch is recorded for diagnostics but never compared
  // against expectedBranch — no `missing` entry is pushed on mismatch. Confirm
  // whether that comparison was intended.
  const branchRes = tryRun('git branch --show-current', projectDir);
  const currentBranch = branchRes.ok ? branchRes.out.trim() : '';
  details.currentBranch = currentBranch;

  // JSON.stringify doubles as shell quoting for the timestamp/branch args.
  const sinceArg = JSON.stringify(taskStart);
  const logCmd = `git log ${JSON.stringify(expectedBranch)} --since=${sinceArg} --pretty=format:%H%x00%s`;
  const logRes = tryRun(logCmd, projectDir);
  const commits = [];
  // Commit subject must start with the (regex-escaped) taskId followed by a
  // word boundary or ':' — e.g. "web:T3: add login".
  const taskIdRe = new RegExp('^' + taskId.replace(/[.+^${}()|[\]\\]/g, '\\$&') + '(\\b|:)');
  if (logRes.ok) {
    for (const line of logRes.out.split('\n')) {
      if (!line) continue;
      // NUL-separated `%H%x00%s` output: sha, then subject.
      const [sha, subject] = line.split('\x00');
      if (subject && taskIdRe.test(subject)) commits.push({ sha, subject });
    }
  }
  details.commits = commits;
  if (commits.length === 0) missing.push('no_commit_on_branch');

  // Check for a dated progress entry mentioning the taskId, e.g.
  // "- 2024-01-31 14:05 ... web:T3 ...". First match wins.
  const progressPath = path.join(projectDir, '.gsd-t', 'progress.md');
  let progressEntry = null;
  if (fs.existsSync(progressPath)) {
    const body = fs.readFileSync(progressPath, 'utf8');
    const lines = body.split('\n');
    const entryRe = new RegExp('^- \\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}.*' + taskId.replace(/[.+^${}()|[\]\\]/g, '\\$&'));
    for (const line of lines) {
      if (entryRe.test(line)) {
        progressEntry = line;
        break;
      }
    }
  }
  details.progressEntry = progressEntry;
  if (!progressEntry) missing.push('no_progress_entry');

  if (!skipTest) {
    const testRes = tryRun('npm test --silent', projectDir);
    details.testExitCode = testRes.code;
    // Keep only the tail of the output — enough to diagnose, bounded in size.
    details.testOutput = (testRes.out || '').slice(-2000);
    if (!testRes.ok) missing.push('tests_failed');
  } else {
    details.testSkipped = true;
  }

  // Dirty working tree: any modified/untracked file matching the task's owned
  // patterns means the worker forgot to commit something.
  const statusRes = tryRun('git status --porcelain', projectDir);
  const uncommitted = [];
  if (statusRes.ok) {
    for (const line of statusRes.out.split('\n')) {
      if (!line.trim()) continue;
      // Porcelain format: two status chars + space, then the path.
      const filePath = line.slice(3).trim();
      if (matchesAny(filePath, ownedPatterns)) uncommitted.push(filePath);
    }
  }
  details.uncommitted = uncommitted;
  if (uncommitted.length > 0) missing.push('uncommitted_owned_changes');

  return { ok: missing.length === 0, missing, details };
}
|
|
105
|
+
|
|
106
|
+
// Sole public entry point; run/tryRun/globToRegex/matchesAny are module-private.
module.exports = { assertCompletion };
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
|
|
6
|
+
// Baseline orchestrator settings. Overridden, in order, by
// .gsd-t/orchestrator.config.json, then CLI flags, then environment
// variables (see loadConfig). Frozen so callers cannot mutate the baseline.
const DEFAULTS = Object.freeze({
  maxParallel: 3,
  workerTimeoutMs: 270000, // 4.5 minutes
  retryOnFail: true,
  haltOnSecondFail: true
});

// Hard upper bound on worker parallelism (Team Mode §15); loadConfig rejects
// any configuration above this regardless of source.
const MAX_PARALLEL_CEILING = 15;
|
|
14
|
+
|
|
15
|
+
/**
 * Read .gsd-t/orchestrator.config.json under projectDir.
 * Returns {} when the file is absent; throws with a descriptive message
 * when it exists but is not valid JSON.
 */
function loadConfigFile(projectDir) {
  const configPath = path.join(projectDir, '.gsd-t', 'orchestrator.config.json');
  if (!fs.existsSync(configPath)) return {};
  try {
    const raw = fs.readFileSync(configPath, 'utf8');
    return JSON.parse(raw);
  } catch (err) {
    throw new Error(`orchestrator.config.json parse error: ${err.message}`);
  }
}
|
|
24
|
+
|
|
25
|
+
/**
 * Parse a config value as a non-negative integer.
 * Accepts an actual integer number (>= 0) or a string of ASCII digits;
 * anything else (floats, negatives, '', null, '12px') throws with the
 * offending option name in the message.
 * @param {*} v raw value from file/CLI/env
 * @param {string} name option name used in the error message
 * @returns {number}
 * @throws {Error} when v is not a non-negative integer
 */
function parseIntStrict(v, name) {
  // Bug fix: the numeric branch previously accepted negative integers even
  // though the contract (and the error message) promise non-negative —
  // mirroring the /^\d+$/ string branch, which already rejects them.
  if (typeof v === 'number' && Number.isInteger(v) && v >= 0) return v;
  if (typeof v === 'string' && /^\d+$/.test(v)) return parseInt(v, 10);
  throw new Error(`${name} must be a non-negative integer, got ${JSON.stringify(v)}`);
}
|
|
30
|
+
|
|
31
|
+
/**
 * Resolve the effective orchestrator config.
 * Precedence (lowest to highest): DEFAULTS → orchestrator.config.json →
 * CLI flags → environment variables.
 * NOTE(review): environment variables override CLI flags here, the reverse
 * of the common "CLI wins" convention — confirm this ordering is intended.
 * @param {object} opts
 * @param {string} opts.projectDir project root (required)
 * @param {object} [opts.cliFlags] parsed CLI flag values
 * @param {object} [opts.env] environment map (defaults to process.env)
 * @returns {object} merged config with projectDir attached
 * @throws {Error} on config-file parse errors or validation failures
 */
function loadConfig(opts) {
  const { projectDir, cliFlags = {}, env = process.env } = opts || {};
  if (!projectDir) throw new Error('loadConfig requires projectDir');

  const fileCfg = loadConfigFile(projectDir);
  const merged = { ...DEFAULTS, ...fileCfg };

  // CLI flag overrides; numeric flags are strictly validated.
  if (cliFlags.maxParallel != null) merged.maxParallel = parseIntStrict(cliFlags.maxParallel, '--max-parallel');
  if (cliFlags.workerTimeoutMs != null) merged.workerTimeoutMs = parseIntStrict(cliFlags.workerTimeoutMs, '--worker-timeout');
  if (cliFlags.retryOnFail != null) merged.retryOnFail = !!cliFlags.retryOnFail;
  if (cliFlags.haltOnSecondFail != null) merged.haltOnSecondFail = !!cliFlags.haltOnSecondFail;

  // Env overrides; empty-string values are treated as unset.
  if (env.GSD_T_MAX_PARALLEL != null && env.GSD_T_MAX_PARALLEL !== '') {
    merged.maxParallel = parseIntStrict(env.GSD_T_MAX_PARALLEL, 'GSD_T_MAX_PARALLEL');
  }
  if (env.GSD_T_WORKER_TIMEOUT_MS != null && env.GSD_T_WORKER_TIMEOUT_MS !== '') {
    merged.workerTimeoutMs = parseIntStrict(env.GSD_T_WORKER_TIMEOUT_MS, 'GSD_T_WORKER_TIMEOUT_MS');
  }

  // Validate after all overrides so the limits apply to the final values.
  if (merged.maxParallel < 1) {
    throw new Error(`maxParallel must be >= 1, got ${merged.maxParallel}`);
  }
  if (merged.maxParallel > MAX_PARALLEL_CEILING) {
    throw new Error(`maxParallel ${merged.maxParallel} exceeds Team Mode §15 ceiling (${MAX_PARALLEL_CEILING})`);
  }
  if (merged.workerTimeoutMs < 1000) {
    throw new Error(`workerTimeoutMs must be >= 1000, got ${merged.workerTimeoutMs}`);
  }

  merged.projectDir = projectDir;
  return merged;
}
|
|
63
|
+
|
|
64
|
+
// DEFAULTS and MAX_PARALLEL_CEILING are exported for tests and for callers
// that display the configured limits.
module.exports = { loadConfig, DEFAULTS, MAX_PARALLEL_CEILING };
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
|
|
6
|
+
// Wave assigned to tasks whose `- **Wave**:` field is absent.
const DEFAULT_WAVE = 0;
|
|
7
|
+
|
|
8
|
+
/**
 * Discover per-domain tasks.md files under .gsd-t/domains/<domain>/tasks.md.
 * Domain directories without a tasks.md are skipped. Results are sorted by
 * domain name for deterministic ordering.
 * @returns {Array<{domain: string, file: string}>}
 */
function listDomainTaskFiles(projectDir) {
  const domainsDir = path.join(projectDir, '.gsd-t', 'domains');
  if (!fs.existsSync(domainsDir)) return [];
  const found = [];
  const entries = fs.readdirSync(domainsDir, { withFileTypes: true });
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const tasksFile = path.join(domainsDir, entry.name, 'tasks.md');
    if (fs.existsSync(tasksFile)) {
      found.push({ domain: entry.name, file: tasksFile });
    }
  }
  found.sort((a, b) => a.domain.localeCompare(b.domain));
  return found;
}
|
|
19
|
+
|
|
20
|
+
/**
 * Parse a `- **Files**:` field into a deduplicated list of glob patterns.
 * Preferred form is backtick-quoted paths; a trailing '/' is normalised to
 * a '/**' whole-directory glob. When no backticked tokens are present, fall
 * back to comma/semicolon-separated plain text with parenthetical notes
 * stripped and 'none'/'n/a' ignored.
 */
function parseFilesField(text) {
  const seen = new Set();
  const patterns = [];
  const add = (p) => {
    if (seen.has(p)) return;
    seen.add(p);
    patterns.push(p);
  };

  for (const m of text.matchAll(/`([^`]+)`/g)) {
    const raw = m[1].trim();
    if (raw === '') continue;
    add(raw.replace(/\/$/, '/**'));
  }
  if (patterns.length > 0) return patterns;

  for (const part of text.split(/[,;]/)) {
    const raw = part.replace(/\([^)]*\)/g, '').trim();
    if (raw && !/^(none|n\/a)$/i.test(raw)) add(raw);
  }
  return patterns;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Parse one domain's tasks.md into task records.
 * Recognised line shapes (case-insensitive):
 *   `### Task <n>: <title>`       — starts a new task
 *   `- **Wave**: <n>`             — wave number (defaults to DEFAULT_WAVE)
 *   `- **Dependencies**: <text>`  — parsed via parseDependencies
 *   `- **Files**: <text>`         — parsed via parseFilesField
 * Field bullets appearing before the first task header are ignored.
 * @param {string} markdown tasks.md content
 * @param {string} domain domain name, used to build ids like `web:T3`
 * @returns {Array<object>} tasks in file order
 */
function parseTasksFile(markdown, domain) {
  const tasks = [];
  const lines = markdown.split(/\r?\n/);

  let current = null;

  // Flush the in-progress task, filling defaults for any absent fields.
  const commitCurrent = () => {
    if (current) {
      if (current.wave == null) current.wave = DEFAULT_WAVE;
      if (!current.dependencies) current.dependencies = [];
      if (!current.ownedPatterns) current.ownedPatterns = [];
      tasks.push(current);
    }
    current = null;
  };

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const taskHeader = line.match(/^###\s+Task\s+(\d+)\s*:\s*(.+?)\s*$/i);
    if (taskHeader) {
      // A new header implicitly closes the previous task.
      commitCurrent();
      current = {
        domain,
        id: `${domain}:T${taskHeader[1]}`,
        taskNum: parseInt(taskHeader[1], 10),
        title: taskHeader[2].trim(),
        wave: null,
        dependencies: [],
        ownedPatterns: []
      };
      continue;
    }
    // Field bullets only count when a task is open.
    if (!current) continue;

    const waveMatch = line.match(/^\s*-\s*\*\*Wave\*\*\s*:\s*(\d+)\s*$/i);
    if (waveMatch) {
      current.wave = parseInt(waveMatch[1], 10);
      continue;
    }

    const depMatch = line.match(/^\s*-\s*\*\*Dependencies\*\*\s*:\s*(.+?)\s*$/i);
    if (depMatch) {
      current.dependencies = parseDependencies(depMatch[1], domain);
      continue;
    }

    const filesMatch = line.match(/^\s*-\s*\*\*Files\*\*\s*:\s*(.+?)\s*$/i);
    if (filesMatch) {
      current.ownedPatterns = parseFilesField(filesMatch[1]);
      continue;
    }
  }
  // Flush the final task (no trailing header closes it).
  commitCurrent();
  return tasks;
}
|
|
97
|
+
|
|
98
|
+
function parseDependencies(text, domain) {
|
|
99
|
+
const trimmed = text.trim();
|
|
100
|
+
if (!trimmed || /^none$/i.test(trimmed) || /^n\/a$/i.test(trimmed)) return [];
|
|
101
|
+
|
|
102
|
+
const deps = [];
|
|
103
|
+
const seen = new Set();
|
|
104
|
+
|
|
105
|
+
const push = (id) => {
|
|
106
|
+
if (!seen.has(id)) { seen.add(id); deps.push(id); }
|
|
107
|
+
};
|
|
108
|
+
|
|
109
|
+
const sameDomainTaskRe = /\bTask\s+(\d+)\b/gi;
|
|
110
|
+
const crossDomainRe = /\b([a-z][a-z0-9-]+)\s+Task\s+(\d+)\b/gi;
|
|
111
|
+
|
|
112
|
+
let m;
|
|
113
|
+
while ((m = crossDomainRe.exec(trimmed)) !== null) {
|
|
114
|
+
const prefix = m[1].toLowerCase();
|
|
115
|
+
if (prefix === 'requires' || prefix === 'blocked') continue;
|
|
116
|
+
push(`${prefix}:T${m[2]}`);
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
const crossIds = new Set(deps);
|
|
120
|
+
sameDomainTaskRe.lastIndex = 0;
|
|
121
|
+
while ((m = sameDomainTaskRe.exec(trimmed)) !== null) {
|
|
122
|
+
const id = `${domain}:T${m[1]}`;
|
|
123
|
+
const crossMatch = [...crossIds].some((d) => d.endsWith(`:T${m[1]}`) && d !== id);
|
|
124
|
+
if (!crossMatch) push(id);
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
return deps;
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
/**
 * Read and parse every domain's tasks.md under projectDir, returning the
 * concatenated task list (domains in sorted order, tasks in file order).
 */
function readAllTasks(projectDir) {
  const tasks = [];
  for (const { domain, file } of listDomainTaskFiles(projectDir)) {
    const markdown = fs.readFileSync(file, 'utf8');
    for (const task of parseTasksFile(markdown, domain)) {
      tasks.push(task);
    }
  }
  return tasks;
}
|
|
138
|
+
|
|
139
|
+
/**
 * Group tasks into a Map keyed by wave number, with keys in ascending order.
 * Tasks with a null/undefined wave fall into DEFAULT_WAVE.
 */
function groupByWave(tasks) {
  const buckets = new Map();
  for (const task of tasks) {
    const wave = task.wave == null ? DEFAULT_WAVE : task.wave;
    const bucket = buckets.get(wave);
    if (bucket) {
      bucket.push(task);
    } else {
      buckets.set(wave, [task]);
    }
  }
  const ordered = [...buckets.entries()].sort((a, b) => a[0] - b[0]);
  return new Map(ordered);
}
|
|
148
|
+
|
|
149
|
+
/**
 * Reject schedules where a task depends on a task in a LATER wave (which
 * could never have run yet). Dependencies on unknown ids are ignored —
 * cross-file refs are resolved elsewhere.
 * @returns {true} when valid
 * @throws {Error} with an `errors` string[] property listing every violation
 */
function validateNoForwardDeps(tasks) {
  const byId = new Map();
  for (const task of tasks) byId.set(task.id, task);

  const errors = [];
  for (const task of tasks) {
    for (const depId of task.dependencies) {
      const dep = byId.get(depId);
      if (dep === undefined) continue;
      if (dep.wave > task.wave) {
        errors.push(
          `Task ${task.id} (wave ${task.wave}) depends on ${depId} (wave ${dep.wave}) — forward cross-wave dependency not allowed`
        );
      }
    }
  }

  if (errors.length > 0) {
    const err = new Error(`Wave dependency validation failed:\n ${errors.join('\n ')}`);
    err.errors = errors;
    throw err;
  }
  return true;
}
|
|
170
|
+
|
|
171
|
+
// Queue-building pipeline: discover task files → parse → group by wave →
// validate. Parse helpers are exported individually for unit tests.
module.exports = {
  DEFAULT_WAVE,
  listDomainTaskFiles,
  parseTasksFile,
  parseDependencies,
  parseFilesField,
  readAllTasks,
  groupByWave,
  validateNoForwardDeps
};
|