@tekyzinc/gsd-t 3.15.10 → 3.16.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/gsd-t-orchestrator-worker.cjs +35 -3
- package/bin/gsd-t-token-capture.cjs +24 -3
- package/bin/gsd-t-token-regenerate-log.cjs +129 -0
- package/bin/gsd-t-transcript-tee.cjs +246 -0
- package/bin/gsd-t-unattended-heartbeat.cjs +188 -0
- package/bin/gsd-t-unattended-platform.cjs +191 -27
- package/bin/gsd-t-unattended-safety.cjs +8 -1
- package/bin/gsd-t-unattended.cjs +218 -38
- package/bin/gsd-t.js +15 -1
- package/bin/supervisor-pid-fingerprint.cjs +126 -0
- package/commands/gsd-t-resume.md +18 -4
- package/docs/architecture.md +16 -0
- package/package.json +1 -1
- package/scripts/gsd-t-dashboard-server.js +291 -4
- package/scripts/gsd-t-dashboard.html +31 -1
- package/scripts/gsd-t-transcript.html +422 -0
- package/scripts/hooks/gsd-t-in-session-probe.js +62 -0
|
@@ -3,6 +3,7 @@
|
|
|
3
3
|
const { spawn } = require('child_process');
|
|
4
4
|
const path = require('path');
|
|
5
5
|
const { assertCompletion } = require('./gsd-t-completion-check.cjs');
|
|
6
|
+
const transcriptTee = require('./gsd-t-transcript-tee.cjs');
|
|
6
7
|
|
|
7
8
|
const DEFAULT_CLAUDE_BIN = 'claude';
|
|
8
9
|
|
|
@@ -71,19 +72,44 @@ function runWorker(opts) {
|
|
|
71
72
|
const bin = pickClaudeBin(env);
|
|
72
73
|
const args = buildArgs(task);
|
|
73
74
|
|
|
75
|
+
// M42 D1 — allocate a spawn-id and open the transcript registry entry
|
|
76
|
+
const parentSpawnId = (opts && opts.parentSpawnId) || env.GSD_T_SPAWN_ID || null;
|
|
77
|
+
const spawnId = (opts && opts.spawnId) || transcriptTee.allocateSpawnId({ parentId: parentSpawnId });
|
|
78
|
+
let transcriptOpened = false;
|
|
79
|
+
try {
|
|
80
|
+
transcriptTee.openTranscript({
|
|
81
|
+
spawnId,
|
|
82
|
+
projectDir: config.projectDir,
|
|
83
|
+
meta: {
|
|
84
|
+
parentId: parentSpawnId,
|
|
85
|
+
command: 'orchestrator-worker',
|
|
86
|
+
description: `task=${task.id} domain=${task.domain || '-'} wave=${task.wave == null ? '-' : task.wave}`,
|
|
87
|
+
model: (task && task.model) || 'sonnet',
|
|
88
|
+
},
|
|
89
|
+
});
|
|
90
|
+
transcriptOpened = true;
|
|
91
|
+
} catch (_) { /* tee is best-effort */ }
|
|
92
|
+
|
|
74
93
|
emitBoundary(onFrame, task, 'start');
|
|
75
94
|
|
|
76
95
|
return new Promise((resolve) => {
|
|
77
96
|
const child = spawnImpl(bin, args, {
|
|
78
97
|
cwd: config.projectDir,
|
|
79
|
-
env: { ...env, GSD_T_PROJECT_DIR: config.projectDir },
|
|
98
|
+
env: { ...env, GSD_T_PROJECT_DIR: config.projectDir, GSD_T_SPAWN_ID: spawnId },
|
|
80
99
|
stdio: ['pipe', 'pipe', 'pipe']
|
|
81
100
|
});
|
|
82
101
|
|
|
83
102
|
const workerPid = child && typeof child.pid === 'number' ? child.pid : null;
|
|
84
103
|
emitBoundary(onFrame, task, 'pid', { workerPid });
|
|
104
|
+
if (transcriptOpened && workerPid != null) {
|
|
105
|
+
try {
|
|
106
|
+
const idx = transcriptTee._readIndex(config.projectDir);
|
|
107
|
+
const i = idx.spawns.findIndex((s) => s.spawnId === spawnId);
|
|
108
|
+
if (i >= 0) { idx.spawns[i].workerPid = workerPid; transcriptTee._writeIndex(config.projectDir, idx); }
|
|
109
|
+
} catch (_) {}
|
|
110
|
+
}
|
|
85
111
|
if (typeof opts.onSpawn === 'function') {
|
|
86
|
-
try { opts.onSpawn({ child, pid: workerPid }); } catch (_) {}
|
|
112
|
+
try { opts.onSpawn({ child, pid: workerPid, spawnId }); } catch (_) {}
|
|
87
113
|
}
|
|
88
114
|
|
|
89
115
|
let stdoutBuf = '';
|
|
@@ -92,6 +118,9 @@ function runWorker(opts) {
|
|
|
92
118
|
let killTimer = null;
|
|
93
119
|
|
|
94
120
|
const handleLine = (line) => {
|
|
121
|
+
if (transcriptOpened) {
|
|
122
|
+
try { transcriptTee.appendFrame({ spawnId, projectDir: config.projectDir, frame: line }); } catch (_) {}
|
|
123
|
+
}
|
|
95
124
|
try {
|
|
96
125
|
const frame = JSON.parse(line);
|
|
97
126
|
if (typeof onFrame === 'function') onFrame(frame);
|
|
@@ -171,7 +200,10 @@ function runWorker(opts) {
|
|
|
171
200
|
}
|
|
172
201
|
|
|
173
202
|
emitBoundary(onFrame, task, result.ok ? 'done' : 'failed', { exitCode, durationMs, workerPid });
|
|
174
|
-
|
|
203
|
+
if (transcriptOpened) {
|
|
204
|
+
try { transcriptTee.closeTranscript({ spawnId, projectDir: config.projectDir, status: result.ok ? 'done' : 'failed' }); } catch (_) {}
|
|
205
|
+
}
|
|
206
|
+
resolve({ result, exitCode, durationMs, timedOut, stderr: stderrBuf, workerPid, spawnId });
|
|
175
207
|
});
|
|
176
208
|
|
|
177
209
|
child.stdin.on('error', () => { /* ignore — covered by child exit */ });
|
|
@@ -20,7 +20,7 @@
|
|
|
20
20
|
const fs = require('fs');
|
|
21
21
|
const path = require('path');
|
|
22
22
|
|
|
23
|
-
const SCHEMA_VERSION =
|
|
23
|
+
const SCHEMA_VERSION = 2;
|
|
24
24
|
|
|
25
25
|
const NEW_HEADER = '| Datetime-start | Datetime-end | Command | Step | Model | Duration(s) | Tokens | Notes | Domain | Task | Ctx% |';
|
|
26
26
|
const NEW_SEP = '|---|---|---|---|---|---|---|---|---|---|---|';
|
|
@@ -94,10 +94,10 @@ function _appendJsonlRecord(jsonlPath, record) {
|
|
|
94
94
|
fs.appendFileSync(jsonlPath, JSON.stringify(record) + '\n');
|
|
95
95
|
}
|
|
96
96
|
|
|
97
|
-
function _buildJsonlRecord({ command, step, model, startedAt, endedAt, durationSec, usage, domain, task, notes, ctxPct, milestone, source }) {
|
|
97
|
+
function _buildJsonlRecord({ command, step, model, startedAt, endedAt, durationSec, usage, domain, task, notes, ctxPct, milestone, source, sessionId, turnId, sessionType, toolAttribution, compactionPressure }) {
|
|
98
98
|
const u = usage || {};
|
|
99
99
|
const cost = (typeof u.total_cost_usd === 'number') ? u.total_cost_usd : (typeof u.cost_usd === 'number' ? u.cost_usd : null);
|
|
100
|
-
|
|
100
|
+
const rec = {
|
|
101
101
|
schemaVersion: SCHEMA_VERSION,
|
|
102
102
|
ts: new Date().toISOString(),
|
|
103
103
|
source: source || 'live',
|
|
@@ -119,6 +119,12 @@ function _buildJsonlRecord({ command, step, model, startedAt, endedAt, durationS
|
|
|
119
119
|
notes: notes || null,
|
|
120
120
|
hasUsage: !!usage,
|
|
121
121
|
};
|
|
122
|
+
if (sessionId != null) rec.session_id = String(sessionId);
|
|
123
|
+
if (turnId != null) rec.turn_id = String(turnId);
|
|
124
|
+
if (sessionType != null) rec.sessionType = sessionType;
|
|
125
|
+
if (Array.isArray(toolAttribution) && toolAttribution.length) rec.tool_attribution = toolAttribution;
|
|
126
|
+
if (compactionPressure && typeof compactionPressure === 'object') rec.compaction_pressure = compactionPressure;
|
|
127
|
+
return rec;
|
|
122
128
|
}
|
|
123
129
|
|
|
124
130
|
function _inferMilestone(projectDir) {
|
|
@@ -163,6 +169,11 @@ function _parseStartedAt(s) {
|
|
|
163
169
|
* @param {string|number} [opts.ctxPct]
|
|
164
170
|
* @param {string} [opts.notes]
|
|
165
171
|
* @param {'live'|'backfill'} [opts.source]
|
|
172
|
+
* @param {string} [opts.sessionId] v2 — stable session identifier
|
|
173
|
+
* @param {string|number} [opts.turnId] v2 — per-turn identifier within sessionId
|
|
174
|
+
* @param {'in-session'|'headless'} [opts.sessionType] v2 — channel classifier
|
|
175
|
+
* @param {Array} [opts.toolAttribution] v2 — D2 joiner output; usually omitted by spawn callers
|
|
176
|
+
* @param {object} [opts.compactionPressure] v2 — D5 runway snapshot; usually omitted by spawn callers
|
|
166
177
|
* @returns {{tokenLogPath: string, jsonlPath: string}}
|
|
167
178
|
*/
|
|
168
179
|
function recordSpawnRow(opts) {
|
|
@@ -209,6 +220,11 @@ function recordSpawnRow(opts) {
|
|
|
209
220
|
ctxPct: opts.ctxPct == null ? null : opts.ctxPct,
|
|
210
221
|
milestone,
|
|
211
222
|
source: opts.source || 'live',
|
|
223
|
+
sessionId: opts.sessionId,
|
|
224
|
+
turnId: opts.turnId,
|
|
225
|
+
sessionType: opts.sessionType,
|
|
226
|
+
toolAttribution: opts.toolAttribution,
|
|
227
|
+
compactionPressure: opts.compactionPressure,
|
|
212
228
|
}));
|
|
213
229
|
|
|
214
230
|
return { tokenLogPath, jsonlPath };
|
|
@@ -266,6 +282,11 @@ async function captureSpawn(opts) {
|
|
|
266
282
|
task: opts.task,
|
|
267
283
|
ctxPct,
|
|
268
284
|
notes,
|
|
285
|
+
sessionId: opts.sessionId,
|
|
286
|
+
turnId: opts.turnId,
|
|
287
|
+
sessionType: opts.sessionType,
|
|
288
|
+
toolAttribution: opts.toolAttribution,
|
|
289
|
+
compactionPressure: opts.compactionPressure,
|
|
269
290
|
});
|
|
270
291
|
|
|
271
292
|
if (caught) throw caught;
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
/**
|
|
3
|
+
* GSD-T Token Log Regenerator (M43 D3)
|
|
4
|
+
*
|
|
5
|
+
* Reads `.gsd-t/metrics/token-usage.jsonl` end-to-end and writes
|
|
6
|
+
* `.gsd-t/token-log.md` deterministically. Per metrics-schema-contract v2
|
|
7
|
+
* §Derived Artifact, `token-log.md` is a regenerated view post-v2.
|
|
8
|
+
*
|
|
9
|
+
* Sort (v2 §5): startedAt asc → session_id asc → turn_id asc.
|
|
10
|
+
* Numeric turn_ids sort numerically; mixed/non-numeric falls back to lex.
|
|
11
|
+
*
|
|
12
|
+
* Idempotent and deterministic: running twice produces byte-identical output.
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
const fs = require('fs');
|
|
16
|
+
const path = require('path');
|
|
17
|
+
|
|
18
|
+
const capture = require('./gsd-t-token-capture.cjs');
|
|
19
|
+
const { NEW_HEADER, NEW_SEP, _formatTokensCell } = capture;
|
|
20
|
+
|
|
21
|
+
/**
 * Parse an ndjson file into an array of records.
 * A missing file yields []; blank and unparseable lines are skipped
 * (best-effort read — a corrupt line must not abort regeneration).
 * @param {string} jsonlPath
 * @returns {Array<object>}
 */
function _readJsonl(jsonlPath) {
  if (!fs.existsSync(jsonlPath)) return [];
  const rows = [];
  for (const raw of fs.readFileSync(jsonlPath, 'utf8').split('\n')) {
    const trimmed = raw.trim();
    if (trimmed === '') continue;
    try {
      rows.push(JSON.parse(trimmed));
    } catch (_) {
      // malformed line — skip it
    }
  }
  return rows;
}
|
|
33
|
+
|
|
34
|
+
/**
 * Rebuild a usage object from a v2 jsonl row and format it for the
 * `Tokens` Markdown cell. Rows captured without usage render as '—'.
 * @param {object} row - record from token-usage.jsonl
 * @returns {string}
 */
function _tokenCellFromRow(row) {
  if (!row.hasUsage) return '—';
  const cost = (typeof row.costUSD === 'number') ? row.costUSD : undefined;
  return _formatTokensCell({
    input_tokens: row.inputTokens || 0,
    output_tokens: row.outputTokens || 0,
    cache_read_input_tokens: row.cacheReadInputTokens || 0,
    cache_creation_input_tokens: row.cacheCreationInputTokens || 0,
    total_cost_usd: cost,
  });
}
|
|
45
|
+
|
|
46
|
+
// Compare two rows by startedAt. ISO-8601 timestamps order correctly
// under plain lexicographic comparison; a missing value sorts first.
function _cmpStart(a, b) {
  const left = a.startedAt || '';
  const right = b.startedAt || '';
  if (left === right) return 0;
  return left < right ? -1 : 1;
}
|
|
53
|
+
|
|
54
|
+
// Compare two rows by session_id, lexicographically; rows without a
// session_id sort ahead of rows that have one.
function _cmpSession(a, b) {
  const left = a.session_id == null ? '' : String(a.session_id);
  const right = b.session_id == null ? '' : String(b.session_id);
  if (left === right) return 0;
  return left < right ? -1 : 1;
}
|
|
61
|
+
|
|
62
|
+
// Compare two rows by turn_id. When both ids are non-empty and numeric
// they compare numerically (so 2 < 10); otherwise fall back to
// lexicographic string order, with missing ids sorting first.
function _cmpTurn(a, b) {
  const left = a.turn_id == null ? '' : String(a.turn_id);
  const right = b.turn_id == null ? '' : String(b.turn_id);
  const ln = Number(left);
  const rn = Number(right);
  const bothNumeric = left !== '' && right !== '' && Number.isFinite(ln) && Number.isFinite(rn);
  if (bothNumeric) return ln - rn;
  if (left === right) return 0;
  return left < right ? -1 : 1;
}
|
|
73
|
+
|
|
74
|
+
/**
 * Deterministic v2 ordering (§5): startedAt asc, then session_id asc,
 * then turn_id asc (numeric when both are numeric). Returns a sorted
 * copy; the input array is not mutated.
 * @param {Array<object>} rows
 * @returns {Array<object>}
 */
function sortRows(rows) {
  const copy = rows.slice();
  copy.sort((a, b) => _cmpStart(a, b) || _cmpSession(a, b) || _cmpTurn(a, b));
  return copy;
}
|
|
79
|
+
|
|
80
|
+
// Render durationMs as whole seconds, clamped at 0 (e.g. 1500 → "2s").
function _durationCell(row) {
  const seconds = Math.round(Number(row.durationMs || 0) / 1000);
  return `${seconds < 0 ? 0 : seconds}s`;
}
|
|
84
|
+
|
|
85
|
+
/**
 * Render one jsonl record as a row of the 11-column token-log table.
 *
 * Fix: previously only `notes` escaped embedded `|` characters; a pipe
 * inside command/step/model/domain/task (or ctxPct) would terminate a
 * cell early and corrupt the Markdown table. All free-text cells now
 * share one escaping helper. Defaults are unchanged: ''-style cells for
 * command/step/model, '-' placeholders for notes/domain/task, 'N/A' for
 * a missing ctxPct.
 *
 * @param {object} row - record from token-usage.jsonl (v2 schema)
 * @returns {string} one `| ... |` Markdown table row
 */
function _renderRow(row) {
  // Escape `|` so cell content cannot add bogus table columns.
  const esc = (v) => String(v).replace(/\|/g, '\\|');
  const tokensCell = _tokenCellFromRow(row);
  const command = esc(row.command || '');
  const step = esc(row.step || '');
  const model = esc(row.model || '');
  const notes = (row.notes == null || row.notes === '') ? '-' : esc(row.notes);
  const domain = (row.domain == null || row.domain === '') ? '-' : esc(row.domain);
  const task = (row.task == null || row.task === '') ? '-' : esc(row.task);
  const ctxPct = (row.ctxPct == null) ? 'N/A' : esc(row.ctxPct);
  return `| ${row.startedAt || ''} | ${row.endedAt || ''} | ${command} | ${step} | ${model} | ${_durationCell(row)} | ${tokensCell} | ${notes} | ${domain} | ${task} | ${ctxPct} |`;
}
|
|
93
|
+
|
|
94
|
+
/**
 * Render all rows as the complete token-log Markdown document:
 * title, blank line, table header + separator, one row per record,
 * and a trailing newline. Deterministic for a given input.
 * @param {Array<object>} rows
 * @returns {string}
 */
function renderMarkdown(rows) {
  const out = ['# GSD-T Token Log', '', NEW_HEADER, NEW_SEP];
  for (const row of sortRows(rows)) {
    out.push(_renderRow(row));
  }
  out.push('');
  return out.join('\n');
}
|
|
101
|
+
|
|
102
|
+
/**
 * Regenerate `token-log.md` from `token-usage.jsonl` (idempotent:
 * running twice over the same input produces byte-identical output).
 * @param {object} [opts]
 * @param {string} [opts.projectDir='.']
 * @param {string} [opts.jsonlPath] override input path
 * @param {string} [opts.tokenLogPath] override output path
 * @returns {{ wrote: string, rowCount: number }}
 */
function regenerateLog(opts = {}) {
  const projectDir = opts.projectDir || '.';
  const jsonlPath = opts.jsonlPath || path.join(projectDir, '.gsd-t', 'metrics', 'token-usage.jsonl');
  const tokenLogPath = opts.tokenLogPath || path.join(projectDir, '.gsd-t', 'token-log.md');

  const rows = _readJsonl(jsonlPath);
  const outDir = path.dirname(tokenLogPath);
  if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
  fs.writeFileSync(tokenLogPath, renderMarkdown(rows));
  return { wrote: tokenLogPath, rowCount: rows.length };
}
|
|
121
|
+
|
|
122
|
+
// Public API (regenerateLog) plus underscore-prefixed internals that are
// exported for unit testing only.
module.exports = {
  regenerateLog,
  renderMarkdown,
  sortRows,
  _readJsonl,
  _renderRow,
  _tokenCellFromRow,
};
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
/**
|
|
3
|
+
* GSD-T Transcript Tee (M42 D1)
|
|
4
|
+
*
|
|
5
|
+
* Captures raw stream-json frames from every unattended spawn to
|
|
6
|
+
* `.gsd-t/transcripts/{spawn-id}.ndjson` + maintains a registry at
|
|
7
|
+
* `.gsd-t/transcripts/.index.json` used by the dashboard sidebar.
|
|
8
|
+
*
|
|
9
|
+
* Zero external deps. Append-only. Does not parse the frames — that's the
|
|
10
|
+
* renderer's job. One frame per line; lines that fail JSON parse are still
|
|
11
|
+
* tee'd so nothing is silently dropped.
|
|
12
|
+
*
|
|
13
|
+
* Contracts:
|
|
14
|
+
* - .gsd-t/contracts/stream-json-sink-contract.md v1.1.0 (frame shape)
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
const fs = require('fs');
|
|
18
|
+
const path = require('path');
|
|
19
|
+
const crypto = require('crypto');
|
|
20
|
+
|
|
21
|
+
const TRANSCRIPTS_DIRNAME = path.join('.gsd-t', 'transcripts');
|
|
22
|
+
const INDEX_FILENAME = '.index.json';
|
|
23
|
+
|
|
24
|
+
// Resolve `<projectDir>/.gsd-t/transcripts` (projectDir defaults to CWD).
function _transcriptsDir(projectDir) {
  return path.join(projectDir || '.', TRANSCRIPTS_DIRNAME);
}

// Path of the registry file (`.index.json`) inside the transcripts dir.
function _indexPath(projectDir) {
  return path.join(_transcriptsDir(projectDir), INDEX_FILENAME);
}

// `mkdir -p` equivalent; no-op when the directory already exists.
function _ensureDir(p) {
  if (!fs.existsSync(p)) fs.mkdirSync(p, { recursive: true });
}
|
|
35
|
+
|
|
36
|
+
/**
 * Allocate a hierarchical spawn-id. Root spawns look like `s-{hex8}`;
 * child spawns append `.{hex8}` to their parent's id, so the whole
 * ancestry is readable from the id itself. The 8 hex chars come from
 * `crypto.randomBytes(4)`.
 *
 * @param {object} [opts]
 * @param {string|null} [opts.parentId]
 * @returns {string}
 */
function allocateSpawnId(opts) {
  const short = crypto.randomBytes(4).toString('hex');
  if (opts && opts.parentId) return `${String(opts.parentId)}.${short}`;
  return `s-${short}`;
}
|
|
49
|
+
|
|
50
|
+
/**
 * Load the transcript registry. Any failure — missing file, unreadable
 * content, bad JSON, wrong shape — degrades to an empty registry, since
 * the tee is best-effort and must never throw into callers.
 * @param {string} projectDir
 * @returns {{spawns: Array<object>}}
 */
function _readIndex(projectDir) {
  const indexFile = _indexPath(projectDir);
  const empty = { spawns: [] };
  if (!fs.existsSync(indexFile)) return empty;
  try {
    const parsed = JSON.parse(fs.readFileSync(indexFile, 'utf8'));
    const wellFormed = parsed && typeof parsed === 'object' && Array.isArray(parsed.spawns);
    return wellFormed ? parsed : empty;
  } catch (_) {
    return empty;
  }
}
|
|
62
|
+
|
|
63
|
+
/**
 * Persist the registry atomically: write to a sibling `.tmp` file and
 * rename over the real path, so concurrent readers never observe a
 * torn/partial write.
 * @param {string} projectDir
 * @param {{spawns: Array<object>}} idx
 */
function _writeIndex(projectDir, idx) {
  const target = _indexPath(projectDir);
  _ensureDir(path.dirname(target));
  const tmpPath = `${target}.tmp`;
  fs.writeFileSync(tmpPath, JSON.stringify(idx, null, 2));
  fs.renameSync(tmpPath, target);
}
|
|
70
|
+
|
|
71
|
+
/**
 * Register a new transcript and create its ndjson file.
 *
 * Re-opening an already-registered spawnId merges the fresh entry over the
 * existing registry row (resetting status to 'running') rather than adding
 * a duplicate. The ndjson file is only created if it does not exist, so
 * previously tee'd frames are preserved.
 *
 * @param {object} opts
 * @param {string} opts.spawnId - required; throws when missing
 * @param {string} [opts.projectDir='.']
 * @param {object} [opts.meta] { parentId?, command?, description?, workerPid?, model? }
 * @returns {{spawnId: string, transcriptPath: string, startedAt: string}}
 * @throws {Error} when opts.spawnId is absent
 */
function openTranscript(opts) {
  if (!opts || !opts.spawnId) throw new Error('openTranscript: spawnId required');
  const projectDir = opts.projectDir || '.';
  const meta = opts.meta || {};
  const dir = _transcriptsDir(projectDir);
  _ensureDir(dir);

  // Create the ndjson file up front so the dashboard can tail it
  // immediately; keep existing content on re-open.
  const transcriptPath = path.join(dir, `${opts.spawnId}.ndjson`);
  if (!fs.existsSync(transcriptPath)) {
    fs.writeFileSync(transcriptPath, '');
  }

  const startedAt = new Date().toISOString();
  const entry = {
    spawnId: opts.spawnId,
    parentId: meta.parentId || null,
    command: meta.command || null,
    description: meta.description || null,
    model: meta.model || null,
    workerPid: meta.workerPid || null,
    startedAt,
    endedAt: null,
    status: 'running',
  };

  // Read-modify-write of the registry: update in place when the spawnId
  // is already present, otherwise append.
  const idx = _readIndex(projectDir);
  const existing = idx.spawns.findIndex((s) => s.spawnId === opts.spawnId);
  if (existing >= 0) {
    idx.spawns[existing] = { ...idx.spawns[existing], ...entry };
  } else {
    idx.spawns.push(entry);
  }
  _writeIndex(projectDir, idx);

  return { spawnId: opts.spawnId, transcriptPath, startedAt };
}
|
|
116
|
+
|
|
117
|
+
/**
 * Append a single frame to the spawn's transcript ndjson file.
 *
 * `frame` may be a parsed object or a raw string line. Strings that are
 * valid JSON are appended verbatim (trimmed); anything else is wrapped as
 * `{type:"raw",line}` so every ndjson line stays uniformly parseable.
 * Null/undefined frames and whitespace-only strings are ignored.
 *
 * @param {object} opts
 * @param {string} opts.spawnId - required; throws when missing
 * @param {string} [opts.projectDir='.']
 * @param {object|string} opts.frame
 * @throws {Error} when opts.spawnId is absent
 */
function appendFrame(opts) {
  if (!opts || !opts.spawnId) throw new Error('appendFrame: spawnId required');
  if (opts.frame === undefined || opts.frame === null) return;
  const projectDir = opts.projectDir || '.';
  const filePath = path.join(_transcriptsDir(projectDir), `${opts.spawnId}.ndjson`);
  _ensureDir(path.dirname(filePath));

  // Normalize the frame to exactly one serialized ndjson line.
  const serialize = (frame) => {
    if (typeof frame === 'string') {
      const trimmed = frame.trim();
      if (!trimmed) return null;
      try {
        JSON.parse(trimmed);
        return trimmed;
      } catch (_) {
        return JSON.stringify({ type: 'raw', line: frame });
      }
    }
    try {
      return JSON.stringify(frame);
    } catch (_) {
      // e.g. circular structures — still record something.
      return JSON.stringify({ type: 'raw', line: String(frame) });
    }
  };

  const line = serialize(opts.frame);
  if (line === null) return;
  fs.appendFileSync(filePath, line + '\n');
}
|
|
153
|
+
|
|
154
|
+
/**
 * Mark a transcript's registry entry as ended. Idempotent — calling
 * again simply refreshes endedAt and status.
 *
 * @param {object} opts
 * @param {string} opts.spawnId - required; throws when missing
 * @param {string} [opts.projectDir='.']
 * @param {'done'|'failed'|'stopped'|'ended'} [opts.status='ended']
 * @returns {boolean} false when the spawnId is not registered
 * @throws {Error} when opts.spawnId is absent
 */
function closeTranscript(opts) {
  if (!opts || !opts.spawnId) throw new Error('closeTranscript: spawnId required');
  const projectDir = opts.projectDir || '.';
  const idx = _readIndex(projectDir);
  const entry = idx.spawns.find((s) => s.spawnId === opts.spawnId);
  if (!entry) return false;
  entry.endedAt = new Date().toISOString();
  entry.status = opts.status || 'ended';
  _writeIndex(projectDir, idx);
  return true;
}
|
|
173
|
+
|
|
174
|
+
/**
 * List registered spawns, newest first by startedAt. Entries with a
 * missing or unparseable timestamp sort as epoch 0 (i.e. last). The
 * registry array is copied before sorting, never mutated.
 *
 * @param {string} [projectDir='.']
 * @returns {Array<object>}
 */
function listTranscripts(projectDir) {
  const toMillis = (entry) => Date.parse(entry.startedAt) || 0;
  return _readIndex(projectDir || '.').spawns
    .slice()
    .sort((a, b) => toMillis(b) - toMillis(a));
}
|
|
188
|
+
|
|
189
|
+
// Look up one registry entry by spawnId; null when not registered.
function readTranscriptMeta(projectDir, spawnId) {
  for (const entry of _readIndex(projectDir || '.').spawns) {
    if (entry.spawnId === spawnId) return entry;
  }
  return null;
}
|
|
193
|
+
|
|
194
|
+
/**
 * Build a line-splitting tee for a stream-json stdout stream.
 *
 * Feed raw chunks (Buffer or string) to `onChunk`; each complete
 * newline-terminated line is appended to the spawn's transcript. A
 * partial trailing line is buffered until the next chunk arrives. Call
 * `flush` on child exit so a stranded tail is still recorded (it goes
 * through appendFrame, which wraps non-JSON text as `{type:"raw"}`).
 *
 * @param {object} opts
 * @param {string} opts.spawnId - required; throws when missing
 * @param {string} [opts.projectDir='.']
 * @returns {{onChunk: (chunk) => void, flush: () => void}}
 * @throws {Error} when opts.spawnId is absent
 */
function makeStreamTee(opts) {
  if (!opts || !opts.spawnId) throw new Error('makeStreamTee: spawnId required');
  const projectDir = opts.projectDir || '.';
  const spawnId = opts.spawnId;
  let pending = '';

  const emit = (line) => {
    // Empty lines carry no frame; skip them.
    if (line.length > 0) appendFrame({ spawnId, projectDir, frame: line });
  };

  return {
    onChunk(chunk) {
      if (chunk == null) return;
      pending += Buffer.isBuffer(chunk) ? chunk.toString('utf8') : String(chunk);
      let cut = pending.indexOf('\n');
      while (cut >= 0) {
        emit(pending.slice(0, cut));
        pending = pending.slice(cut + 1);
        cut = pending.indexOf('\n');
      }
    },
    flush() {
      if (pending.length === 0) return;
      appendFrame({ spawnId, projectDir, frame: pending });
      pending = '';
    },
  };
}
|
|
233
|
+
|
|
234
|
+
// Public tee API, plus underscore-prefixed registry helpers used by the
// orchestrator worker and unit tests, and the path constants consumers
// need to locate transcripts on disk.
module.exports = {
  allocateSpawnId,
  openTranscript,
  appendFrame,
  closeTranscript,
  listTranscripts,
  readTranscriptMeta,
  makeStreamTee,
  _readIndex,
  _writeIndex,
  TRANSCRIPTS_DIRNAME,
  INDEX_FILENAME,
};
|