@tekyzinc/gsd-t 3.10.13 → 3.10.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,246 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * GSD-T Token Telemetry — per-subagent-spawn granular telemetry recorder
5
+ *
6
+ * Records one JSON object per line to .gsd-t/token-metrics.jsonl for every
7
+ * subagent spawn across every command file. Feeds:
8
+ * - bin/runway-estimator.js (M35 Wave 3) — pre-flight runway projection
9
+ * - bin/token-optimizer.js (M35 Wave 4) — optimization backlog detector
10
+ * - gsd-t metrics --tokens / --halts / --tokens --context-window CLI
11
+ *
12
+ * Zero external dependencies (Node.js built-ins only).
13
+ * Zero API calls (reads .gsd-t/.context-meter-state.json written by M34 hook).
14
+ * Single-writer assumption — no lockfile; fs.appendFileSync is atomic for
15
+ * writes under PIPE_BUF (4096 bytes on POSIX), and a single record is well
16
+ * under that limit.
17
+ *
18
+ * Contract: .gsd-t/contracts/token-telemetry-contract.md v1.0.0
19
+ * Schema is frozen for v1.x — fields can be added in minor bumps but never
20
+ * removed or renamed.
21
+ */
22
+
23
+ const fs = require("fs");
24
+ const path = require("path");
25
+
26
+ // ── Frozen schema (matches token-telemetry-contract.md v1.0.0) ──────────────
27
+
28
/**
 * The 18 required fields. Order is not significant on disk (parsers use keys),
 * but this array is the canonical list for validation error messages and
 * downstream tooling that needs a stable field enumeration.
 */
const REQUIRED_FIELDS = Object.freeze([
  // Spawn identity / provenance.
  "timestamp",
  "milestone",
  "command",
  "phase",
  "step",
  "domain",
  "domain_type",
  "task",
  "model",
  // Cost measurement (token deltas bracketing the spawn).
  "duration_s",
  "input_tokens_before",
  "input_tokens_after",
  "tokens_consumed",
  "context_window_pct_before",
  "context_window_pct_after",
  // Outcome classification.
  "outcome",
  "halt_type",
  "escalated_via_advisor",
]);
53
+
54
/**
 * Type enforcement map. Keys are field names; values are either "string",
 * "number", "boolean", "nullable-string", or a Set of valid string enum values.
 * halt_type is the only nullable field in v1.0.0 per the contract.
 * Consumed by validateRecord() — every REQUIRED_FIELDS entry has a key here.
 */
const FIELD_TYPES = Object.freeze({
  timestamp: "string",
  milestone: "string",
  command: "string",
  phase: "string",
  step: "string",
  domain: "string",
  domain_type: "string",
  task: "string",
  model: new Set(["haiku", "sonnet", "opus"]), // string enum — checked by Set membership
  duration_s: "number", // "number" fields are validated as FINITE (NaN/Infinity rejected)
  input_tokens_before: "number",
  input_tokens_after: "number",
  tokens_consumed: "number",
  context_window_pct_before: "number",
  context_window_pct_after: "number",
  outcome: new Set(["success", "failure", "blocked", "escalated"]),
  halt_type: "nullable-string", // null OR one of the halt_type enum values
  escalated_via_advisor: "boolean",
});
79
+
80
// Valid halt_type values when non-null; null means "no halt occurred".
// NOTE(review): Object.freeze freezes the Set object's own properties only —
// it does not prevent Set.prototype.add/delete. Treat as read-only by convention.
const HALT_TYPE_ENUM = Object.freeze(
  new Set(["clean", "runway-refusal", "headless-handoff", "native-compact"]),
);
83
+
84
// ── Exports ─────────────────────────────────────────────────────────────────

// Public surface: the writer, reader, aggregator, and the canonical field
// list. FIELD_TYPES, HALT_TYPE_ENUM, validateRecord, typeName, and ensureDir
// stay module-internal.
module.exports = {
  recordSpawn,
  readAll,
  aggregate,
  REQUIRED_FIELDS,
};
92
+
93
+ // ── recordSpawn ─────────────────────────────────────────────────────────────
94
+
95
/**
 * Append one telemetry record to .gsd-t/token-metrics.jsonl.
 *
 * Validates against the frozen v1.0.0 schema first, creates the .gsd-t
 * directory on demand, then appends a single JSON line (single-writer
 * assumption per the file header — no lockfile).
 *
 * @param {object} record - A record matching the v1.0.0 schema. All 18
 *   required fields must be present and of the correct type.
 * @param {string} [projectDir] - Optional project root. Defaults to cwd.
 * @throws {Error} on missing required field, wrong type, or I/O failure.
 * @returns {void}
 */
function recordSpawn(record, projectDir) {
  validateRecord(record);
  const root = projectDir || process.cwd();
  const metricsDir = path.join(root, ".gsd-t");
  ensureDir(metricsDir);
  const metricsFile = path.join(metricsDir, "token-metrics.jsonl");
  fs.appendFileSync(metricsFile, `${JSON.stringify(record)}\n`);
}
113
+
114
+ // ── readAll ─────────────────────────────────────────────────────────────────
115
+
116
/**
 * Read and parse every record from .gsd-t/token-metrics.jsonl.
 *
 * @param {string} [projectDir] - Optional project root. Defaults to cwd.
 * @returns {Array<object>} Parsed records. Returns [] when the file does not
 *   exist. Malformed lines are skipped with a console.warn (the read is not
 *   aborted).
 */
function readAll(projectDir) {
  const root = projectDir || process.cwd();
  const metricsFile = path.join(root, ".gsd-t", "token-metrics.jsonl");
  if (!fs.existsSync(metricsFile)) return [];
  const parsed = [];
  for (const line of fs.readFileSync(metricsFile, "utf8").split("\n")) {
    if (line.trim().length === 0) continue; // blank / trailing-newline lines
    try {
      parsed.push(JSON.parse(line));
    } catch (e) {
      // Corrupt line (e.g. interrupted write): warn and keep going.
      // eslint-disable-next-line no-console
      console.warn(`token-telemetry.readAll: skipping malformed line: ${e.message}`);
    }
  }
  return parsed;
}
141
+
142
+ // ── aggregate ───────────────────────────────────────────────────────────────
143
+
144
/**
 * Group records by one or more fields and compute per-group statistics.
 *
 * @param {Array<object>} records
 * @param {{ by: Array<string> }} options - Array of field names to group by.
 *   Unknown fields yield empty-string values in the group key.
 * @returns {Array<{ key: object, count: number, total_tokens: number,
 *   mean: number, median: number, p95: number }>}
 */
function aggregate(records, options) {
  const by = (options && Array.isArray(options.by)) ? options.by : [];
  if (!Array.isArray(records) || records.length === 0) return [];

  // Build groups keyed on a stable string (JSON of the key object). `by`
  // order is preserved, so key objects serialize deterministically per call.
  const groups = new Map();
  for (const r of records) {
    const key = {};
    for (const field of by) key[field] = r[field] != null ? r[field] : "";
    const keyStr = JSON.stringify(key);
    if (!groups.has(keyStr)) groups.set(keyStr, { key, tokens: [] });
    // Non-numeric tokens_consumed counts as 0 but keeps the record in `count`.
    const tokens = typeof r.tokens_consumed === "number" ? r.tokens_consumed : 0;
    groups.get(keyStr).tokens.push(tokens);
  }

  const result = [];
  for (const { key, tokens } of groups.values()) {
    // Every group holds at least one sample by construction, so count >= 1.
    const count = tokens.length;
    const total_tokens = tokens.reduce((s, v) => s + v, 0);
    const mean = total_tokens / count;
    const sorted = tokens.slice().sort((a, b) => a - b);
    // True median: middle element for odd counts, average of the two middle
    // elements for even counts. (Previously the upper-middle element was
    // returned for even counts, which biases the median upward.)
    const mid = Math.floor(count / 2);
    const median =
      count % 2 === 1 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2;
    // Nearest-rank p95, clamped to the last element for small samples.
    const p95idx = Math.min(count - 1, Math.floor(count * 0.95));
    result.push({ key, count, total_tokens, mean, median, p95: sorted[p95idx] });
  }
  return result;
}
181
+
182
+ // ── Internal: schema validation ─────────────────────────────────────────────
183
+
184
/**
 * Validate a telemetry record against the frozen v1.0.0 schema.
 *
 * Two passes over REQUIRED_FIELDS: first presence (so missing-field errors
 * are always reported before type errors), then type/enum enforcement driven
 * by FIELD_TYPES. Throws on the first violation encountered.
 *
 * @param {object} record - Candidate record from recordSpawn().
 * @throws {Error} when record is not a plain object, a required field is
 *   absent, or any field fails its type/enum rule.
 * @returns {void}
 */
function validateRecord(record) {
  if (record == null || typeof record !== "object" || Array.isArray(record)) {
    throw new Error(
      `recordSpawn: record must be a plain object, got ${Array.isArray(record) ? "array" : typeof record}`,
    );
  }

  // Pass 1: presence.
  for (const field of REQUIRED_FIELDS) {
    if (!(field in record)) {
      throw new Error(`recordSpawn: missing required field: ${field}`);
    }
  }

  // Pass 2: types and enums. Each spec kind is mutually exclusive, so the
  // guard-clause order below cannot change which error a given field raises.
  for (const field of REQUIRED_FIELDS) {
    const spec = FIELD_TYPES[field];
    const value = record[field];

    if (spec instanceof Set) {
      // String enum (model, outcome).
      if (typeof value === "string" && spec.has(value)) continue;
      throw new Error(
        `recordSpawn: field ${field} has wrong value: expected one of ${Array.from(spec).join("|")}, got ${JSON.stringify(value)}`,
      );
    }
    if (spec === "nullable-string") {
      // halt_type: null OR one of the halt_type enum values.
      if (value === null || (typeof value === "string" && HALT_TYPE_ENUM.has(value))) continue;
      throw new Error(
        `recordSpawn: field ${field} has wrong value: expected null or one of ${Array.from(HALT_TYPE_ENUM).join("|")}, got ${JSON.stringify(value)}`,
      );
    }
    if (spec === "number") {
      // Rejects NaN and ±Infinity, not just non-numbers.
      if (typeof value === "number" && Number.isFinite(value)) continue;
      throw new Error(
        `recordSpawn: field ${field} has wrong type: expected finite number, got ${typeName(value)}`,
      );
    }
    // Remaining specs are "string" or "boolean": a plain typeof check.
    if (typeof value === spec) continue;
    throw new Error(
      `recordSpawn: field ${field} has wrong type: expected ${spec}, got ${typeName(value)}`,
    );
  }
}
235
+
236
/**
 * Human-readable type name for validation error messages.
 * Distinguishes null and array from the generic typeof result "object".
 */
function typeName(v) {
  if (v === null) return "null";
  return Array.isArray(v) ? "array" : typeof v;
}
241
+
242
+ // ── Internal: fs helpers ────────────────────────────────────────────────────
243
+
244
/**
 * Create directory d (including any missing parents) if it does not exist.
 *
 * mkdirSync with { recursive: true } is a no-op when the directory already
 * exists, so the previous existsSync pre-check (a check-then-act TOCTOU race)
 * is dropped — the single call is both simpler and race-free.
 *
 * @param {string} d - Absolute or cwd-relative directory path.
 */
function ensureDir(d) {
  fs.mkdirSync(d, { recursive: true });
}
@@ -13,11 +13,11 @@ If `$ARGUMENTS` contains `--file {path}`, read from `.gsd-t/{path}` instead of t
13
13
 
14
14
  Also support `--status {pending|promoted|rejected}` when listing the optimization backlog — filters by the `**Status**:` field inside each H2 block.
15
15
 
16
- If `--file optimization-backlog.md` is supplied, use `bin/token-optimizer.js` parseBacklog() to parse entries, then render a simplified table with columns: ID, Type, Status, Evidence (truncated to 80 chars). Example:
16
+ If `--file optimization-backlog.md` is supplied, use `bin/token-optimizer.cjs` parseBacklog() to parse entries, then render a simplified table with columns: ID, Type, Status, Evidence (truncated to 80 chars). Example:
17
17
 
18
18
  ```bash
19
19
  node -e "
20
- const opt = require('./bin/token-optimizer.js');
20
+ const opt = require('./bin/token-optimizer.cjs');
21
21
  const entries = opt.parseBacklog(opt.readBacklog('.'));
22
22
  const statusFilter = process.argv[1] || '';
23
23
  const filtered = statusFilter
@@ -515,7 +515,7 @@ After all quality gates pass and the milestone is archived, run the token optimi
515
515
  ```bash
516
516
  node -e "
517
517
  try {
518
- const opt = require('./bin/token-optimizer.js');
518
+ const opt = require('./bin/token-optimizer.cjs');
519
519
  const recs = opt.detectRecommendations({projectDir: '.', lookbackMilestones: 3});
520
520
  opt.appendToBacklog(recs, '.');
521
521
  if (recs.length === 0) {
@@ -27,7 +27,7 @@ T0_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.
27
27
  ```bash
28
28
  T1_TOKENS=$(node -e "try{const s=require('fs').readFileSync('.gsd-t/.context-meter-state.json','utf8');process.stdout.write(String(JSON.parse(s).inputTokens||0))}catch(_){process.stdout.write('0')}")
29
29
  T1_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.write(String(tb.getSessionStatus('.').pct||0))}catch(_){process.stdout.write('0')}")
30
- node -e "require('./bin/token-telemetry.js').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-debug',phase:'debug',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-opus}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
30
+ node -e "require('./bin/token-telemetry.cjs').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-debug',phase:'debug',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-opus}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
31
31
  ```
32
32
 
33
33
  The bracket is additive to the existing `.gsd-t/token-log.md` OBSERVABILITY LOGGING rows. Both sinks coexist.
@@ -38,7 +38,7 @@ Debug uses conservative per-iteration cost (opus-default fallback = 8%/task). Ru
38
38
 
39
39
  ```bash
40
40
  node -e "
41
- const r = require('./bin/runway-estimator.js').estimateRunway({
41
+ const r = require('./bin/runway-estimator.cjs').estimateRunway({
42
42
  command: 'gsd-t-debug',
43
43
  domain_type: '',
44
44
  remaining_tasks: 1,
@@ -48,7 +48,7 @@ console.log(JSON.stringify(r, null, 2));
48
48
  if (!r.can_start) {
49
49
  console.log('⛔ Insufficient runway — projected ' + r.projected_end_pct + '% (current ' + r.current_pct + '%, ' + r.pct_per_task + '%/task, ' + r.confidence + ' confidence, ' + r.confidence_basis + ' records)');
50
50
  console.log('Auto-spawning headless to continue in a fresh context.');
51
- const s = require('./bin/headless-auto-spawn.js').autoSpawnHeadless({
51
+ const s = require('./bin/headless-auto-spawn.cjs').autoSpawnHeadless({
52
52
  command: 'gsd-t-debug', args: [], continue_from: '.'
53
53
  });
54
54
  console.log('Session ID: ' + s.id);
@@ -353,7 +353,7 @@ Run via Bash before each iteration:
353
353
 
354
354
  ```bash
355
355
  node -e "
356
- const r = require('./bin/runway-estimator.js').estimateRunway({
356
+ const r = require('./bin/runway-estimator.cjs').estimateRunway({
357
357
  command: 'gsd-t-debug',
358
358
  domain_type: '',
359
359
  remaining_tasks: 1,
@@ -381,7 +381,7 @@ if (!r.can_start) {
381
381
  console.log('⛔ Runway exceeded mid-loop — projected ' + r.projected_end_pct + '% at iteration ' + snapshot.iteration_n_plus_1);
382
382
  console.log('Persisted hypothesis + last fix + test output to ' + ledgerPath);
383
383
 
384
- const s = require('./bin/headless-auto-spawn.js').autoSpawnHeadless({
384
+ const s = require('./bin/headless-auto-spawn.cjs').autoSpawnHeadless({
385
385
  command: 'gsd-t-debug',
386
386
  args: ['--resume', 'iteration-' + snapshot.iteration_n_plus_1],
387
387
  continue_from: ledgerPath
@@ -25,7 +25,7 @@ T0_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.
25
25
  ```bash
26
26
  T1_TOKENS=$(node -e "try{const s=require('fs').readFileSync('.gsd-t/.context-meter-state.json','utf8');process.stdout.write(String(JSON.parse(s).inputTokens||0))}catch(_){process.stdout.write('0')}")
27
27
  T1_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.write(String(tb.getSessionStatus('.').pct||0))}catch(_){process.stdout.write('0')}")
28
- node -e "require('./bin/token-telemetry.js').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-doc-ripple',phase:'doc-ripple',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
28
+ node -e "require('./bin/token-telemetry.cjs').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-doc-ripple',phase:'doc-ripple',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
29
29
  ```
30
30
 
31
31
  The bracket is additive to the existing `.gsd-t/token-log.md` OBSERVABILITY LOGGING rows. Both sinks coexist.
@@ -31,7 +31,7 @@ T0_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.
31
31
  ```bash
32
32
  T1_TOKENS=$(node -e "try{const s=require('fs').readFileSync('.gsd-t/.context-meter-state.json','utf8');process.stdout.write(String(JSON.parse(s).inputTokens||0))}catch(_){process.stdout.write('0')}")
33
33
  T1_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.write(String(tb.getSessionStatus('.').pct||0))}catch(_){process.stdout.write('0')}")
34
- node -e "require('./bin/token-telemetry.js').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-execute',phase:'${PHASE:-execute}',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
34
+ node -e "require('./bin/token-telemetry.cjs').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-execute',phase:'${PHASE:-execute}',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
35
35
  ```
36
36
 
37
37
  The bracket is additive to the existing `.gsd-t/token-log.md` OBSERVABILITY LOGGING rows. Both sinks coexist — token-log.md is human-readable with context percentage, token-metrics.jsonl is machine-readable with the full 18-field schema for `gsd-t metrics --tokens/--halts/--context-window` aggregation.
@@ -42,7 +42,7 @@ Run via Bash. Count the `remaining_tasks` from the unblocked task list (Step 1 r
42
42
 
43
43
  ```bash
44
44
  node -e "
45
- const r = require('./bin/runway-estimator.js').estimateRunway({
45
+ const r = require('./bin/runway-estimator.cjs').estimateRunway({
46
46
  command: 'gsd-t-execute',
47
47
  domain_type: '{DOMAIN_TYPE}',
48
48
  remaining_tasks: {N},
@@ -52,7 +52,7 @@ console.log(JSON.stringify(r, null, 2));
52
52
  if (!r.can_start) {
53
53
  console.log('⛔ Insufficient runway — projected ' + r.projected_end_pct + '% (current ' + r.current_pct + '%, ' + r.pct_per_task + '%/task, ' + r.confidence + ' confidence, ' + r.confidence_basis + ' records)');
54
54
  console.log('Auto-spawning headless to continue in a fresh context.');
55
- const s = require('./bin/headless-auto-spawn.js').autoSpawnHeadless({
55
+ const s = require('./bin/headless-auto-spawn.cjs').autoSpawnHeadless({
56
56
  command: 'gsd-t-execute', args: [], continue_from: '.'
57
57
  });
58
58
  console.log('Session ID: ' + s.id);
@@ -27,7 +27,7 @@ T0_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.
27
27
  ```bash
28
28
  T1_TOKENS=$(node -e "try{const s=require('fs').readFileSync('.gsd-t/.context-meter-state.json','utf8');process.stdout.write(String(JSON.parse(s).inputTokens||0))}catch(_){process.stdout.write('0')}")
29
29
  T1_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.write(String(tb.getSessionStatus('.').pct||0))}catch(_){process.stdout.write('0')}")
30
- node -e "require('./bin/token-telemetry.js').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-integrate',phase:'integrate',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
30
+ node -e "require('./bin/token-telemetry.cjs').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-integrate',phase:'integrate',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
31
31
  ```
32
32
 
33
33
  The bracket is additive to the existing `.gsd-t/token-log.md` OBSERVABILITY LOGGING rows. Both sinks coexist.
@@ -38,7 +38,7 @@ Count the integration wiring seams in `.gsd-t/contracts/integration-points.md` a
38
38
 
39
39
  ```bash
40
40
  node -e "
41
- const r = require('./bin/runway-estimator.js').estimateRunway({
41
+ const r = require('./bin/runway-estimator.cjs').estimateRunway({
42
42
  command: 'gsd-t-integrate',
43
43
  domain_type: '',
44
44
  remaining_tasks: {N},
@@ -48,7 +48,7 @@ console.log(JSON.stringify(r, null, 2));
48
48
  if (!r.can_start) {
49
49
  console.log('⛔ Insufficient runway — projected ' + r.projected_end_pct + '% (current ' + r.current_pct + '%, ' + r.pct_per_task + '%/task, ' + r.confidence + ' confidence, ' + r.confidence_basis + ' records)');
50
50
  console.log('Auto-spawning headless to continue in a fresh context.');
51
- const s = require('./bin/headless-auto-spawn.js').autoSpawnHeadless({
51
+ const s = require('./bin/headless-auto-spawn.cjs').autoSpawnHeadless({
52
52
  command: 'gsd-t-integrate', args: [], continue_from: '.'
53
53
  });
54
54
  console.log('Session ID: ' + s.id);
@@ -2,7 +2,7 @@
2
2
 
3
3
  Apply (promote) a pending recommendation from `.gsd-t/optimization-backlog.md`. Takes `$ARGUMENTS` as the recommendation ID (e.g., `M35-OPT-001`).
4
4
 
5
- Recommendations are produced by `bin/token-optimizer.js` at `complete-milestone` and are **never auto-applied**. This command is the user's deliberate promotion step.
5
+ Recommendations are produced by `bin/token-optimizer.cjs` at `complete-milestone` and are **never auto-applied**. This command is the user's deliberate promotion step.
6
6
 
7
7
  ## Usage
8
8
 
@@ -25,7 +25,7 @@ Then exit.
25
25
 
26
26
  ```bash
27
27
  node -e "
28
- const opt = require('./bin/token-optimizer.js');
28
+ const opt = require('./bin/token-optimizer.cjs');
29
29
  const content = opt.readBacklog('.');
30
30
  const entries = opt.parseBacklog(content);
31
31
  const id = process.argv[1];
@@ -58,7 +58,7 @@ At Autonomy Level 3: automatically choose option 1 (quick task) unless the recom
58
58
 
59
59
  ```bash
60
60
  node -e "
61
- const opt = require('./bin/token-optimizer.js');
61
+ const opt = require('./bin/token-optimizer.cjs');
62
62
  let content = opt.readBacklog('.');
63
63
  content = opt.setRecommendationStatus(content, process.argv[1], {
64
64
  status: 'promoted'
@@ -28,7 +28,7 @@ Run `/user:gsd-t-backlog-list --file optimization-backlog.md` to see pending rec
28
28
 
29
29
  ```bash
30
30
  node -e "
31
- const opt = require('./bin/token-optimizer.js');
31
+ const opt = require('./bin/token-optimizer.cjs');
32
32
  const content = opt.readBacklog('.');
33
33
  const entries = opt.parseBacklog(content);
34
34
  const id = process.argv[1];
@@ -54,7 +54,7 @@ The reason text defaults to "no reason given" when `--reason` is absent.
54
54
  ```bash
55
55
  REASON="${REASON:-no reason given}"
56
56
  node -e "
57
- const opt = require('./bin/token-optimizer.js');
57
+ const opt = require('./bin/token-optimizer.cjs');
58
58
  let content = opt.readBacklog('.');
59
59
  content = opt.setRecommendationStatus(content, process.argv[1], {
60
60
  status: 'rejected',
@@ -87,7 +87,7 @@ Add a Decision Log entry to `.gsd-t/progress.md`:
87
87
 
88
88
  ## Cooldown Behavior
89
89
 
90
- After rejection, `bin/token-optimizer.js` will skip any fingerprint-matching recommendation for 5 subsequent `complete-milestone` invocations. The cooldown counter is stored in the entry's `Rejection cooldown` field and decrements at each `complete-milestone` run (decrement logic lives in `bin/token-optimizer.js` — Wave 5 docs task DAT-T? covers the decrement step if missing).
90
+ After rejection, `bin/token-optimizer.cjs` will skip any fingerprint-matching recommendation for 5 subsequent `complete-milestone` invocations. The cooldown counter is stored in the entry's `Rejection cooldown` field and decrements at each `complete-milestone` run (decrement logic lives in `bin/token-optimizer.cjs` — Wave 5 docs task DAT-T? covers the decrement step if missing).
91
91
 
92
92
  ## Contract References
93
93
 
@@ -27,7 +27,7 @@ T0_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.
27
27
  ```bash
28
28
  T1_TOKENS=$(node -e "try{const s=require('fs').readFileSync('.gsd-t/.context-meter-state.json','utf8');process.stdout.write(String(JSON.parse(s).inputTokens||0))}catch(_){process.stdout.write('0')}")
29
29
  T1_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.write(String(tb.getSessionStatus('.').pct||0))}catch(_){process.stdout.write('0')}")
30
- node -e "require('./bin/token-telemetry.js').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-quick',phase:'quick',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
30
+ node -e "require('./bin/token-telemetry.cjs').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-quick',phase:'quick',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
31
31
  ```
32
32
 
33
33
  The bracket is additive to the existing `.gsd-t/token-log.md` OBSERVABILITY LOGGING rows. Both sinks coexist — token-log.md is human-readable, token-metrics.jsonl is machine-readable for `gsd-t metrics` aggregation.
@@ -38,7 +38,7 @@ Quick tasks are always single-task, so `remaining_tasks=1`. Run via Bash:
38
38
 
39
39
  ```bash
40
40
  node -e "
41
- const r = require('./bin/runway-estimator.js').estimateRunway({
41
+ const r = require('./bin/runway-estimator.cjs').estimateRunway({
42
42
  command: 'gsd-t-quick',
43
43
  domain_type: '',
44
44
  remaining_tasks: 1,
@@ -48,7 +48,7 @@ console.log(JSON.stringify(r, null, 2));
48
48
  if (!r.can_start) {
49
49
  console.log('⛔ Insufficient runway — projected ' + r.projected_end_pct + '% (current ' + r.current_pct + '%, ' + r.pct_per_task + '%/task, ' + r.confidence + ' confidence, ' + r.confidence_basis + ' records)');
50
50
  console.log('Auto-spawning headless to continue in a fresh context.');
51
- const s = require('./bin/headless-auto-spawn.js').autoSpawnHeadless({
51
+ const s = require('./bin/headless-auto-spawn.cjs').autoSpawnHeadless({
52
52
  command: 'gsd-t-quick', args: [], continue_from: '.'
53
53
  });
54
54
  console.log('Session ID: ' + s.id);
@@ -46,7 +46,7 @@ Before reading any continue-here file or state file, check if a parent process w
46
46
  node -e "
47
47
  const sessionId = process.env.CLAUDE_HEADLESS_SESSION_ID;
48
48
  if (!sessionId) { process.exit(0); }
49
- const hl = require('./bin/handoff-lock.js');
49
+ const hl = require('./bin/handoff-lock.cjs');
50
50
  hl.waitForLockRelease('.', sessionId, 5000)
51
51
  .then(() => process.exit(0))
52
52
  .catch(e => { console.error('[resume] handoff lock wait timed out:', e.message); process.exit(0); });
@@ -38,7 +38,7 @@ Immediately after the headless banner, surface the count of pending token-optimi
38
38
  ```bash
39
39
  node -e "
40
40
  try {
41
- const opt = require('./bin/token-optimizer.js');
41
+ const opt = require('./bin/token-optimizer.cjs');
42
42
  const entries = opt.parseBacklog(opt.readBacklog('.'));
43
43
  const pending = entries.filter(e => e.status === 'pending').length;
44
44
  if (pending > 0) {
@@ -243,7 +243,7 @@ Otherwise, run the actual spawn:
243
243
  ```bash
244
244
  node -e "
245
245
  const path = require('path');
246
- const { spawnSupervisor } = require('./bin/gsd-t-unattended-platform.js');
246
+ const { spawnSupervisor } = require('./bin/gsd-t-unattended-platform.cjs');
247
247
 
248
248
  // Parse CLI args forwarded from the launch command
249
249
  const hours = parseInt(process.env.GSD_T_HOURS || '24', 10) || 24;
@@ -412,7 +412,7 @@ After the tool call, end the turn. The in-session watch loop takes over from her
412
412
 
413
413
  - **Singleton**: only one supervisor per project at a time. PID collision → refuse with "already running" message.
414
414
  - **Stale stop sentinel**: if `.gsd-t/.unattended/stop` exists from a prior run, Step 1d removes it before spawning.
415
- - **Platform helper**: uses `spawnSupervisor` from `bin/gsd-t-unattended-platform.js` — never hand-rolls `child_process.spawn` directly. This handles macOS/Linux/Windows differences.
415
+ - **Platform helper**: uses `spawnSupervisor` from `bin/gsd-t-unattended-platform.cjs` — never hand-rolls `child_process.spawn` directly. This handles macOS/Linux/Windows differences.
416
416
  - **Dry-run**: `--dry-run` prints the would-be invocation without spawning. Useful for validating flags before a long overnight run.
417
417
  - **No doc ripple, no pre-commit gate**: this command spawns a background process; it does not modify any source files or contracts.
418
418
  - **watch command is stateless**: after this command returns, every `/user:gsd-t-unattended-watch` tick re-reads state from disk. There is no in-memory state to preserve.
@@ -26,7 +26,7 @@ T0_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.
26
26
  ```bash
27
27
  T1_TOKENS=$(node -e "try{const s=require('fs').readFileSync('.gsd-t/.context-meter-state.json','utf8');process.stdout.write(String(JSON.parse(s).inputTokens||0))}catch(_){process.stdout.write('0')}")
28
28
  T1_PCT=$(node -e "try{const tb=require('./bin/token-budget.cjs');process.stdout.write(String(tb.getSessionStatus('.').pct||0))}catch(_){process.stdout.write('0')}")
29
- node -e "require('./bin/token-telemetry.js').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-wave',phase:'${PHASE:-}',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
29
+ node -e "require('./bin/token-telemetry.cjs').recordSpawn({timestamp:new Date().toISOString(),milestone:process.env.GSD_T_MILESTONE||'',command:'gsd-t-wave',phase:'${PHASE:-}',step:'${STEP:-}',domain:'${DOMAIN:-}',domain_type:'${DOMAIN_TYPE:-}',task:'${TASK:-}',model:'${MODEL:-sonnet}',duration_s:${DURATION:-0},input_tokens_before:${T0_TOKENS},input_tokens_after:${T1_TOKENS},tokens_consumed:${T1_TOKENS}-${T0_TOKENS},context_window_pct_before:${T0_PCT},context_window_pct_after:${T1_PCT},outcome:'${OUTCOME:-success}',halt_type:${HALT_TYPE:-null},escalated_via_advisor:${ESCALATED_VIA_ADVISOR:-false}})" 2>/dev/null || true
30
30
  ```
31
31
 
32
32
  The bracket is additive to the existing `.gsd-t/token-log.md` OBSERVABILITY LOGGING rows. Both sinks coexist — token-log.md is human-readable with context percentage, token-metrics.jsonl is machine-readable with the full 18-field schema for `gsd-t metrics --tokens/--halts/--context-window` aggregation.
@@ -37,7 +37,7 @@ Count the wave's total task count (sum of atomic tasks across domains in the cur
37
37
 
38
38
  ```bash
39
39
  node -e "
40
- const r = require('./bin/runway-estimator.js').estimateRunway({
40
+ const r = require('./bin/runway-estimator.cjs').estimateRunway({
41
41
  command: 'gsd-t-wave',
42
42
  domain_type: '',
43
43
  remaining_tasks: {N},
@@ -47,7 +47,7 @@ console.log(JSON.stringify(r, null, 2));
47
47
  if (!r.can_start) {
48
48
  console.log('⛔ Insufficient runway — projected ' + r.projected_end_pct + '% (current ' + r.current_pct + '%, ' + r.pct_per_task + '%/task, ' + r.confidence + ' confidence, ' + r.confidence_basis + ' records)');
49
49
  console.log('Auto-spawning headless to continue in a fresh context.');
50
- const s = require('./bin/headless-auto-spawn.js').autoSpawnHeadless({
50
+ const s = require('./bin/headless-auto-spawn.cjs').autoSpawnHeadless({
51
51
  command: 'gsd-t-wave', args: [], continue_from: '.'
52
52
  });
53
53
  console.log('Session ID: ' + s.id);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tekyzinc/gsd-t",
3
- "version": "3.10.13",
3
+ "version": "3.10.15",
4
4
  "description": "GSD-T: Contract-Driven Development for Claude Code — 61 slash commands with unattended supervisor relay, headless CI/CD mode, graph-powered code analysis, real-time agent dashboard, execution intelligence, task telemetry, doc-ripple enforcement, backlog management, impact analysis, test sync, milestone archival, and PRD generation",
5
5
  "author": "Tekyz, Inc.",
6
6
  "license": "MIT",
@@ -152,7 +152,69 @@ async function parseTranscript(transcriptPath) {
152
152
  return null;
153
153
  }
154
154
 
155
- return { system, messages };
155
+ return { system, messages: sanitizeToolPairs(messages) };
156
+ }
157
+
158
+ /**
159
+ * Enforce the count_tokens adjacency constraint: every assistant tool_use
160
+ * must be immediately followed by a user message whose tool_result ids match
161
+ * ALL tool_use ids from the preceding assistant message. Walk the message
162
+ * list and strip tool_use / tool_result blocks from any pair that violates
163
+ * this rule. Drop messages that become empty after stripping.
164
+ */
165
+ function sanitizeToolPairs(messages) {
166
+ const out = [];
167
+ for (let i = 0; i < messages.length; i++) {
168
+ const m = messages[i];
169
+ if (!Array.isArray(m.content)) { out.push(m); continue; }
170
+
171
+ if (m.role === "assistant") {
172
+ const toolUseIds = new Set();
173
+ for (const b of m.content) {
174
+ if (b.type === "tool_use" && typeof b.id === "string") toolUseIds.add(b.id);
175
+ }
176
+
177
+ if (toolUseIds.size === 0) { out.push(m); continue; }
178
+
179
+ const next = messages[i + 1];
180
+ const nextResultIds = new Set();
181
+ if (next && next.role === "user" && Array.isArray(next.content)) {
182
+ for (const b of next.content) {
183
+ if (b.type === "tool_result" && typeof b.tool_use_id === "string") {
184
+ nextResultIds.add(b.tool_use_id);
185
+ }
186
+ }
187
+ }
188
+
189
+ const validIds = new Set([...toolUseIds].filter((id) => nextResultIds.has(id)));
190
+ const filtered = m.content.filter((b) => {
191
+ if (b.type === "tool_use") return validIds.has(b.id);
192
+ return true;
193
+ });
194
+ if (filtered.length > 0) out.push({ role: m.role, content: filtered });
195
+ continue;
196
+ }
197
+
198
+ if (m.role === "user") {
199
+ const prev = out[out.length - 1];
200
+ const prevUseIds = new Set();
201
+ if (prev && prev.role === "assistant" && Array.isArray(prev.content)) {
202
+ for (const b of prev.content) {
203
+ if (b.type === "tool_use" && typeof b.id === "string") prevUseIds.add(b.id);
204
+ }
205
+ }
206
+
207
+ const filtered = m.content.filter((b) => {
208
+ if (b.type === "tool_result") return prevUseIds.has(b.tool_use_id);
209
+ return true;
210
+ });
211
+ if (filtered.length > 0) out.push({ role: m.role, content: filtered });
212
+ continue;
213
+ }
214
+
215
+ out.push(m);
216
+ }
217
+ return out;
156
218
  }
157
219
 
158
220
  /**