@tekyzinc/gsd-t 2.73.25 → 2.74.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,34 @@
2
2
 
3
3
  All notable changes to GSD-T are documented here. Updated with each release.
4
4
 
5
+ ## [2.74.10] - 2026-04-13
6
+
7
+ ### Added
8
+ - **`bin/archive-progress.js`** — rolling Decision Log archival. Keeps the last 5 entries live in `.gsd-t/progress.md`; older entries roll into `.gsd-t/progress-archive/NNN-YYYY-MM-DD.md` files (20 entries each) with an `INDEX.md` for date-range lookup. **Solves the runaway context consumption from progress.md growth** — current GSD-T project saw 163KB → 42KB on first migration (Decision Log section dropped from ~100KB to 13KB). Idempotent, dry-run supported, safe to run anytime.
9
+ - **`bin/log-tail.js`** — truncate test/build log output before forwarding into context. Writes full output to disk, prints only the tail (default 100 lines, 500 on detected failure). Used by command files to prevent multi-thousand-line stdout dumps from npm test / playwright test from blowing context budget.
10
+ - **`bin/context-budget-audit.js`** — measures the static context cost of a Claude Code session before any work happens. Reports tokens consumed by CLAUDE.md files, command manifest, MCP server schemas, auto-memory, and lazy-loaded skill bodies. Use to diagnose why long-running sessions hit manual `/compact` prompts.
11
+ - **Auto-migration on `version-update-all`** — every registered project gets `archive-progress.js`, `log-tail.js`, and `context-budget-audit.js` copied into its `bin/` directory automatically. The progress archive migration runs once per project (gated by `.gsd-t/.archive-migration-v1` marker) so the next `version-update-all` reclaims context budget across every GSD-T project at once.
12
+
13
+ ### Fixed
14
+ - **Mid-session context exhaustion regression** — manual `/compact` prompts that started ~2026-04-10 traced to `progress.md` growing past 50K tokens (25% of the 200K context window in a single file). Every command that read it paid this cost. Archival fix targets the root cause; commands that read `progress.md` now see <10K tokens of relevant content instead of 50K+ of historical decisions.
15
+
16
+ ## [2.73.28] - 2026-04-09
17
+
18
+ ### Fixed
19
+ - **Ctrl+C now cleanly kills the orchestrator and all child processes** — SIGINT handler tracks all spawned Claude processes (build, review, fix) and kills them on Ctrl+C. The sync `spawnClaude` was converted from `execFileSync` (which blocked the event loop and prevented signal handling) to an async `execFile` with a polling wait that checks an interrupt flag. The `waitForReview` polling loop also breaks on Ctrl+C. No more orphaned processes.
20
+
21
+ ## [2.73.27] - 2026-04-09
22
+
23
+ ### Changed
24
+ - **Unlimited human review cycles with auto-review reset** — the orchestrator no longer caps human review iterations. After each human fix: (1) fixes are applied, (2) components are re-measured, (3) automated AI review runs with a fresh cycle counter (up to `maxAutoReviewCycles`), (4) components are re-queued for human review. This loop repeats until the reviewer submits with zero changes. The human is always the final gate.
25
+
26
+ ## [2.73.26] - 2026-04-09
27
+
28
+ ### Added
29
+ - **AI prompt assistant in review panel** — expandable panel in the header (toggle with Ctrl+K or the AI button). Ask questions about the selected component ("what stroke-width is this using?"), get help translating vague corrections into precise contract language ("arcs are too thick" → actionable property changes), and preview responses before committing them as comments via "Use as comment" button. Uses the Claude Code CLI (`claude -p`) so it works with Claude Max subscriptions — no API key needed. Model defaults to opus (override with `GSD_AI_ASSIST_MODEL` env var).
30
+ - **`/review/api/contract` endpoint** — returns the full design contract markdown for a given component path. Used by the AI assistant to provide contract-aware responses.
31
+ - **`/review/api/ai-assist` endpoint** — streaming SSE endpoint that spawns `claude -p` with component context (name, measurements, computed styles, contract). Zero external dependencies — uses the locally installed Claude Code CLI.
32
+
5
33
  ## [2.73.25] - 2026-04-09
6
34
 
7
35
  ### Added
@@ -0,0 +1,335 @@
1
+ #!/usr/bin/env node
2
+ // Archive Progress — keeps .gsd-t/progress.md lean by rolling old Decision Log
3
+ // entries into numbered archive files. Live progress.md keeps only the most recent
4
+ // entries; older entries roll into .gsd-t/progress-archive/NNN-YYYY-MM-DD.md files.
5
+ //
6
+ // Usage:
7
+ // node bin/archive-progress.js # run against ./.gsd-t/progress.md
8
+ // node bin/archive-progress.js --project /path/to # run against a specific project
9
+ // node bin/archive-progress.js --dry-run # show what would happen, change nothing
10
+ // node bin/archive-progress.js --keep 5 # override "keep last N entries" (default 5)
11
+ // node bin/archive-progress.js --per-archive 20 # override "entries per archive" (default 20)
12
+ //
13
+ // Idempotent: safe to run anytime. Re-running with no new entries is a no-op.
14
+ //
15
+ // Decision Log format expected:
16
+ // ## Decision Log (or # Decision Log or any heading containing "Decision Log")
17
+ // - YYYY-MM-DD[ HH:MM][ tag]: {message} ← entry start (matched by /^- \d{4}-\d{2}-\d{2}/)
18
+ // ... continuation lines (indented or non-leading-dash) ...
19
+ // - YYYY-MM-DD[ HH:MM]: {next entry}
20
+
21
+ const fs = require('fs');
22
+ const path = require('path');
23
+
24
+ const DEFAULT_KEEP_LIVE = 5;
25
+ const DEFAULT_PER_ARCHIVE = 20;
26
+
27
+ const ENTRY_START = /^- (\d{4}-\d{2}-\d{2})(?:[ T]\d{2}:\d{2})?/;
28
+
29
function parseArgs(argv) {
  // Parse CLI flags into an options object. Unrecognized flags are ignored.
  // Returns { projectDir, dryRun, keepLive, perArchive, quiet }.
  const opts = {
    projectDir: process.cwd(),
    dryRun: false,
    keepLive: DEFAULT_KEEP_LIVE,
    perArchive: DEFAULT_PER_ARCHIVE,
    quiet: false,
  };
  for (let i = 0; i < argv.length; i++) {
    const a = argv[i];
    if (a === '--project') opts.projectDir = path.resolve(argv[++i]);
    else if (a === '--dry-run') opts.dryRun = true;
    else if (a === '--keep') {
      // Fixed: the parseInt result was never validated, so `--keep oops` left
      // NaN in opts and silently poisoned the slice math downstream. Fall
      // back to the default on a missing/non-numeric value.
      const n = Number.parseInt(argv[++i], 10);
      opts.keepLive = Number.isNaN(n) ? DEFAULT_KEEP_LIVE : n;
    } else if (a === '--per-archive') {
      const n = Number.parseInt(argv[++i], 10);
      opts.perArchive = Number.isNaN(n) ? DEFAULT_PER_ARCHIVE : n;
    } else if (a === '--quiet' || a === '-q') opts.quiet = true;
    else if (a === '--help' || a === '-h') {
      console.log(
        'Usage: node bin/archive-progress.js [--project DIR] [--keep N] [--per-archive N] [--dry-run] [--quiet]'
      );
      process.exit(0);
    }
  }
  return opts;
}
53
+
54
function findDecisionLogBounds(lines) {
  // Locate the Decision Log section. Returns { headingIdx, contentStart, endIdx }
  // where contentStart is the line after the heading and endIdx is exclusive —
  // the next heading at the same or shallower level, or end of file. Returns
  // null when no heading containing "Decision Log" exists.
  const headingIdx = lines.findIndex((l) => /^#{1,6}\s+.*Decision Log/i.test(l));
  if (headingIdx === -1) return null;

  const levelMatch = lines[headingIdx].match(/^(#{1,6})\s/);
  const level = levelMatch ? levelMatch[1].length : 2;

  let endIdx = lines.length;
  for (let i = headingIdx + 1; i < lines.length; i++) {
    const m = lines[i].match(/^(#{1,6})\s/);
    if (m && m[1].length <= level) {
      endIdx = i;
      break;
    }
  }
  return { headingIdx, contentStart: headingIdx + 1, endIdx };
}
81
+
82
function parseEntries(lines, contentStart, endIdx) {
  // Split the Decision Log content into entries. An entry begins on a
  // `- YYYY-MM-DD` line (ENTRY_START); every following line up to the next
  // entry start is attached to it as a continuation. Lines appearing before
  // the first entry are returned separately as the section preamble.
  const entries = [];
  const preambleBeforeFirstEntry = [];
  let current = null;

  for (let i = contentStart; i < endIdx; i++) {
    const line = lines[i];
    const m = line.match(ENTRY_START);
    if (m) {
      if (current !== null) entries.push(current);
      current = { date: m[1], startLine: i, lines: [line] };
    } else if (current !== null) {
      current.lines.push(line);
    } else {
      preambleBeforeFirstEntry.push(line);
    }
  }
  if (current !== null) entries.push(current);

  return { entries, preambleBeforeFirstEntry };
}
111
+
112
function trimTrailingBlankLines(arr) {
  // Return a copy of `arr` with any run of blank (whitespace-only) lines
  // removed from the end. The input array is never mutated.
  let end = arr.length;
  while (end > 0 && arr[end - 1].trim() === '') end--;
  return arr.slice(0, end);
}
117
+
118
function ensureDir(dir) {
  // Create `dir` (and any missing parents) if needed. mkdirSync with
  // { recursive: true } is already a no-op when the directory exists, so the
  // previous existsSync pre-check was redundant and a TOCTOU race.
  fs.mkdirSync(dir, { recursive: true });
}
121
+
122
function nextArchiveSeq(archiveDir) {
  // Next archive sequence number: one past the highest NNN- prefix among
  // files already in archiveDir, or 1 when the directory is missing or holds
  // no numbered archive files.
  if (!fs.existsSync(archiveDir)) return 1;
  let highest = 0;
  for (const name of fs.readdirSync(archiveDir)) {
    const m = name.match(/^(\d{3})-/);
    if (m) highest = Math.max(highest, parseInt(m[1], 10));
  }
  return highest + 1;
}
131
+
132
function formatArchiveFile(seq, entries) {
  // Build one archive file from a window of entries (sorted oldest-first).
  // Returns { fileName, content, firstDate, lastDate, entryCount }.
  // Fixed: removed the dead `const body = entries.flatMap(...)` — it was
  // computed and never used; the `formatted` loop below is what ships.
  const firstDate = entries[0].date;
  const lastDate = entries[entries.length - 1].date;
  const seqStr = String(seq).padStart(3, '0');
  const fileName = `${seqStr}-${firstDate}.md`;
  const header = [
    `# Progress Archive ${seqStr}`,
    '',
    `**Date range**: ${firstDate} → ${lastDate}`,
    `**Entries**: ${entries.length}`,
    `**Generated**: ${new Date().toISOString().slice(0, 10)} by archive-progress.js`,
    '',
    '---',
    '',
    '## Decision Log',
    '',
  ];
  // Entries separated by a single blank line for readability.
  const formatted = [];
  for (let i = 0; i < entries.length; i++) {
    formatted.push(...trimTrailingBlankLines(entries[i].lines));
    if (i < entries.length - 1) formatted.push('');
  }
  return {
    fileName,
    content: header.concat(formatted, ['']).join('\n'),
    firstDate,
    lastDate,
    entryCount: entries.length,
  };
}
165
+
166
function rebuildIndex(archiveDir) {
  // Regenerate INDEX.md content from the archive files currently on disk.
  // Date range and entry count are parsed back out of each file's own header;
  // size comes from stat. Rebuilding from disk keeps the index consistent
  // even after interrupted runs.
  const archiveFiles = fs
    .readdirSync(archiveDir)
    .filter((name) => /^\d{3}-\d{4}-\d{2}-\d{2}\.md$/.test(name))
    .sort();

  const rows = archiveFiles.map((name) => {
    const fullPath = path.join(archiveDir, name);
    const content = fs.readFileSync(fullPath, 'utf8');
    const rangeMatch = content.match(/\*\*Date range\*\*:\s*(\d{4}-\d{2}-\d{2}) → (\d{4}-\d{2}-\d{2})/);
    const countMatch = content.match(/\*\*Entries\*\*:\s*(\d+)/);
    const sizeKB = (fs.statSync(fullPath).size / 1024).toFixed(1);
    const range = rangeMatch ? `${rangeMatch[1]} → ${rangeMatch[2]}` : 'unknown';
    const count = countMatch ? countMatch[1] : '?';
    return `| [${name}](${name}) | ${range} | ${count} | ${sizeKB}KB |`;
  });

  return [
    '# Progress Archive Index',
    '',
    'Generated by `bin/archive-progress.js`. Each archive file holds a window of older Decision Log entries that have been rolled out of the live `progress.md`.',
    '',
    'To find historical context, scan this index for the relevant date range, then read the matching archive file.',
    '',
    '| File | Date range | Entries | Size |',
    '|------|------------|---------|------|',
    ...rows,
    '',
  ].join('\n');
}
195
+
196
function rebuildLiveProgress(originalLines, bounds, keptEntries, archivedCount, archiveDir) {
  // Reassemble progress.md: everything before the Decision Log heading is
  // kept verbatim, the section body becomes an archive pointer (only when
  // archive files exist on disk) plus the kept entries, and everything after
  // the section is kept verbatim.
  // NOTE(review): `archivedCount` is unused here — kept for call-site
  // compatibility.
  const before = originalLines.slice(0, bounds.headingIdx);
  const after = originalLines.slice(bounds.endIdx);

  const section = [originalLines[bounds.headingIdx], ''];

  const hasArchives =
    fs.existsSync(archiveDir) &&
    fs.readdirSync(archiveDir).some((name) => /^\d{3}-/.test(name));
  if (hasArchives) {
    const relPath =
      path.relative(path.dirname(bounds.progressPath || ''), archiveDir) || 'progress-archive';
    section.push(
      `> Older entries archived under \`${relPath}/\` — see \`${relPath}/INDEX.md\` for the date-range index.`,
      ''
    );
  }

  // Kept entries arrive oldest-first; emit in that order so the section still
  // reads chronologically top-to-bottom, with one blank line between entries.
  keptEntries.forEach((entry, idx) => {
    section.push(...trimTrailingBlankLines(entry.lines));
    if (idx < keptEntries.length - 1) section.push('');
  });
  section.push('');

  return [...before, ...section, ...after].join('\n');
}
231
+
232
function archiveProgress(opts) {
  // Top-level archival pass for one project. Reads .gsd-t/progress.md, rolls
  // all but the newest `opts.keepLive` Decision Log entries into numbered
  // archive files (`opts.perArchive` entries each), rebuilds INDEX.md, and
  // rewrites the live progress.md. Idempotent; honors opts.dryRun/opts.quiet.
  // Returns { skipped: true } when there is nothing to do, otherwise
  // { archived, kept, archiveFiles, dryRun }.
  const progressPath = path.join(opts.projectDir, '.gsd-t', 'progress.md');
  if (!fs.existsSync(progressPath)) {
    if (!opts.quiet) console.log(`(no progress.md at ${progressPath} — skipping)`);
    return { skipped: true };
  }

  const original = fs.readFileSync(progressPath, 'utf8');
  const lines = original.split('\n');
  const bounds = findDecisionLogBounds(lines);
  if (!bounds) {
    if (!opts.quiet) console.log('(no Decision Log section found — skipping)');
    return { skipped: true };
  }
  bounds.progressPath = progressPath;

  const { entries } = parseEntries(lines, bounds.contentStart, bounds.endIdx);

  if (entries.length <= opts.keepLive) {
    if (!opts.quiet) {
      console.log(
        `progress.md: ${entries.length} entries (≤ keep=${opts.keepLive}) — nothing to archive`
      );
    }
    return { skipped: true, entryCount: entries.length };
  }

  // Sort oldest-first (ties broken by original file position) so the archived
  // slice is the oldest tail and the kept slice is the newest entries.
  entries.sort((a, b) => {
    if (a.date < b.date) return -1;
    if (a.date > b.date) return 1;
    return a.startLine - b.startLine;
  });

  const toArchive = entries.slice(0, entries.length - opts.keepLive);
  const toKeep = entries.slice(entries.length - opts.keepLive);

  const archiveDir = path.join(opts.projectDir, '.gsd-t', 'progress-archive');
  if (!opts.dryRun) ensureDir(archiveDir);

  // Pack the archived entries into windows of opts.perArchive each.
  const windows = [];
  for (let i = 0; i < toArchive.length; i += opts.perArchive) {
    windows.push(toArchive.slice(i, i + opts.perArchive));
  }

  const writtenFiles = [];
  // Fixed: was `opts.dryRun ? nextArchiveSeq(archiveDir) : nextArchiveSeq(archiveDir)`
  // — both branches of the ternary were identical.
  let seq = nextArchiveSeq(archiveDir);
  for (const window of windows) {
    if (window.length === 0) continue;
    const formatted = formatArchiveFile(seq, window);
    const outPath = path.join(archiveDir, formatted.fileName);
    if (!opts.dryRun) fs.writeFileSync(outPath, formatted.content);
    writtenFiles.push({
      name: formatted.fileName,
      count: window.length,
      range: `${formatted.firstDate} → ${formatted.lastDate}`,
    });
    seq++;
  }

  if (!opts.dryRun) {
    // INDEX.md is rebuilt from what's actually on disk, so it stays
    // consistent even if a previous run was interrupted.
    const indexContent = rebuildIndex(archiveDir);
    fs.writeFileSync(path.join(archiveDir, 'INDEX.md'), indexContent);
  }

  // Rewrite live progress.md with only the kept entries.
  const newProgress = rebuildLiveProgress(lines, bounds, toKeep, toArchive.length, archiveDir);
  if (!opts.dryRun) fs.writeFileSync(progressPath, newProgress);

  if (!opts.quiet) {
    const beforeKB = (Buffer.byteLength(original, 'utf8') / 1024).toFixed(1);
    const afterKB = opts.dryRun
      ? '(dry-run)'
      : (Buffer.byteLength(newProgress, 'utf8') / 1024).toFixed(1) + 'KB';
    console.log(
      `${opts.dryRun ? '[DRY-RUN] ' : ''}progress.md: archived ${toArchive.length} entries → ${writtenFiles.length} archive file(s); kept ${toKeep.length} live`
    );
    for (const f of writtenFiles) {
      console.log(` ${f.name} (${f.count} entries, ${f.range})`);
    }
    console.log(` size: ${beforeKB}KB → ${afterKB}`);
  }

  return {
    archived: toArchive.length,
    kept: toKeep.length,
    archiveFiles: writtenFiles,
    dryRun: opts.dryRun,
  };
}
319
+
320
function main() {
  // CLI entry point: parse flags, run the archival pass, exit 0 on success
  // (including skip/no-op runs) and 1 on error. Set DEBUG=1 for stack traces.
  const opts = parseArgs(process.argv.slice(2));
  try {
    archiveProgress(opts);
    // Fixed: the old `if (result.skipped) process.exit(0);` branch was dead —
    // the very next statement exited 0 unconditionally anyway.
    process.exit(0);
  } catch (e) {
    console.error(`archive-progress: ERROR — ${e.message}`);
    if (process.env.DEBUG) console.error(e.stack);
    process.exit(1);
  }
}
332
+
333
// Run as a CLI when invoked directly; also importable as a library — the
// parsing helpers are exported for reuse and testing.
if (require.main === module) main();

module.exports = { archiveProgress, parseEntries, findDecisionLogBounds };