tlc-claude-code 2.4.2 → 2.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66):
  1. package/.claude/commands/tlc/build.md +75 -5
  2. package/.claude/commands/tlc/discuss.md +174 -123
  3. package/.claude/commands/tlc/e2e-verify.md +1 -1
  4. package/.claude/commands/tlc/plan.md +77 -2
  5. package/.claude/commands/tlc/recall.md +59 -87
  6. package/.claude/commands/tlc/remember.md +76 -71
  7. package/.claude/commands/tlc/review.md +76 -21
  8. package/.claude/commands/tlc/tlc.md +204 -473
  9. package/.claude/hooks/tlc-capture-exchange.sh +50 -21
  10. package/.claude/hooks/tlc-session-init.sh +30 -0
  11. package/CLAUDE.md +6 -5
  12. package/bin/init.js +12 -3
  13. package/package.json +4 -1
  14. package/scripts/dev-link.sh +29 -0
  15. package/scripts/test-package.sh +54 -0
  16. package/scripts/version-sync.js +42 -0
  17. package/scripts/version-sync.test.js +100 -0
  18. package/server/lib/capture/classifier.js +71 -0
  19. package/server/lib/capture/classifier.test.js +71 -0
  20. package/server/lib/capture/claude-capture.js +140 -0
  21. package/server/lib/capture/claude-capture.test.js +152 -0
  22. package/server/lib/capture/codex-capture.js +79 -0
  23. package/server/lib/capture/codex-capture.test.js +161 -0
  24. package/server/lib/capture/codex-event-parser.js +76 -0
  25. package/server/lib/capture/codex-event-parser.test.js +83 -0
  26. package/server/lib/capture/ensure-ready.js +56 -0
  27. package/server/lib/capture/ensure-ready.test.js +135 -0
  28. package/server/lib/capture/envelope.js +77 -0
  29. package/server/lib/capture/envelope.test.js +169 -0
  30. package/server/lib/capture/extractor.js +51 -0
  31. package/server/lib/capture/extractor.test.js +92 -0
  32. package/server/lib/capture/generic-capture.js +96 -0
  33. package/server/lib/capture/generic-capture.test.js +171 -0
  34. package/server/lib/capture/index.js +117 -0
  35. package/server/lib/capture/index.test.js +263 -0
  36. package/server/lib/capture/redactor.js +68 -0
  37. package/server/lib/capture/redactor.test.js +93 -0
  38. package/server/lib/capture/spool-processor.js +155 -0
  39. package/server/lib/capture/spool-processor.test.js +278 -0
  40. package/server/lib/health-check.js +255 -0
  41. package/server/lib/health-check.test.js +243 -0
  42. package/server/lib/model-router.js +11 -2
  43. package/server/lib/model-router.test.js +27 -1
  44. package/server/lib/orchestration/cli-dispatch.js +200 -0
  45. package/server/lib/orchestration/cli-dispatch.test.js +242 -0
  46. package/server/lib/orchestration/codex-orchestrator.js +185 -0
  47. package/server/lib/orchestration/codex-orchestrator.test.js +221 -0
  48. package/server/lib/orchestration/dep-linker.js +61 -0
  49. package/server/lib/orchestration/dep-linker.test.js +174 -0
  50. package/server/lib/orchestration/prompt-builder.js +118 -0
  51. package/server/lib/orchestration/prompt-builder.test.js +200 -0
  52. package/server/lib/orchestration/standalone-compat.js +39 -0
  53. package/server/lib/orchestration/standalone-compat.test.js +144 -0
  54. package/server/lib/orchestration/worktree-manager.js +43 -0
  55. package/server/lib/orchestration/worktree-manager.test.js +50 -0
  56. package/server/lib/router-config.js +18 -3
  57. package/server/lib/router-config.test.js +57 -1
  58. package/server/lib/routing/index.js +34 -0
  59. package/server/lib/routing/index.test.js +33 -0
  60. package/server/lib/routing-command.js +11 -2
  61. package/server/lib/routing-command.test.js +39 -1
  62. package/server/lib/routing-preamble.integration.test.js +319 -0
  63. package/server/lib/routing-preamble.js +34 -11
  64. package/server/lib/routing-preamble.test.js +11 -0
  65. package/server/lib/task-router-config.js +35 -14
  66. package/server/lib/task-router-config.test.js +77 -13
@@ -0,0 +1,93 @@
1
+ import { describe, it, expect } from 'vitest';
2
+
3
+ describe('capture/redactor', () => {
4
+ it('redacts OpenAI style API keys', async () => {
5
+ const { redact } = await import('./redactor.js');
6
+
7
+ expect(redact('token sk-abcdefghijklmnopqrstuvwxyz123456')).toContain('[API_KEY_REDACTED]');
8
+ });
9
+
10
+ it('redacts GitHub tokens with ghp_ and ghu_ prefixes', async () => {
11
+ const { redact } = await import('./redactor.js');
12
+
13
+ const output = redact('ghp_abcdefghijklmnopqrstuvwxyz123456 and ghu_abcdefghijklmnopqrstuvwxyz123456');
14
+
15
+ expect(output).not.toContain('ghp_abcdefghijklmnopqrstuvwxyz123456');
16
+ expect(output).not.toContain('ghu_abcdefghijklmnopqrstuvwxyz123456');
17
+ });
18
+
19
+ it('redacts bearer tokens', async () => {
20
+ const { redact } = await import('./redactor.js');
21
+
22
+ expect(redact('Authorization: Bearer abc.def.ghi')).toBe('Authorization: Bearer [TOKEN_REDACTED]');
23
+ });
24
+
25
+ it('redacts token fields inside quoted JSON-like text', async () => {
26
+ const { redact } = await import('./redactor.js');
27
+
28
+ expect(redact('{"token":"abc123secret"}')).toBe('{"token":"[TOKEN_REDACTED]"}');
29
+ });
30
+
31
+ it('redacts password and secret assignments', async () => {
32
+ const { redact } = await import('./redactor.js');
33
+
34
+ const output = redact('password=hunter2 secret=abc PASSWORD="quoted-secret"');
35
+
36
+ expect(output).toContain('password=[REDACTED]');
37
+ expect(output).toContain('secret=[REDACTED]');
38
+ expect(output).toContain('PASSWORD="[REDACTED]"');
39
+ });
40
+
41
+ it('rewrites absolute user paths under /Users and /home', async () => {
42
+ const { redact } = await import('./redactor.js');
43
+
44
+ const output = redact('See /Users/jurgen/project/file.js and /home/alice/work/app.js');
45
+
46
+ expect(output).toBe('See ~/project/file.js and ~/work/app.js');
47
+ });
48
+
49
+ it('collapses stack traces to first and last line with omitted count', async () => {
50
+ const { redact } = await import('./redactor.js');
51
+ const input = [
52
+ 'Error: boom',
53
+ ' at a (/tmp/a.js:1:1)',
54
+ ' at b (/tmp/b.js:2:2)',
55
+ ' at c (/tmp/c.js:3:3)',
56
+ 'Caused by: root issue',
57
+ ].join('\n');
58
+
59
+ expect(redact(input)).toBe('Error: boom\n... 3 more lines\nCaused by: root issue');
60
+ });
61
+
62
+ it('does not collapse short multi-line text that is not a stack trace', async () => {
63
+ const { redact } = await import('./redactor.js');
64
+ const input = 'line one\nline two';
65
+
66
+ expect(redact(input)).toBe('line one\nline two');
67
+ });
68
+
69
+ it('redacts long base64 blobs', async () => {
70
+ const { redact } = await import('./redactor.js');
71
+ const blob = 'QUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVo='.repeat(4);
72
+
73
+ expect(redact(`payload=${blob}`)).toBe('payload=[BASE64_REDACTED]');
74
+ });
75
+
76
+ it('applies extra custom patterns after built-in redaction', async () => {
77
+ const { redact } = await import('./redactor.js');
78
+
79
+ const output = redact('Order ID: 12345', {
80
+ extraPatterns: [
81
+ { pattern: /12345/g, replacement: '[ORDER_ID]' },
82
+ ],
83
+ });
84
+
85
+ expect(output).toBe('Order ID: [ORDER_ID]');
86
+ });
87
+
88
+ it('returns the original string when there is nothing to redact', async () => {
89
+ const { redact } = await import('./redactor.js');
90
+
91
+ expect(redact('plain text only')).toBe('plain text only');
92
+ });
93
+ });
@@ -0,0 +1,155 @@
1
+ const path = require('path');
2
+
3
+ const { validateEnvelope } = require('./envelope');
4
+ const { redact } = require('./redactor');
5
+ const { extractDecisions } = require('./extractor');
6
+ const { classify } = require('./classifier');
7
+ const { ensureMemoryReady } = require('./ensure-ready');
8
+
9
+ const SPOOL_PATH = path.join('.tlc', 'memory', '.spool.jsonl');
10
+
11
/**
 * Escape a value for embedding inside a double-quoted YAML scalar.
 *
 * Backslashes and double quotes are escaped as before; newlines and
 * carriage returns are now escaped too, because buildFrontmatter emits
 * the value on a single `taskName: "..."` line — a raw newline in the
 * input would otherwise split that line and corrupt the frontmatter.
 *
 * @param {*} value - any value; null/undefined become the empty string
 * @returns {string} the escaped scalar content (without the quotes)
 */
function escapeYamlString(value) {
  return String(value ?? '')
    .replace(/\\/g, '\\\\')
    .replace(/"/g, '\\"')
    .replace(/\r/g, '\\r')
    .replace(/\n/g, '\\n');
}
14
+
15
/**
 * Derive a short, filesystem-safe slug from a statement.
 *
 * Lowercases the text, strips quote characters, keeps at most the first
 * five alphanumeric words joined with hyphens, caps the result at 40
 * characters (trimming any trailing hyphens left by the cut), and falls
 * back to 'entry' when nothing usable remains.
 *
 * @param {*} statement - text to slugify
 * @returns {string} a non-empty slug
 */
function slugify(statement) {
  const cleaned = String(statement).toLowerCase().replace(/['"]/g, '');
  const tokens = cleaned.match(/[a-z0-9]+/g) || [];
  const joined = tokens.slice(0, 5).join('-');
  const capped = joined.slice(0, 40).replace(/-+$/g, '');

  return capped === '' ? 'entry' : capped;
}
25
+
26
/**
 * Extract the YYYY-MM-DD date prefix from an ISO-8601 timestamp string.
 *
 * @param {*} timestamp - expected ISO timestamp; non-strings are rejected
 * @returns {string} the date prefix, or 'unknown-date' when the input is
 *   not a string or its first ten characters are not date-shaped
 */
function dateFromTimestamp(timestamp) {
  if (typeof timestamp !== 'string') {
    return 'unknown-date';
  }

  const prefix = timestamp.slice(0, 10);

  return /^\d{4}-\d{2}-\d{2}$/.test(prefix) ? prefix : 'unknown-date';
}
30
+
31
/**
 * Render one memory entry as markdown: a YAML frontmatter header followed
 * by the extracted statement and a trailing blank line.
 *
 * @param {object} envelope - validated spool envelope (provider, source,
 *   timestamp, taskName)
 * @param {{type: string, scope: string}} classification - result of classify()
 * @param {number} confidence - extraction confidence, rounded to 4 decimals
 * @param {string} statement - the (already redacted) statement text
 * @returns {string} complete markdown file content
 */
function buildFrontmatter(envelope, classification, confidence, statement) {
  const fields = [
    `provider: ${envelope.provider}`,
    `source: ${envelope.source || 'unknown'}`,
    `timestamp: ${envelope.timestamp}`,
    `taskName: "${escapeYamlString(envelope.taskName || '')}"`,
    `confidence: ${Number(confidence.toFixed(4))}`,
    `type: ${classification.type}`,
    `scope: ${classification.scope}`,
  ];

  return ['---', ...fields, '---', '', statement, ''].join('\n');
}
47
+
48
/**
 * Map a classification to the directory (relative to the project root)
 * where its markdown entry belongs. Team-scoped decisions and gotchas go
 * to shared team directories; everything else is personal session memory.
 *
 * @param {{type: string, scope: string}} classification
 * @returns {string} relative directory path
 */
function resolveTargetDir(classification) {
  const { scope, type } = classification;

  if (scope === 'team' && (type === 'decision' || type === 'gotcha')) {
    // 'decision' -> decisions/, 'gotcha' -> gotchas/
    return path.join('.tlc', 'memory', 'team', `${type}s`);
  }

  return path.join('.tlc', 'memory', '.local', 'sessions');
}
59
+
60
/**
 * Find a non-colliding markdown file path inside projectDir/relativeDir.
 * Tries `${baseName}.md` first, then `${baseName}-2.md`, `-3`, ... until
 * an unused name is found.
 *
 * @param {string} projectDir - project root
 * @param {string} relativeDir - target directory relative to the root
 * @param {string} baseName - file name without extension
 * @param {object} fs - fs implementation (injectable for tests)
 * @returns {string} absolute path of the first available file name
 */
function nextAvailableFilePath(projectDir, relativeDir, baseName, fs) {
  const directory = path.join(projectDir, relativeDir);

  for (let suffix = 1; ; suffix += 1) {
    const fileName = suffix === 1 ? `${baseName}.md` : `${baseName}-${suffix}.md`;
    const candidatePath = path.join(directory, fileName);

    if (!fs.existsSync(candidatePath)) {
      return candidatePath;
    }
  }
}
72
+
73
/**
 * Persist each extracted statement as a markdown memory file.
 *
 * Statements that fail to classify, or that classify as transient session
 * noise, are silently dropped. Every other statement is written under the
 * directory chosen by resolveTargetDir, using a date-prefixed slug with a
 * collision-free suffix.
 *
 * @param {string} projectDir - project root
 * @param {object} envelope - validated spool envelope
 * @param {Array<{statement: string, confidence: number}>} statements
 * @param {object} fs - fs implementation (injectable for tests)
 */
function writeStatements(projectDir, envelope, statements, fs) {
  // The date prefix depends only on the envelope, so compute it once.
  const datePrefix = dateFromTimestamp(envelope.timestamp);

  for (const { statement, confidence } of statements) {
    const classification = classify(statement);

    if (!classification || classification.type === 'session') {
      continue;
    }

    const baseName = `${datePrefix}-${slugify(statement)}`;
    const targetDir = resolveTargetDir(classification);
    const targetPath = nextAvailableFilePath(projectDir, targetDir, baseName, fs);
    const content = buildFrontmatter(envelope, classification, confidence, statement);

    fs.writeFileSync(targetPath, content);
  }
}
89
+
90
+ function processSpool(projectDir, { fs = require('fs') } = {}) {
91
+ const spoolPath = path.join(projectDir, SPOOL_PATH);
92
+
93
+ if (!fs.existsSync(spoolPath)) {
94
+ return { processed: 0, skipped: 0, warnings: [] };
95
+ }
96
+
97
+ const input = fs.readFileSync(spoolPath, 'utf8');
98
+ const lines = input === '' ? [] : input.split('\n');
99
+ const warnings = [];
100
+ const remainingLines = [];
101
+ let processed = 0;
102
+ let skipped = 0;
103
+ let ensured = false;
104
+
105
+ for (let index = 0; index < lines.length; index += 1) {
106
+ const line = lines[index];
107
+
108
+ if (!line.trim()) {
109
+ continue;
110
+ }
111
+
112
+ let envelope;
113
+
114
+ try {
115
+ envelope = JSON.parse(line);
116
+ } catch (error) {
117
+ skipped += 1;
118
+ remainingLines.push(line);
119
+ warnings.push(`Malformed JSON on line ${index + 1}: ${error.message}`);
120
+ continue;
121
+ }
122
+
123
+ const validation = validateEnvelope(envelope);
124
+
125
+ if (!validation.valid) {
126
+ skipped += 1;
127
+ remainingLines.push(line);
128
+ warnings.push(`Invalid envelope on line ${index + 1}: ${validation.errors.join(', ')}`);
129
+ continue;
130
+ }
131
+
132
+ const redactedText = redact(envelope.text);
133
+ const statements = extractDecisions(redactedText);
134
+
135
+ if (!ensured) {
136
+ ensureMemoryReady(projectDir, { fs });
137
+ ensured = true;
138
+ }
139
+
140
+ writeStatements(projectDir, envelope, statements, fs);
141
+ processed += 1;
142
+ }
143
+
144
+ fs.writeFileSync(spoolPath, remainingLines.join('\n'));
145
+
146
+ return {
147
+ processed,
148
+ skipped,
149
+ warnings,
150
+ };
151
+ }
152
+
153
+ module.exports = {
154
+ processSpool,
155
+ };
@@ -0,0 +1,278 @@
1
// Integration-style tests for capture/spool-processor. Each test builds a
// throwaway project directory under the OS temp dir, seeds the spool file,
// runs processSpool, and inspects the markdown files and residual spool
// content it leaves behind.
import { describe, it, beforeEach, afterEach, expect, vi } from 'vitest';
import fs from 'fs';
import os from 'os';
import path from 'path';

const spoolRelativePath = path.join('.tlc', 'memory', '.spool.jsonl');

// Read a file as UTF-8 text.
function readFile(filePath) {
  return fs.readFileSync(filePath, 'utf8');
}

// Return the spool's current contents, or null when it does not exist.
function readSpool(projectDir) {
  const spoolPath = path.join(projectDir, spoolRelativePath);

  if (!fs.existsSync(spoolPath)) {
    return null;
  }

  return readFile(spoolPath);
}

// Seed the spool file with the given lines, creating parent directories.
function writeSpool(projectDir, lines) {
  const spoolPath = path.join(projectDir, spoolRelativePath);
  fs.mkdirSync(path.dirname(spoolPath), { recursive: true });
  fs.writeFileSync(spoolPath, lines.join('\n'));
}

// List the markdown files (sorted) under projectDir/relativeDir,
// ignoring .gitkeep placeholders.
function findMarkdownFiles(projectDir, relativeDir) {
  const targetDir = path.join(projectDir, relativeDir);

  if (!fs.existsSync(targetDir)) {
    return [];
  }

  return fs.readdirSync(targetDir)
    .filter((entry) => entry.endsWith('.md') && entry !== '.gitkeep')
    .sort();
}

describe('capture/spool-processor', () => {
  let projectDir;
  let processSpool;

  beforeEach(async () => {
    // Fresh temp project and a fresh module instance per test.
    projectDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tlc-spool-processor-test-'));
    ({ processSpool } = await import('./spool-processor.js'));
  });

  afterEach(() => {
    vi.resetModules();
    fs.rmSync(projectDir, { recursive: true, force: true });
  });

  it('returns zero counts when the spool file is missing', () => {
    const result = processSpool(projectDir);

    expect(result).toEqual({
      processed: 0,
      skipped: 0,
      warnings: [],
    });
  });

  it('writes team decisions, team gotchas, and personal sessions to their target directories', () => {
    // One envelope whose text contains a decision, a gotcha, and a
    // personal-preference statement — each should land in its own dir.
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: [
          'We decided to use Postgres for auth state.',
          "Don't use synchronous fs calls in request handlers.",
          'I prefer terminal-first workflows for debugging.',
        ].join(' '),
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result).toEqual({
      processed: 1,
      skipped: 0,
      warnings: [],
    });

    const decisionFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'));
    const gotchaFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'gotchas'));
    const sessionFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', '.local', 'sessions'));

    expect(decisionFiles).toEqual(['2026-03-28-we-decided-to-use-postgres.md']);
    expect(gotchaFiles).toEqual(['2026-03-28-dont-use-synchronous-fs-calls.md']);
    expect(sessionFiles).toEqual(['2026-03-28-i-prefer-terminal-first-workflows.md']);

    expect(readFile(path.join(projectDir, '.tlc', 'memory', 'team', 'decisions', decisionFiles[0]))).toContain([
      '---',
      'provider: codex',
      'source: stdout',
      'timestamp: 2026-03-28T12:34:56.000Z',
      'taskName: "Build auth module"',
      'confidence: 0.9',
      'type: decision',
      'scope: team',
      '---',
      '',
      'We decided to use Postgres for auth state.',
    ].join('\n'));

    expect(readFile(path.join(projectDir, '.tlc', 'memory', 'team', 'gotchas', gotchaFiles[0]))).toContain('type: gotcha');
    expect(readFile(path.join(projectDir, '.tlc', 'memory', '.local', 'sessions', sessionFiles[0]))).toContain('scope: personal');
    expect(readSpool(projectDir)).toBe('');
  });

  it('skips malformed JSON lines and keeps them in the spool with a warning', () => {
    writeSpool(projectDir, [
      '{"provider":"codex"',
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to use Postgres for auth state.',
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result.processed).toBe(1);
    expect(result.skipped).toBe(1);
    expect(result.warnings).toHaveLength(1);
    expect(result.warnings[0]).toContain('Malformed JSON');
    expect(readSpool(projectDir)).toBe('{"provider":"codex"');
  });

  it('skips invalid envelopes and leaves them in the spool', () => {
    // First envelope is missing its required `text` field.
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
      }),
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to use Postgres for auth state.',
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result.processed).toBe(1);
    expect(result.skipped).toBe(1);
    expect(result.warnings).toHaveLength(1);
    expect(result.warnings[0]).toContain('Invalid envelope');
    expect(readSpool(projectDir)).toBe(JSON.stringify({
      provider: 'codex',
      source: 'stdout',
      taskName: 'Build auth module',
      timestamp: '2026-03-28T12:34:56.000Z',
    }));
  });

  it('treats valid envelopes with no extracted statements as processed and removes them from the spool', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'The server responded in 120ms and all tests passed.',
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result).toEqual({
      processed: 1,
      skipped: 0,
      warnings: [],
    });
    expect(findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'))).toEqual([]);
    expect(findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'gotchas'))).toEqual([]);
    expect(findMarkdownFiles(projectDir, path.join('.tlc', 'memory', '.local', 'sessions'))).toEqual([]);
    expect(readSpool(projectDir)).toBe('');
  });

  it('redacts secrets before extraction and writing files', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to store token=abc123secret in env docs.',
      }),
    ]);

    processSpool(projectDir);

    const [decisionFile] = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'));
    const content = readFile(path.join(projectDir, '.tlc', 'memory', 'team', 'decisions', decisionFile));

    expect(content).toContain('We decided to store token=[TOKEN_REDACTED] in env docs.');
    expect(content).not.toContain('abc123secret');
  });

  it('truncates slugs to 40 characters and uses the timestamp date prefix', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-29T00:00:01.000Z',
        text: 'We decided to use extremely descriptive boundary names across packages for clarity.',
      }),
    ]);

    processSpool(projectDir);

    const decisionFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'));

    expect(decisionFiles).toEqual(['2026-03-29-we-decided-to-use-extremely.md']);
  });

  it('preserves skipped lines in order after processing other entries', () => {
    const invalidLine = '{"provider":';
    const validDecision = JSON.stringify({
      provider: 'codex',
      source: 'stdout',
      taskName: 'Build auth module',
      timestamp: '2026-03-28T12:34:56.000Z',
      text: 'We decided to use Postgres for auth state.',
    });
    const invalidEnvelope = JSON.stringify({
      provider: 'codex',
      source: 'stdout',
      taskName: 'Build auth module',
      timestamp: '2026-03-28T12:34:56.000Z',
    });

    writeSpool(projectDir, [invalidLine, validDecision, invalidEnvelope]);

    const result = processSpool(projectDir);

    expect(result.processed).toBe(1);
    expect(result.skipped).toBe(2);
    expect(readSpool(projectDir)).toBe([invalidLine, invalidEnvelope].join('\n'));
  });

  it('supports injected fs implementations', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to use Postgres for auth state.',
      }),
    ]);

    // Spy-wrapped real fs so we can assert the injected object is used.
    const injectedFs = {
      ...fs,
      existsSync: vi.fn(fs.existsSync),
      mkdirSync: vi.fn(fs.mkdirSync),
      readFileSync: vi.fn(fs.readFileSync),
      writeFileSync: vi.fn(fs.writeFileSync),
    };

    const result = processSpool(projectDir, { fs: injectedFs });

    expect(result.processed).toBe(1);
    expect(injectedFs.readFileSync).toHaveBeenCalled();
    expect(injectedFs.writeFileSync).toHaveBeenCalled();
  });
});