tlc-claude-code 2.4.3 → 2.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/tlc/build.md +7 -5
- package/.claude/commands/tlc/recall.md +59 -87
- package/.claude/commands/tlc/remember.md +76 -71
- package/.claude/commands/tlc/review.md +76 -21
- package/.claude/hooks/tlc-capture-exchange.sh +50 -21
- package/.claude/hooks/tlc-session-init.sh +30 -0
- package/bin/init.js +12 -3
- package/package.json +1 -1
- package/server/lib/capture/classifier.js +71 -0
- package/server/lib/capture/classifier.test.js +71 -0
- package/server/lib/capture/claude-capture.js +140 -0
- package/server/lib/capture/claude-capture.test.js +152 -0
- package/server/lib/capture/codex-capture.js +79 -0
- package/server/lib/capture/codex-capture.test.js +161 -0
- package/server/lib/capture/codex-event-parser.js +76 -0
- package/server/lib/capture/codex-event-parser.test.js +83 -0
- package/server/lib/capture/ensure-ready.js +56 -0
- package/server/lib/capture/ensure-ready.test.js +135 -0
- package/server/lib/capture/envelope.js +77 -0
- package/server/lib/capture/envelope.test.js +169 -0
- package/server/lib/capture/extractor.js +51 -0
- package/server/lib/capture/extractor.test.js +92 -0
- package/server/lib/capture/generic-capture.js +96 -0
- package/server/lib/capture/generic-capture.test.js +171 -0
- package/server/lib/capture/index.js +117 -0
- package/server/lib/capture/index.test.js +263 -0
- package/server/lib/capture/redactor.js +68 -0
- package/server/lib/capture/redactor.test.js +93 -0
- package/server/lib/capture/spool-processor.js +155 -0
- package/server/lib/capture/spool-processor.test.js +278 -0
- package/server/lib/health-check.js +255 -0
- package/server/lib/health-check.test.js +243 -0
- package/server/lib/orchestration/cli-dispatch.js +200 -0
- package/server/lib/orchestration/cli-dispatch.test.js +242 -0
- package/server/lib/orchestration/prompt-builder.js +118 -0
- package/server/lib/orchestration/prompt-builder.test.js +200 -0
- package/server/lib/orchestration/standalone-compat.js +39 -0
- package/server/lib/orchestration/standalone-compat.test.js +144 -0
- package/server/lib/orchestration/worktree-manager.js +43 -0
- package/server/lib/orchestration/worktree-manager.test.js +50 -0
|
@@ -0,0 +1,278 @@
|
|
|
1
|
+
import { describe, it, beforeEach, afterEach, expect, vi } from 'vitest';
|
|
2
|
+
import fs from 'fs';
|
|
3
|
+
import os from 'os';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
|
|
6
|
+
// Location of the capture spool (newline-delimited JSON envelopes), relative to the project dir.
const spoolRelativePath = path.join('.tlc', 'memory', '.spool.jsonl');
|
|
7
|
+
|
|
8
|
+
/**
 * Read a file and return its contents as a UTF-8 string.
 * @param {string} filePath - Path of the file to read.
 * @returns {string} File contents.
 */
function readFile(filePath) {
  const options = { encoding: 'utf8' };
  return fs.readFileSync(filePath, options);
}
|
|
11
|
+
|
|
12
|
+
/**
 * Read the raw spool file for a project.
 * @param {string} projectDir - Project root directory.
 * @returns {string|null} Spool contents, or null when no spool file exists.
 */
function readSpool(projectDir) {
  const target = path.join(projectDir, spoolRelativePath);
  return fs.existsSync(target) ? readFile(target) : null;
}
|
|
21
|
+
|
|
22
|
+
/**
 * Write spool entries (one JSONL line each) to the project's spool file,
 * creating the parent directory when needed.
 * @param {string} projectDir - Project root directory.
 * @param {string[]} lines - Raw JSONL lines to persist.
 */
function writeSpool(projectDir, lines) {
  const target = path.join(projectDir, spoolRelativePath);
  const parentDir = path.dirname(target);
  fs.mkdirSync(parentDir, { recursive: true });
  const payload = lines.join('\n');
  fs.writeFileSync(target, payload);
}
|
|
27
|
+
|
|
28
|
+
/**
 * List the markdown files directly inside a directory, sorted by name.
 * @param {string} projectDir - Project root directory.
 * @param {string} relativeDir - Directory to scan, relative to projectDir.
 * @returns {string[]} Sorted markdown file names; [] when the dir is missing.
 */
function findMarkdownFiles(projectDir, relativeDir) {
  const targetDir = path.join(projectDir, relativeDir);

  if (!fs.existsSync(targetDir)) {
    return [];
  }

  const markdownFiles = [];
  for (const entry of fs.readdirSync(targetDir)) {
    if (entry.endsWith('.md') && entry !== '.gitkeep') {
      markdownFiles.push(entry);
    }
  }

  return markdownFiles.sort();
}
|
|
39
|
+
|
|
40
|
+
// End-to-end tests for the spool processor: each test writes a JSONL spool into a
// fresh temp project dir, runs processSpool, and inspects the markdown files it
// emits plus what remains in the spool afterwards.
describe('capture/spool-processor', () => {
  let projectDir;
  let processSpool;

  beforeEach(async () => {
    projectDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tlc-spool-processor-test-'));
    // Dynamic import per test so each run gets a fresh module instance
    // (paired with vi.resetModules() in afterEach, which clears the import cache).
    ({ processSpool } = await import('./spool-processor.js'));
  });

  afterEach(() => {
    vi.resetModules();
    fs.rmSync(projectDir, { recursive: true, force: true });
  });

  it('returns zero counts when the spool file is missing', () => {
    const result = processSpool(projectDir);

    expect(result).toEqual({
      processed: 0,
      skipped: 0,
      warnings: [],
    });
  });

  it('writes team decisions, team gotchas, and personal sessions to their target directories', () => {
    // One envelope whose text contains all three statement kinds:
    // a decision, a gotcha, and a personal preference.
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: [
          'We decided to use Postgres for auth state.',
          "Don't use synchronous fs calls in request handlers.",
          'I prefer terminal-first workflows for debugging.',
        ].join(' '),
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result).toEqual({
      processed: 1,
      skipped: 0,
      warnings: [],
    });

    const decisionFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'));
    const gotchaFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'gotchas'));
    const sessionFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', '.local', 'sessions'));

    // Filenames are <timestamp-date>-<slug-of-statement>.md
    expect(decisionFiles).toEqual(['2026-03-28-we-decided-to-use-postgres.md']);
    expect(gotchaFiles).toEqual(['2026-03-28-dont-use-synchronous-fs-calls.md']);
    expect(sessionFiles).toEqual(['2026-03-28-i-prefer-terminal-first-workflows.md']);

    // Each file carries a YAML front-matter header followed by the statement body.
    expect(readFile(path.join(projectDir, '.tlc', 'memory', 'team', 'decisions', decisionFiles[0]))).toContain([
      '---',
      'provider: codex',
      'source: stdout',
      'timestamp: 2026-03-28T12:34:56.000Z',
      'taskName: "Build auth module"',
      'confidence: 0.9',
      'type: decision',
      'scope: team',
      '---',
      '',
      'We decided to use Postgres for auth state.',
    ].join('\n'));

    expect(readFile(path.join(projectDir, '.tlc', 'memory', 'team', 'gotchas', gotchaFiles[0]))).toContain('type: gotcha');
    expect(readFile(path.join(projectDir, '.tlc', 'memory', '.local', 'sessions', sessionFiles[0]))).toContain('scope: personal');
    // A fully processed spool is drained to an empty file, not deleted.
    expect(readSpool(projectDir)).toBe('');
  });

  it('skips malformed JSON lines and keeps them in the spool with a warning', () => {
    writeSpool(projectDir, [
      '{"provider":"codex"',
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to use Postgres for auth state.',
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result.processed).toBe(1);
    expect(result.skipped).toBe(1);
    expect(result.warnings).toHaveLength(1);
    expect(result.warnings[0]).toContain('Malformed JSON');
    // The unparseable line is retained for later inspection.
    expect(readSpool(projectDir)).toBe('{"provider":"codex"');
  });

  it('skips invalid envelopes and leaves them in the spool', () => {
    writeSpool(projectDir, [
      // First envelope is missing its `text` field, so it fails validation.
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
      }),
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to use Postgres for auth state.',
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result.processed).toBe(1);
    expect(result.skipped).toBe(1);
    expect(result.warnings).toHaveLength(1);
    expect(result.warnings[0]).toContain('Invalid envelope');
    expect(readSpool(projectDir)).toBe(JSON.stringify({
      provider: 'codex',
      source: 'stdout',
      taskName: 'Build auth module',
      timestamp: '2026-03-28T12:34:56.000Z',
    }));
  });

  it('treats valid envelopes with no extracted statements as processed and removes them from the spool', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        // Plain status text — contains no decision/gotcha/preference statements.
        text: 'The server responded in 120ms and all tests passed.',
      }),
    ]);

    const result = processSpool(projectDir);

    expect(result).toEqual({
      processed: 1,
      skipped: 0,
      warnings: [],
    });
    expect(findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'))).toEqual([]);
    expect(findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'gotchas'))).toEqual([]);
    expect(findMarkdownFiles(projectDir, path.join('.tlc', 'memory', '.local', 'sessions'))).toEqual([]);
    expect(readSpool(projectDir)).toBe('');
  });

  it('redacts secrets before extraction and writing files', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to store token=abc123secret in env docs.',
      }),
    ]);

    processSpool(projectDir);

    const [decisionFile] = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'));
    const content = readFile(path.join(projectDir, '.tlc', 'memory', 'team', 'decisions', decisionFile));

    // The secret value must never reach disk in any form.
    expect(content).toContain('We decided to store token=[TOKEN_REDACTED] in env docs.');
    expect(content).not.toContain('abc123secret');
  });

  it('truncates slugs to 40 characters and uses the timestamp date prefix', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-29T00:00:01.000Z',
        text: 'We decided to use extremely descriptive boundary names across packages for clarity.',
      }),
    ]);

    processSpool(projectDir);

    const decisionFiles = findMarkdownFiles(projectDir, path.join('.tlc', 'memory', 'team', 'decisions'));

    expect(decisionFiles).toEqual(['2026-03-29-we-decided-to-use-extremely.md']);
  });

  it('preserves skipped lines in order after processing other entries', () => {
    const invalidLine = '{"provider":';
    const validDecision = JSON.stringify({
      provider: 'codex',
      source: 'stdout',
      taskName: 'Build auth module',
      timestamp: '2026-03-28T12:34:56.000Z',
      text: 'We decided to use Postgres for auth state.',
    });
    const invalidEnvelope = JSON.stringify({
      provider: 'codex',
      source: 'stdout',
      taskName: 'Build auth module',
      timestamp: '2026-03-28T12:34:56.000Z',
    });

    writeSpool(projectDir, [invalidLine, validDecision, invalidEnvelope]);

    const result = processSpool(projectDir);

    expect(result.processed).toBe(1);
    expect(result.skipped).toBe(2);
    // Skipped lines stay in their original relative order.
    expect(readSpool(projectDir)).toBe([invalidLine, invalidEnvelope].join('\n'));
  });

  it('supports injected fs implementations', () => {
    writeSpool(projectDir, [
      JSON.stringify({
        provider: 'codex',
        source: 'stdout',
        taskName: 'Build auth module',
        timestamp: '2026-03-28T12:34:56.000Z',
        text: 'We decided to use Postgres for auth state.',
      }),
    ]);

    // Spy wrappers delegate to the real fs so processing still works.
    const injectedFs = {
      ...fs,
      existsSync: vi.fn(fs.existsSync),
      mkdirSync: vi.fn(fs.mkdirSync),
      readFileSync: vi.fn(fs.readFileSync),
      writeFileSync: vi.fn(fs.writeFileSync),
    };

    const result = processSpool(projectDir, { fs: injectedFs });

    expect(result.processed).toBe(1);
    expect(injectedFs.readFileSync).toHaveBeenCalled();
    expect(injectedFs.writeFileSync).toHaveBeenCalled();
  });
});
|
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
const path = require('path');
|
|
2
|
+
|
|
3
|
+
// One day in milliseconds — the "recent" cutoff used by countRecentTimestampedLines.
const DAY_IN_MS = 24 * 60 * 60 * 1000;
// Memory directories (relative to the project root) that must exist for
// capture/recall to work; ensureMemoryDirs creates any that are missing.
const MEMORY_DIRS = [
  '.tlc/memory/team/decisions',
  '.tlc/memory/team/gotchas',
  '.tlc/memory/.local/sessions',
];
|
|
9
|
+
|
|
10
|
+
/**
 * Build a passing health-check result.
 * @param {string} message - Human-readable pass message.
 * @param {string[]} [autoFixed] - Items that were auto-repaired along the way.
 * @returns {{passed: string, warning: null, autoFixed: string[]}}
 */
function passResult(message, autoFixed = []) {
  const result = { passed: message, warning: null, autoFixed };
  return result;
}
|
|
17
|
+
|
|
18
|
+
/**
 * Build a warning health-check result, prefixing the standard TLC tag.
 * @param {string} message - Description of the problem and suggested remedy.
 * @param {string[]} [autoFixed] - Items that were auto-repaired along the way.
 * @returns {{passed: null, warning: string, autoFixed: string[]}}
 */
function warningResult(message, autoFixed = []) {
  const tagged = `[TLC WARNING] ${message}`;
  return { passed: null, warning: tagged, autoFixed };
}
|
|
25
|
+
|
|
26
|
+
/**
 * Ensure every TLC memory directory exists, creating missing ones.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object}} [deps] - Injectable fs implementation (for tests).
 * @returns {Promise<object>} passResult listing created dirs in autoFixed,
 *   or warningResult when the filesystem could not be inspected/updated.
 */
async function ensureMemoryDirs(projectDir, { fs = require('fs') } = {}) {
  // Collected outside the try so a partial fix-up is still reported on failure.
  const created = [];

  try {
    MEMORY_DIRS.forEach((relativeDir) => {
      const fullPath = path.join(projectDir, relativeDir);
      if (fs.existsSync(fullPath)) {
        return;
      }
      fs.mkdirSync(fullPath, { recursive: true });
      created.push(relativeDir);
    });

    return passResult('Memory directories ready', created);
  } catch (error) {
    return warningResult(
      `Unable to verify memory directories under .tlc/memory: ${error.message}`,
      created
    );
  }
}
|
|
46
|
+
|
|
47
|
+
/**
 * Verify that .tlc/memory/team/ is not excluded by gitignore rules.
 *
 * `git check-ignore` exits 0 and prints the path when it IS ignored, and
 * exits 1 (error.status === 1) when it is NOT ignored — the healthy case.
 *
 * @param {string} projectDir - Project root directory (used as cwd for git).
 * @param {{execSync?: Function}} [deps] - Injectable execSync (for tests).
 * @returns {Promise<object>} passResult when team memory is trackable,
 *   warningResult when it is ignored or git could not be consulted.
 */
async function checkGitignoreTeamMemory(
  projectDir,
  { execSync = require('child_process').execSync } = {}
) {
  try {
    // stderr is already discarded via the stdio option, so no shell redirect
    // is needed (the previous `2>/dev/null` suffix was redundant and not
    // portable to Windows shells, where /dev/null is a literal path).
    const output = execSync('git check-ignore .tlc/memory/team/test.md', {
      cwd: projectDir,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'ignore'],
    });

    // Any printed path means the probe file is matched by an ignore rule.
    if (String(output || '').trim()) {
      return warningResult(
        'gitignore is blocking team memory. Remove ignore rules covering .tlc/memory/team/.'
      );
    }

    return passResult('Team memory is not ignored by git');
  } catch (error) {
    // Exit status 1 simply means "not ignored" — treat it as a pass.
    if (error && error.status === 1) {
      return passResult('Team memory is not ignored by git');
    }

    // Anything else (git missing, not a repo, ...) cannot be confirmed here.
    return warningResult(
      `Unable to confirm gitignore rules for .tlc/memory/team/. Run git check-ignore manually. ${error.message}`
    );
  }
}
|
|
75
|
+
|
|
76
|
+
/**
 * Report whether the capture spool still contains unprocessed entries.
 * A missing or zero-byte spool file counts as fully drained.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object}} [deps] - Injectable fs implementation (for tests).
 * @returns {Promise<object>} passResult when empty/absent, warningResult otherwise.
 */
async function checkSpoolEntries(projectDir, { fs = require('fs') } = {}) {
  const spoolPath = path.join(projectDir, '.tlc', 'memory', '.spool.jsonl');

  try {
    if (fs.existsSync(spoolPath)) {
      const { size } = fs.statSync(spoolPath);
      if (size > 0) {
        return warningResult(
          `Found unprocessed spool entries in .tlc/memory/.spool.jsonl (${size} bytes). Drain or inspect the spool.`
        );
      }
    }

    return passResult('No unprocessed spool entries');
  } catch (error) {
    return warningResult(
      `Unable to inspect .tlc/memory/.spool.jsonl for unprocessed entries: ${error.message}`
    );
  }
}
|
|
98
|
+
|
|
99
|
+
/**
 * Check that the Claude capture hook script is installed in the project.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object}} [deps] - Injectable fs implementation (for tests).
 * @returns {Promise<object>} passResult when the hook file exists, warningResult otherwise.
 */
async function checkCaptureHookInstalled(projectDir, { fs = require('fs') } = {}) {
  const hookPath = path.join(projectDir, '.claude', 'hooks', 'tlc-capture-exchange.sh');

  try {
    return fs.existsSync(hookPath)
      ? passResult('Capture hook installed')
      : warningResult(
          'Missing capture hook at .claude/hooks/tlc-capture-exchange.sh. Install the TLC capture hook.'
        );
  } catch (error) {
    return warningResult(`Unable to verify capture hook installation: ${error.message}`);
  }
}
|
|
114
|
+
|
|
115
|
+
/**
 * Check that .tlc.json exists and parses as valid JSON.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object}} [deps] - Injectable fs implementation (for tests).
 * @returns {Promise<object>} passResult when readable/parseable, warningResult otherwise.
 */
async function checkRoutingConfigReadable(projectDir, { fs = require('fs') } = {}) {
  const configPath = path.join(projectDir, '.tlc.json');

  try {
    const raw = fs.readFileSync(configPath, 'utf8');
    // Parse purely for validation; the parsed value is not used here.
    JSON.parse(raw);
  } catch (error) {
    return warningResult(
      `Unable to read valid JSON from .tlc.json. Fix the routing config. ${error.message}`
    );
  }

  return passResult('Routing config is readable');
}
|
|
126
|
+
|
|
127
|
+
/**
 * Check the cached router state for at least one available provider.
 * Reads summary.available_count from .tlc/.router-state.json.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object}} [deps] - Injectable fs implementation (for tests).
 * @returns {Promise<object>} passResult when a provider is available,
 *   warningResult when none are or the state file is unreadable.
 */
async function checkProviderAvailable(projectDir, { fs = require('fs') } = {}) {
  const statePath = path.join(projectDir, '.tlc', '.router-state.json');

  try {
    const state = JSON.parse(fs.readFileSync(statePath, 'utf8'));
    const availableCount = Number(state && state.summary && state.summary.available_count);

    // NaN (missing/garbage field) fails this comparison just like zero.
    if (!(availableCount > 0)) {
      return warningResult(
        'No provider available in .tlc/.router-state.json. Refresh router state or configure a provider.'
      );
    }

    return passResult('At least one provider is available');
  } catch (error) {
    return warningResult(
      `Unable to read provider availability from .tlc/.router-state.json. Refresh router state. ${error.message}`
    );
  }
}
|
|
148
|
+
|
|
149
|
+
/**
 * Count non-blank lines containing an ISO-8601 UTC timestamp within the
 * last 24 hours of `now`. Lines without a timestamp are ignored.
 * @param {string} content - Raw log content (null/undefined treated as empty).
 * @param {number} [now] - Reference epoch millis; defaults to Date.now().
 * @returns {number} Number of recent timestamped lines.
 */
function countRecentTimestampedLines(content, now = Date.now()) {
  const cutoff = now - DAY_IN_MS;
  const isoTimestamp = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z/;

  return String(content || '')
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(Boolean)
    .filter((line) => {
      const match = line.match(isoTimestamp);
      if (!match) {
        return false;
      }
      const parsed = Date.parse(match[0]);
      return !Number.isNaN(parsed) && parsed >= cutoff;
    })
    .length;
}
|
|
174
|
+
|
|
175
|
+
/**
 * Check the capture warnings log for entries from the last 24 hours.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object}} [deps] - Injectable fs implementation (for tests).
 * @returns {Promise<object>} passResult when the log is absent or quiet,
 *   warningResult when recent warnings exist or the log cannot be read.
 */
async function checkCaptureWarningsLog(projectDir, { fs = require('fs') } = {}) {
  const warningsPath = path.join(projectDir, '.tlc', 'memory', '.capture-warnings.log');

  try {
    if (fs.existsSync(warningsPath)) {
      const logContent = fs.readFileSync(warningsPath, 'utf8');
      const recentCount = countRecentTimestampedLines(logContent);

      if (recentCount > 0) {
        return warningResult(
          `Found ${recentCount} capture warnings in the last 24h. Review .tlc/memory/.capture-warnings.log.`
        );
      }
    }

    return passResult('No recent capture warnings');
  } catch (error) {
    return warningResult(
      `Unable to inspect .tlc/memory/.capture-warnings.log for recent warnings: ${error.message}`
    );
  }
}
|
|
199
|
+
|
|
200
|
+
/**
 * Run every TLC health check and aggregate the outcomes.
 * A check that throws is converted into a warning instead of aborting the run.
 * @param {string} projectDir - Project root directory.
 * @param {{fs?: object, execSync?: Function}} [deps] - Injectable fs/execSync (for tests).
 * @returns {Promise<{passed: string[], warnings: string[], autoFixed: string[]}>}
 */
async function runHealthChecks(
  projectDir,
  {
    fs = require('fs'),
    execSync = require('child_process').execSync,
  } = {}
) {
  const passed = [];
  const warnings = [];
  const autoFixed = [];

  // Checks run sequentially, in a fixed order.
  const checks = [
    () => ensureMemoryDirs(projectDir, { fs }),
    () => checkGitignoreTeamMemory(projectDir, { execSync }),
    () => checkSpoolEntries(projectDir, { fs }),
    () => checkCaptureHookInstalled(projectDir, { fs }),
    () => checkRoutingConfigReadable(projectDir, { fs }),
    () => checkProviderAvailable(projectDir, { fs }),
    () => checkCaptureWarningsLog(projectDir, { fs }),
  ];

  for (const runCheck of checks) {
    try {
      const outcome = await runCheck();
      if (outcome.passed) {
        passed.push(outcome.passed);
      }
      if (outcome.warning) {
        warnings.push(outcome.warning);
      }
      if (Array.isArray(outcome.autoFixed) && outcome.autoFixed.length > 0) {
        autoFixed.push(...outcome.autoFixed);
      }
    } catch (error) {
      // Checks are expected to return warnings rather than throw;
      // a throw here means the check itself is broken.
      warnings.push(
        `[TLC WARNING] Health check execution failed unexpectedly: ${error.message}`
      );
    }
  }

  return { passed, warnings, autoFixed };
}
|
|
244
|
+
|
|
245
|
+
// Public API: individual checks are exported for targeted use and testing;
// runHealthChecks aggregates all of them into one report.
module.exports = {
  ensureMemoryDirs,
  checkGitignoreTeamMemory,
  checkSpoolEntries,
  checkCaptureHookInstalled,
  checkRoutingConfigReadable,
  checkProviderAvailable,
  checkCaptureWarningsLog,
  countRecentTimestampedLines,
  runHealthChecks,
};
|