tlc-claude-code 2.0.1 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/builder.md +144 -0
- package/.claude/agents/planner.md +143 -0
- package/.claude/agents/reviewer.md +160 -0
- package/.claude/commands/tlc/build.md +4 -0
- package/.claude/commands/tlc/deploy.md +194 -2
- package/.claude/commands/tlc/e2e-verify.md +214 -0
- package/.claude/commands/tlc/guard.md +191 -0
- package/.claude/commands/tlc/help.md +32 -0
- package/.claude/commands/tlc/init.md +73 -37
- package/.claude/commands/tlc/llm.md +19 -4
- package/.claude/commands/tlc/preflight.md +134 -0
- package/.claude/commands/tlc/review-plan.md +363 -0
- package/.claude/commands/tlc/review.md +172 -57
- package/.claude/commands/tlc/watchci.md +159 -0
- package/.claude/hooks/tlc-block-tools.sh +41 -0
- package/.claude/hooks/tlc-capture-exchange.sh +50 -0
- package/.claude/hooks/tlc-post-build.sh +38 -0
- package/.claude/hooks/tlc-post-push.sh +22 -0
- package/.claude/hooks/tlc-prompt-guard.sh +69 -0
- package/.claude/hooks/tlc-session-init.sh +123 -0
- package/CLAUDE.md +13 -0
- package/bin/install.js +268 -2
- package/bin/postinstall.js +102 -24
- package/bin/setup-autoupdate.js +206 -0
- package/bin/setup-autoupdate.test.js +124 -0
- package/bin/tlc.js +0 -0
- package/dashboard-web/dist/assets/index-CdS5CHqu.css +1 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js +483 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js.map +1 -0
- package/dashboard-web/dist/index.html +2 -2
- package/docker-compose.dev.yml +18 -12
- package/package.json +4 -2
- package/scripts/project-docs.js +1 -1
- package/server/index.js +228 -2
- package/server/lib/capture-bridge.js +242 -0
- package/server/lib/capture-bridge.test.js +363 -0
- package/server/lib/capture-guard.js +140 -0
- package/server/lib/capture-guard.test.js +182 -0
- package/server/lib/command-runner.js +159 -0
- package/server/lib/command-runner.test.js +92 -0
- package/server/lib/cost-tracker.test.js +49 -12
- package/server/lib/deploy/runners/dependency-runner.js +106 -0
- package/server/lib/deploy/runners/dependency-runner.test.js +148 -0
- package/server/lib/deploy/runners/secrets-runner.js +174 -0
- package/server/lib/deploy/runners/secrets-runner.test.js +127 -0
- package/server/lib/deploy/security-gates.js +11 -24
- package/server/lib/deploy/security-gates.test.js +9 -2
- package/server/lib/deploy-engine.js +182 -0
- package/server/lib/deploy-engine.test.js +147 -0
- package/server/lib/docker-api.js +137 -0
- package/server/lib/docker-api.test.js +202 -0
- package/server/lib/docker-client.js +297 -0
- package/server/lib/docker-client.test.js +308 -0
- package/server/lib/input-sanitizer.js +86 -0
- package/server/lib/input-sanitizer.test.js +117 -0
- package/server/lib/launchd-agent.js +225 -0
- package/server/lib/launchd-agent.test.js +185 -0
- package/server/lib/memory-api.js +3 -1
- package/server/lib/memory-api.test.js +3 -5
- package/server/lib/memory-bridge-e2e.test.js +160 -0
- package/server/lib/memory-committer.js +18 -4
- package/server/lib/memory-committer.test.js +21 -0
- package/server/lib/memory-hooks-capture.test.js +69 -4
- package/server/lib/memory-hooks-integration.test.js +98 -0
- package/server/lib/memory-hooks.js +42 -4
- package/server/lib/memory-store-adapter.js +105 -0
- package/server/lib/memory-store-adapter.test.js +141 -0
- package/server/lib/memory-wiring-e2e.test.js +93 -0
- package/server/lib/nginx-config.js +114 -0
- package/server/lib/nginx-config.test.js +82 -0
- package/server/lib/ollama-health.js +91 -0
- package/server/lib/ollama-health.test.js +74 -0
- package/server/lib/orchestration/agent-dispatcher.js +114 -0
- package/server/lib/orchestration/agent-dispatcher.test.js +110 -0
- package/server/lib/orchestration/orchestrator.js +130 -0
- package/server/lib/orchestration/orchestrator.test.js +192 -0
- package/server/lib/orchestration/tmux-manager.js +101 -0
- package/server/lib/orchestration/tmux-manager.test.js +109 -0
- package/server/lib/orchestration/worktree-manager.js +132 -0
- package/server/lib/orchestration/worktree-manager.test.js +129 -0
- package/server/lib/port-guard.js +44 -0
- package/server/lib/port-guard.test.js +65 -0
- package/server/lib/project-scanner.js +37 -2
- package/server/lib/project-scanner.test.js +152 -0
- package/server/lib/remember-command.js +2 -0
- package/server/lib/remember-command.test.js +23 -0
- package/server/lib/review/plan-reviewer.js +260 -0
- package/server/lib/review/plan-reviewer.test.js +269 -0
- package/server/lib/review/review-schemas.js +173 -0
- package/server/lib/review/review-schemas.test.js +152 -0
- package/server/lib/security/crypto-utils.test.js +2 -2
- package/server/lib/semantic-recall.js +1 -1
- package/server/lib/semantic-recall.test.js +17 -0
- package/server/lib/ssh-client.js +184 -0
- package/server/lib/ssh-client.test.js +127 -0
- package/server/lib/vps-api.js +184 -0
- package/server/lib/vps-api.test.js +208 -0
- package/server/lib/vps-bootstrap.js +124 -0
- package/server/lib/vps-bootstrap.test.js +79 -0
- package/server/lib/vps-monitor.js +126 -0
- package/server/lib/vps-monitor.test.js +98 -0
- package/server/lib/workspace-api.js +182 -1
- package/server/lib/workspace-api.test.js +474 -0
- package/server/package-lock.json +737 -0
- package/server/package.json +3 -0
- package/server/setup.sh +271 -271
- package/dashboard-web/dist/assets/index-Uhc49PE-.css +0 -1
- package/dashboard-web/dist/assets/index-W36XHPC5.js +0 -431
- package/dashboard-web/dist/assets/index-W36XHPC5.js.map +0 -1
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
/**
 * Memory Bridge E2E Tests - Phase 82 Task 5
 *
 * Tests the full pipeline: capture → observeAndRemember → pattern detect → file store.
 * Proves the memory system achieves its original goal.
 *
 * RED: depends on capture-bridge.js (Task 1) and capture-guard.js (Task 4).
 */

import { describe, it, beforeEach, afterEach, expect, vi } from 'vitest';
import fs from 'fs';
import path from 'path';
import os from 'os';

import { captureExchange, drainSpool, SPOOL_FILENAME } from './capture-bridge.js';
import { observeAndRemember } from './memory-observer.js';

describe('memory-bridge e2e', () => {
  // Per-test scratch project root; created fresh in beforeEach and removed
  // in afterEach so individual tests cannot leak state into each other.
  let testDir;

  beforeEach(() => {
    testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tlc-bridge-e2e-'));
    // Create memory directory structure
    fs.mkdirSync(path.join(testDir, '.tlc', 'memory', 'team', 'decisions'), { recursive: true });
    fs.mkdirSync(path.join(testDir, '.tlc', 'memory', 'team', 'gotchas'), { recursive: true });
    fs.mkdirSync(path.join(testDir, '.tlc', 'memory', '.local'), { recursive: true });
    fs.writeFileSync(path.join(testDir, '.tlc.json'), JSON.stringify({ project: 'e2e-test' }));
  });

  afterEach(() => {
    fs.rmSync(testDir, { recursive: true, force: true });
  });

  it('decision in exchange creates decision file', async () => {
    // Pattern detector analyzes the user field for decision patterns
    const exchange = {
      user: "let's use PostgreSQL instead of MySQL because we need JSONB support.",
      assistant: 'Good choice. PostgreSQL has excellent JSONB support.',
    };

    await observeAndRemember(testDir, exchange);

    // Wait for setImmediate-based async processing
    // NOTE(review): fixed sleeps can flake on slow CI machines — if this test
    // ever flakes, poll for the file instead of sleeping a fixed 500ms.
    await new Promise(resolve => setTimeout(resolve, 500));

    // Check that a decision file was created
    const decisionsDir = path.join(testDir, '.tlc', 'memory', 'team', 'decisions');
    const files = fs.readdirSync(decisionsDir);
    expect(files.length).toBeGreaterThanOrEqual(1);
  });

  it('gotcha in exchange creates gotcha file', async () => {
    // Pattern detector looks for "watch out for X" in user field
    const exchange = {
      user: 'watch out for the PGlite WASM driver under concurrent writes.',
      assistant: 'Good catch. Serialize database operations to avoid crashes.',
    };

    await observeAndRemember(testDir, exchange);

    // Allow the observer's async pattern-detection pipeline to finish.
    await new Promise(resolve => setTimeout(resolve, 500));

    const gotchasDir = path.join(testDir, '.tlc', 'memory', 'team', 'gotchas');
    const files = fs.readdirSync(gotchasDir);
    expect(files.length).toBeGreaterThanOrEqual(1);
  });

  it('full pipeline: captureExchange → observe → file stored', async () => {
    // Simulate what the Stop hook does: POST to a mock server that calls observeAndRemember
    let capturedExchange = null;

    // Mock fetch that simulates the server calling observeAndRemember
    const mockFetch = vi.fn().mockImplementation(async (url, opts) => {
      const body = JSON.parse(opts.body);
      for (const ex of body.exchanges) {
        capturedExchange = ex;
        await observeAndRemember(testDir, ex);
      }
      return { ok: true, json: async () => ({ captured: body.exchanges.length }) };
    });

    await captureExchange({
      cwd: testDir,
      // Pattern detector analyzes user field — put decision language there
      assistantMessage: 'Good choice, JWT is better for horizontal scaling.',
      userMessage: "let's use JWT tokens instead of sessions for authentication.",
      sessionId: 'e2e-sess-1',
    }, { fetch: mockFetch });

    // Give the observe pipeline time to write the decision file to disk.
    await new Promise(resolve => setTimeout(resolve, 300));

    // Verify the exchange was captured
    expect(capturedExchange).not.toBeNull();
    expect(capturedExchange.user).toContain('JWT tokens');

    // Verify a decision file was created
    const decisionsDir = path.join(testDir, '.tlc', 'memory', 'team', 'decisions');
    const files = fs.readdirSync(decisionsDir);
    expect(files.length).toBeGreaterThanOrEqual(1);
  });

  it('spool entry captured after drain', async () => {
    const spoolDir = path.join(testDir, '.tlc', 'memory');
    const spoolPath = path.join(spoolDir, SPOOL_FILENAME);

    // Write a spooled entry with a decision (pattern in user field)
    const spooledEntry = JSON.stringify({
      projectId: 'e2e-test',
      exchanges: [{
        user: "we decided to use SQLite for the vector store.",
        assistant: 'SQLite embeds directly and needs no separate process.',
        timestamp: Date.now(),
      }],
    });
    // Spool format is newline-delimited JSON (one entry per line).
    fs.writeFileSync(spoolPath, spooledEntry + '\n');

    // Mock fetch that calls observeAndRemember (like the real server would)
    const mockFetch = vi.fn().mockImplementation(async (url, opts) => {
      const body = JSON.parse(opts.body);
      for (const ex of body.exchanges) {
        await observeAndRemember(testDir, ex);
      }
      return { ok: true, json: async () => ({ captured: body.exchanges.length }) };
    });

    await drainSpool(spoolDir, { fetch: mockFetch });

    // Allow async observe processing triggered by the drain to settle.
    await new Promise(resolve => setTimeout(resolve, 500));

    // Spool should be drained
    // (drainSpool may either delete the spool file or truncate it to empty;
    // both outcomes are accepted here)
    if (fs.existsSync(spoolPath)) {
      expect(fs.readFileSync(spoolPath, 'utf-8').trim()).toBe('');
    }

    // Decision file should have been created from the spooled exchange
    const decisionsDir = path.join(testDir, '.tlc', 'memory', 'team', 'decisions');
    const files = fs.readdirSync(decisionsDir);
    expect(files.length).toBeGreaterThanOrEqual(1);
  });

  it('capture guard deduplicates identical exchanges', async () => {
    // Dedup happens at the capture guard level, not the observer
    const { createCaptureGuard } = await import('./capture-guard.js');
    const guard = createCaptureGuard();

    const exchange = {
      user: "we decided to use Redis as our caching layer.",
      assistant: 'Redis is great for caching.',
      timestamp: Date.now(),
    };

    // First call returns the exchange
    const first = guard.deduplicate([exchange], 'e2e-test');
    expect(first).toHaveLength(1);

    // Same exchange immediately — deduplicated
    const second = guard.deduplicate([exchange], 'e2e-test');
    expect(second).toHaveLength(0);
  });
});
|
|
@@ -23,7 +23,24 @@ async function detectUncommittedMemory(projectRoot) {
|
|
|
23
23
|
return [];
|
|
24
24
|
}
|
|
25
25
|
|
|
26
|
-
//
|
|
26
|
+
// Try git status first — only return modified/untracked files
|
|
27
|
+
try {
|
|
28
|
+
const teamRelative = path.relative(projectRoot, teamDir);
|
|
29
|
+
const { stdout } = await execAsync(
|
|
30
|
+
`git status --porcelain -- "${teamRelative}"`,
|
|
31
|
+
{ cwd: projectRoot }
|
|
32
|
+
);
|
|
33
|
+
if (stdout.trim().length === 0) return [];
|
|
34
|
+
|
|
35
|
+
return stdout.trim().split('\n')
|
|
36
|
+
.map(line => line.slice(3).trim()) // strip status prefix (e.g. "?? ", " M ")
|
|
37
|
+
.filter(f => f.endsWith('.json') || f.endsWith('.md'))
|
|
38
|
+
.filter(f => !f.endsWith('conventions.md'));
|
|
39
|
+
} catch {
|
|
40
|
+
// Not a git repo or git not available — fall back to walkDir
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Fallback: return all files (non-git directory)
|
|
27
44
|
const files = [];
|
|
28
45
|
|
|
29
46
|
async function walkDir(dir) {
|
|
@@ -33,10 +50,7 @@ async function detectUncommittedMemory(projectRoot) {
|
|
|
33
50
|
if (entry.isDirectory()) {
|
|
34
51
|
await walkDir(fullPath);
|
|
35
52
|
} else if (entry.name.endsWith('.json') || entry.name.endsWith('.md')) {
|
|
36
|
-
// Skip template files like conventions.md
|
|
37
53
|
if (entry.name === 'conventions.md') continue;
|
|
38
|
-
|
|
39
|
-
// Get path relative to projectRoot
|
|
40
54
|
const relativePath = path.relative(projectRoot, fullPath);
|
|
41
55
|
files.push(relativePath);
|
|
42
56
|
}
|
|
@@ -58,6 +58,27 @@ describe('memory-committer', () => {
|
|
|
58
58
|
|
|
59
59
|
expect(uncommitted.every(f => !f.includes('.local'))).toBe(true);
|
|
60
60
|
});
|
|
61
|
+
|
|
62
|
+
// Phase 81 Task 4: detectUncommittedMemory should use git status
|
|
63
|
+
it('returns empty for already-committed files in a git repo', async () => {
|
|
64
|
+
// Create a git repo, add a decision, and commit it
|
|
65
|
+
const { execSync } = await import('child_process');
|
|
66
|
+
execSync('git init', { cwd: testDir, stdio: 'pipe' });
|
|
67
|
+
execSync('git config user.email "test@test.com"', { cwd: testDir, stdio: 'pipe' });
|
|
68
|
+
execSync('git config user.name "Test"', { cwd: testDir, stdio: 'pipe' });
|
|
69
|
+
|
|
70
|
+
await writeTeamDecision(testDir, {
|
|
71
|
+
title: 'Committed Decision',
|
|
72
|
+
reasoning: 'Already committed',
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
execSync('git add -A', { cwd: testDir, stdio: 'pipe' });
|
|
76
|
+
execSync('git commit -m "initial"', { cwd: testDir, stdio: 'pipe' });
|
|
77
|
+
|
|
78
|
+
// Now detect uncommitted — should be empty since everything is committed
|
|
79
|
+
const uncommitted = await detectUncommittedMemory(testDir);
|
|
80
|
+
expect(uncommitted).toHaveLength(0);
|
|
81
|
+
});
|
|
61
82
|
});
|
|
62
83
|
|
|
63
84
|
describe('generateCommitMessage', () => {
|
|
@@ -297,18 +297,20 @@ describe('memory-hooks capture (auto-capture hooks)', () => {
|
|
|
297
297
|
it('buffer resets after chunk written', async () => {
|
|
298
298
|
const hooks = createCaptureHooks(testDir, makeDeps());
|
|
299
299
|
|
|
300
|
-
// Fill buffer to threshold
|
|
300
|
+
// Fill buffer to threshold — the 5th exchange triggers processBuffer
|
|
301
|
+
// which atomically swaps the buffer before async processing
|
|
301
302
|
for (let i = 0; i < 5; i++) {
|
|
302
303
|
hooks.onExchange(makeExchange(`q${i}`, `a${i}`));
|
|
303
304
|
}
|
|
304
305
|
|
|
305
|
-
expect(hooks.getBufferSize()).toBe(5);
|
|
306
|
-
|
|
307
306
|
// Wait for async processing to complete
|
|
308
307
|
await new Promise(resolve => setTimeout(resolve, 100));
|
|
309
308
|
|
|
310
|
-
// Buffer should be
|
|
309
|
+
// Buffer should be empty after processing
|
|
311
310
|
expect(hooks.getBufferSize()).toBe(0);
|
|
311
|
+
// Chunker should have received the 5 exchanges
|
|
312
|
+
expect(mockChunker.chunkConversation).toHaveBeenCalled();
|
|
313
|
+
expect(mockChunker.chunkConversation.mock.calls[0][0]).toHaveLength(5);
|
|
312
314
|
});
|
|
313
315
|
|
|
314
316
|
it('flush() forces processing regardless of buffer size', async () => {
|
|
@@ -346,5 +348,68 @@ describe('memory-hooks capture (auto-capture hooks)', () => {
|
|
|
346
348
|
expect(indexCallArgs).toBeDefined();
|
|
347
349
|
expect(indexCallArgs[0]).toHaveProperty('id', 'chunk-1');
|
|
348
350
|
});
|
|
351
|
+
|
|
352
|
+
// Phase 81 Task 3: Buffer race condition tests
|
|
353
|
+
it('exchanges added during async processing are not lost', async () => {
|
|
354
|
+
// Use a synchronous chunker but slow richCapture to simulate the race
|
|
355
|
+
const slowRichCapture = {
|
|
356
|
+
writeConversationChunk: vi.fn().mockImplementation(async () => {
|
|
357
|
+
// Simulate slow async write — yields control back to event loop
|
|
358
|
+
await new Promise(resolve => setTimeout(resolve, 80));
|
|
359
|
+
return '/tmp/slow-write.md';
|
|
360
|
+
}),
|
|
361
|
+
};
|
|
362
|
+
|
|
363
|
+
const hooks = createCaptureHooks(testDir, makeDeps({ richCapture: slowRichCapture }));
|
|
364
|
+
|
|
365
|
+
// Add 5 exchanges to trigger processing
|
|
366
|
+
for (let i = 0; i < 5; i++) {
|
|
367
|
+
hooks.onExchange(makeExchange(`q${i}`, `a${i}`));
|
|
368
|
+
}
|
|
369
|
+
|
|
370
|
+
// Wait briefly for processing to start (microtask scheduled)
|
|
371
|
+
await new Promise(resolve => setTimeout(resolve, 20));
|
|
372
|
+
|
|
373
|
+
// Now add more exchanges DURING the slow processing
|
|
374
|
+
hooks.onExchange(makeExchange('late-q1', 'late-a1'));
|
|
375
|
+
hooks.onExchange(makeExchange('late-q2', 'late-a2'));
|
|
376
|
+
|
|
377
|
+
// Wait for processing to complete
|
|
378
|
+
await new Promise(resolve => setTimeout(resolve, 200));
|
|
379
|
+
|
|
380
|
+
// The late exchanges must NOT have been lost
|
|
381
|
+
// They should be in the buffer (preserved) or processed in a second batch
|
|
382
|
+
const bufferSize = hooks.getBufferSize();
|
|
383
|
+
const chunkCalls = mockChunker.chunkConversation.mock.calls.length;
|
|
384
|
+
// Either late exchanges remain in buffer, or they triggered a second batch
|
|
385
|
+
expect(bufferSize === 2 || chunkCalls > 1).toBe(true);
|
|
386
|
+
});
|
|
387
|
+
|
|
388
|
+
it('error during processing preserves new exchanges', async () => {
|
|
389
|
+
const slowFailRichCapture = {
|
|
390
|
+
writeConversationChunk: vi.fn().mockImplementation(async () => {
|
|
391
|
+
await new Promise(resolve => setTimeout(resolve, 30));
|
|
392
|
+
throw new Error('Write failed');
|
|
393
|
+
}),
|
|
394
|
+
};
|
|
395
|
+
|
|
396
|
+
const hooks = createCaptureHooks(testDir, makeDeps({ richCapture: slowFailRichCapture }));
|
|
397
|
+
|
|
398
|
+
// Trigger processing (will fail during write)
|
|
399
|
+
for (let i = 0; i < 5; i++) {
|
|
400
|
+
hooks.onExchange(makeExchange(`q${i}`, `a${i}`));
|
|
401
|
+
}
|
|
402
|
+
|
|
403
|
+
// Wait for processing to start, then add exchange during failure
|
|
404
|
+
await new Promise(resolve => setTimeout(resolve, 10));
|
|
405
|
+
hooks.onExchange(makeExchange('after-error-q', 'after-error-a'));
|
|
406
|
+
|
|
407
|
+
// Wait for error processing to complete
|
|
408
|
+
await new Promise(resolve => setTimeout(resolve, 100));
|
|
409
|
+
|
|
410
|
+
// The new exchange added during the failing processing must be preserved
|
|
411
|
+
const bufferSize = hooks.getBufferSize();
|
|
412
|
+
expect(bufferSize).toBeGreaterThanOrEqual(1);
|
|
413
|
+
});
|
|
349
414
|
});
|
|
350
415
|
});
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
/**
 * Memory Hooks Integration Tests - Phase 81 Task 6
 *
 * Tests for createServerMemoryCapture() which wires memory hooks
 * into the TLC server lifecycle so that conversations are automatically
 * captured without user action.
 */

import { describe, it, beforeEach, afterEach, expect, vi } from 'vitest';
import fs from 'fs';
import path from 'path';
import os from 'os';

// The function under test — will be created in implementation
import { createServerMemoryCapture } from './memory-hooks.js';

describe('memory-hooks server integration', () => {
  // Scratch project root, recreated for every test and deleted afterwards.
  let testDir;

  beforeEach(() => {
    testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tlc-hooks-integration-'));
    // Create minimal memory structure
    fs.mkdirSync(path.join(testDir, '.tlc', 'memory', 'team', 'decisions'), { recursive: true });
    fs.mkdirSync(path.join(testDir, '.tlc', 'memory', '.local', 'sessions'), { recursive: true });
    vi.clearAllMocks();
  });

  afterEach(() => {
    fs.rmSync(testDir, { recursive: true, force: true });
  });

  it('createServerMemoryCapture returns object with expected methods', () => {
    const capture = createServerMemoryCapture({
      projectRoot: testDir,
      observeAndRemember: vi.fn(),
    });

    // The shape contract: both lifecycle hooks must exist and be callable.
    expect(capture).toHaveProperty('onAssistantResponse');
    expect(capture).toHaveProperty('onTlcCommand');
    expect(typeof capture.onAssistantResponse).toBe('function');
    expect(typeof capture.onTlcCommand).toBe('function');
  });

  it('onAssistantResponse triggers observeAndRemember', async () => {
    const mockObserve = vi.fn();
    const capture = createServerMemoryCapture({
      projectRoot: testDir,
      observeAndRemember: mockObserve,
    });

    await capture.onAssistantResponse('We should use PostgreSQL for better JSON support');

    // Observer receives (projectRoot, exchange) where the exchange carries
    // the response text in the `assistant` field.
    expect(mockObserve).toHaveBeenCalledTimes(1);
    expect(mockObserve).toHaveBeenCalledWith(
      testDir,
      expect.objectContaining({ assistant: 'We should use PostgreSQL for better JSON support' })
    );
  });

  it('onTlcCommand triggers capture flush', async () => {
    const mockObserve = vi.fn();
    const capture = createServerMemoryCapture({
      projectRoot: testDir,
      observeAndRemember: mockObserve,
    });

    // Add some responses first
    await capture.onAssistantResponse('first response');
    await capture.onAssistantResponse('second response');

    // TLC command should work without error
    expect(() => capture.onTlcCommand('build')).not.toThrow();
  });

  it('capture failure does not throw', async () => {
    const failingObserve = vi.fn().mockRejectedValue(new Error('Observation failed'));
    const capture = createServerMemoryCapture({
      projectRoot: testDir,
      observeAndRemember: failingObserve,
    });

    // Should not throw despite observer failure
    // (`.resolves.not.toThrow()` asserts only that the promise fulfills)
    await expect(
      capture.onAssistantResponse('this will fail to observe')
    ).resolves.not.toThrow();
  });

  it('works when projectRoot has no memory structure', () => {
    const emptyDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tlc-empty-'));

    // Construction must not eagerly touch the filesystem.
    expect(() => createServerMemoryCapture({
      projectRoot: emptyDir,
      observeAndRemember: vi.fn(),
    })).not.toThrow();

    fs.rmSync(emptyDir, { recursive: true, force: true });
  });
});
|
|
@@ -158,19 +158,21 @@ function createCaptureHooks(projectRoot, deps) {
|
|
|
158
158
|
|
|
159
159
|
processing = true;
|
|
160
160
|
|
|
161
|
+
// Snapshot and reset buffer atomically BEFORE async processing.
|
|
162
|
+
// New exchanges arriving during processing go into the fresh buffer.
|
|
163
|
+
const snapshot = buffer;
|
|
164
|
+
buffer = [];
|
|
165
|
+
|
|
161
166
|
Promise.resolve().then(async () => {
|
|
162
167
|
try {
|
|
163
|
-
const
|
|
164
|
-
const chunks = chunker.chunkConversation(exchanges);
|
|
168
|
+
const chunks = chunker.chunkConversation(snapshot);
|
|
165
169
|
for (const chunk of chunks) {
|
|
166
170
|
await richCapture.writeConversationChunk(projectRoot, chunk);
|
|
167
171
|
await vectorIndexer.indexChunk(chunk);
|
|
168
172
|
}
|
|
169
|
-
buffer = [];
|
|
170
173
|
} catch (_err) {
|
|
171
174
|
// Error resilience: capture failures must not propagate.
|
|
172
175
|
// Hooks remain functional after errors.
|
|
173
|
-
buffer = [];
|
|
174
176
|
} finally {
|
|
175
177
|
processing = false;
|
|
176
178
|
}
|
|
@@ -221,8 +223,44 @@ function createCaptureHooks(projectRoot, deps) {
|
|
|
221
223
|
};
|
|
222
224
|
}
|
|
223
225
|
|
|
226
|
+
/**
 * Wire the memory observer into the TLC server lifecycle so conversations
 * are captured automatically, with no user action required.
 *
 * @param {Object} opts
 * @param {string} opts.projectRoot - Project root directory
 * @param {Function} opts.observeAndRemember - The observe-and-remember function from memory-observer
 * @returns {{ onAssistantResponse: Function, onTlcCommand: Function }}
 */
function createServerMemoryCapture({ projectRoot, observeAndRemember }) {
  /**
   * Invoked after each assistant response; forwards the text to the observer
   * as an `{ assistant }` exchange. Observer errors are deliberately
   * swallowed so a capture failure can never break the response flow.
   * @param {string} response - The assistant response text
   */
  const onAssistantResponse = async (response) => {
    try {
      await observeAndRemember(projectRoot, { assistant: response });
    } catch (_err) {
      // Intentionally ignored: capture must never disrupt the server.
    }
  };

  /**
   * Invoked when a TLC command runs. Currently a no-op; reserved for a
   * future flush/capture step.
   * @param {string} _command - The TLC command name
   */
  const onTlcCommand = (_command) => {
    // Reserved for future capture-flush behavior.
  };

  return { onAssistantResponse, onTlcCommand };
}
|
|
260
|
+
|
|
224
261
|
module.exports = {
|
|
225
262
|
createMemoryHooks,
|
|
226
263
|
MemoryHooks,
|
|
227
264
|
createCaptureHooks,
|
|
265
|
+
createServerMemoryCapture,
|
|
228
266
|
};
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File-based memory store adapter.
|
|
3
|
+
*
|
|
4
|
+
* Reads decisions and gotchas from `.tlc/memory/team/` markdown files on disk.
|
|
5
|
+
* No vector DB required — just file-based reading. Returns empty arrays when
|
|
6
|
+
* no files exist.
|
|
7
|
+
*
|
|
8
|
+
* @module memory-store-adapter
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const path = require('path');
|
|
12
|
+
const realFs = require('fs');
|
|
13
|
+
|
|
14
|
+
/**
 * Turn raw markdown text into a memory entry. The title comes from the
 * first `# Heading` line; when none exists, the filename (minus its `.md`
 * extension) is used instead.
 *
 * @param {string} content - Raw markdown content
 * @param {string} filename - Original filename
 * @returns {{ title: string, content: string, filename: string }}
 */
function parseMarkdownEntry(content, filename) {
  const heading = /^#\s+(.+)$/m.exec(content);
  let title;
  if (heading) {
    title = heading[1].trim();
  } else {
    title = filename.replace(/\.md$/, '');
  }
  return { title, content, filename };
}

/**
 * Collect and parse every `.md` file in a directory. Missing directories
 * yield an empty list; individual unreadable files are silently skipped.
 *
 * @param {string} dirPath - Directory to read
 * @param {object} fsImpl - fs implementation (for testing)
 * @returns {Array<{ title: string, content: string, filename: string }>}
 */
function readMarkdownDir(dirPath, fsImpl) {
  if (!fsImpl.existsSync(dirPath)) return [];

  const results = [];
  for (const name of fsImpl.readdirSync(dirPath)) {
    if (!name.endsWith('.md')) continue;
    try {
      const raw = fsImpl.readFileSync(path.join(dirPath, name), 'utf-8');
      results.push(parseMarkdownEntry(raw, name));
    } catch {
      // Unreadable file — skip it rather than failing the whole listing.
    }
  }

  return results;
}
|
|
55
|
+
|
|
56
|
+
/**
 * Build a file-backed memory store adapter rooted at a project directory.
 * Decisions live in `.tlc/memory/team/decisions`, gotchas in
 * `.tlc/memory/team/gotchas`; all reads go through the injected fs so
 * tests can substitute a fake implementation.
 *
 * @param {string} projectPath - Absolute path to the project root
 * @param {object} [options]
 * @param {object} [options.fs] - fs implementation (for testing)
 * @returns {{ listDecisions: Function, listGotchas: Function, getStats: Function }}
 */
function createMemoryStoreAdapter(projectPath, options = {}) {
  const fsImpl = options.fs || realFs;
  const teamDir = path.join(projectPath, '.tlc', 'memory', 'team');
  const decisionsDir = path.join(teamDir, 'decisions');
  const gotchasDir = path.join(teamDir, 'gotchas');

  // Count the `.md` files in a directory without parsing them; missing
  // directories count as zero.
  const countMarkdown = (dir) =>
    fsImpl.existsSync(dir)
      ? fsImpl.readdirSync(dir).filter((f) => f.endsWith('.md')).length
      : 0;

  /**
   * List all decisions recorded for this project.
   * @returns {Promise<Array>}
   */
  async function listDecisions() {
    return readMarkdownDir(decisionsDir, fsImpl);
  }

  /**
   * List all gotchas recorded for this project.
   * @returns {Promise<Array>}
   */
  async function listGotchas() {
    return readMarkdownDir(gotchasDir, fsImpl);
  }

  /**
   * Summarize how many entries the store holds.
   * @returns {Promise<{ decisions: number, gotchas: number, total: number }>}
   */
  async function getStats() {
    const decisions = countMarkdown(decisionsDir);
    const gotchas = countMarkdown(gotchasDir);
    return { decisions, gotchas, total: decisions + gotchas };
  }

  return { listDecisions, listGotchas, getStats };
}
|
|
104
|
+
|
|
105
|
+
module.exports = { createMemoryStoreAdapter };
|