tlc-claude-code 2.0.1 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/builder.md +144 -0
- package/.claude/agents/planner.md +143 -0
- package/.claude/agents/reviewer.md +160 -0
- package/.claude/commands/tlc/build.md +4 -0
- package/.claude/commands/tlc/deploy.md +194 -2
- package/.claude/commands/tlc/e2e-verify.md +214 -0
- package/.claude/commands/tlc/guard.md +191 -0
- package/.claude/commands/tlc/help.md +32 -0
- package/.claude/commands/tlc/init.md +73 -37
- package/.claude/commands/tlc/llm.md +19 -4
- package/.claude/commands/tlc/preflight.md +134 -0
- package/.claude/commands/tlc/review-plan.md +363 -0
- package/.claude/commands/tlc/review.md +172 -57
- package/.claude/commands/tlc/watchci.md +159 -0
- package/.claude/hooks/tlc-block-tools.sh +41 -0
- package/.claude/hooks/tlc-capture-exchange.sh +50 -0
- package/.claude/hooks/tlc-post-build.sh +38 -0
- package/.claude/hooks/tlc-post-push.sh +22 -0
- package/.claude/hooks/tlc-prompt-guard.sh +69 -0
- package/.claude/hooks/tlc-session-init.sh +123 -0
- package/CLAUDE.md +13 -0
- package/bin/install.js +268 -2
- package/bin/postinstall.js +102 -24
- package/bin/setup-autoupdate.js +206 -0
- package/bin/setup-autoupdate.test.js +124 -0
- package/bin/tlc.js +0 -0
- package/dashboard-web/dist/assets/index-CdS5CHqu.css +1 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js +483 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js.map +1 -0
- package/dashboard-web/dist/index.html +2 -2
- package/docker-compose.dev.yml +18 -12
- package/package.json +4 -2
- package/scripts/project-docs.js +1 -1
- package/server/index.js +228 -2
- package/server/lib/capture-bridge.js +242 -0
- package/server/lib/capture-bridge.test.js +363 -0
- package/server/lib/capture-guard.js +140 -0
- package/server/lib/capture-guard.test.js +182 -0
- package/server/lib/command-runner.js +159 -0
- package/server/lib/command-runner.test.js +92 -0
- package/server/lib/cost-tracker.test.js +49 -12
- package/server/lib/deploy/runners/dependency-runner.js +106 -0
- package/server/lib/deploy/runners/dependency-runner.test.js +148 -0
- package/server/lib/deploy/runners/secrets-runner.js +174 -0
- package/server/lib/deploy/runners/secrets-runner.test.js +127 -0
- package/server/lib/deploy/security-gates.js +11 -24
- package/server/lib/deploy/security-gates.test.js +9 -2
- package/server/lib/deploy-engine.js +182 -0
- package/server/lib/deploy-engine.test.js +147 -0
- package/server/lib/docker-api.js +137 -0
- package/server/lib/docker-api.test.js +202 -0
- package/server/lib/docker-client.js +297 -0
- package/server/lib/docker-client.test.js +308 -0
- package/server/lib/input-sanitizer.js +86 -0
- package/server/lib/input-sanitizer.test.js +117 -0
- package/server/lib/launchd-agent.js +225 -0
- package/server/lib/launchd-agent.test.js +185 -0
- package/server/lib/memory-api.js +3 -1
- package/server/lib/memory-api.test.js +3 -5
- package/server/lib/memory-bridge-e2e.test.js +160 -0
- package/server/lib/memory-committer.js +18 -4
- package/server/lib/memory-committer.test.js +21 -0
- package/server/lib/memory-hooks-capture.test.js +69 -4
- package/server/lib/memory-hooks-integration.test.js +98 -0
- package/server/lib/memory-hooks.js +42 -4
- package/server/lib/memory-store-adapter.js +105 -0
- package/server/lib/memory-store-adapter.test.js +141 -0
- package/server/lib/memory-wiring-e2e.test.js +93 -0
- package/server/lib/nginx-config.js +114 -0
- package/server/lib/nginx-config.test.js +82 -0
- package/server/lib/ollama-health.js +91 -0
- package/server/lib/ollama-health.test.js +74 -0
- package/server/lib/orchestration/agent-dispatcher.js +114 -0
- package/server/lib/orchestration/agent-dispatcher.test.js +110 -0
- package/server/lib/orchestration/orchestrator.js +130 -0
- package/server/lib/orchestration/orchestrator.test.js +192 -0
- package/server/lib/orchestration/tmux-manager.js +101 -0
- package/server/lib/orchestration/tmux-manager.test.js +109 -0
- package/server/lib/orchestration/worktree-manager.js +132 -0
- package/server/lib/orchestration/worktree-manager.test.js +129 -0
- package/server/lib/port-guard.js +44 -0
- package/server/lib/port-guard.test.js +65 -0
- package/server/lib/project-scanner.js +37 -2
- package/server/lib/project-scanner.test.js +152 -0
- package/server/lib/remember-command.js +2 -0
- package/server/lib/remember-command.test.js +23 -0
- package/server/lib/review/plan-reviewer.js +260 -0
- package/server/lib/review/plan-reviewer.test.js +269 -0
- package/server/lib/review/review-schemas.js +173 -0
- package/server/lib/review/review-schemas.test.js +152 -0
- package/server/lib/security/crypto-utils.test.js +2 -2
- package/server/lib/semantic-recall.js +1 -1
- package/server/lib/semantic-recall.test.js +17 -0
- package/server/lib/ssh-client.js +184 -0
- package/server/lib/ssh-client.test.js +127 -0
- package/server/lib/vps-api.js +184 -0
- package/server/lib/vps-api.test.js +208 -0
- package/server/lib/vps-bootstrap.js +124 -0
- package/server/lib/vps-bootstrap.test.js +79 -0
- package/server/lib/vps-monitor.js +126 -0
- package/server/lib/vps-monitor.test.js +98 -0
- package/server/lib/workspace-api.js +182 -1
- package/server/lib/workspace-api.test.js +474 -0
- package/server/package-lock.json +737 -0
- package/server/package.json +3 -0
- package/server/setup.sh +271 -271
- package/dashboard-web/dist/assets/index-Uhc49PE-.css +0 -1
- package/dashboard-web/dist/assets/index-W36XHPC5.js +0 -431
- package/dashboard-web/dist/assets/index-W36XHPC5.js.map +0 -1
|
@@ -9,8 +9,8 @@
|
|
|
9
9
|
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
|
10
10
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
|
11
11
|
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=JetBrains+Mono:wght@400;500&display=swap" rel="stylesheet" />
|
|
12
|
-
<script type="module" crossorigin src="/assets/index-
|
|
13
|
-
<link rel="stylesheet" crossorigin href="/assets/index-
|
|
12
|
+
<script type="module" crossorigin src="/assets/index-CwNPPVpg.js"></script>
|
|
13
|
+
<link rel="stylesheet" crossorigin href="/assets/index-CdS5CHqu.css">
|
|
14
14
|
</head>
|
|
15
15
|
<body>
|
|
16
16
|
<div id="root"></div>
|
package/docker-compose.dev.yml
CHANGED
|
@@ -22,10 +22,14 @@ services:
|
|
|
22
22
|
retries: 5
|
|
23
23
|
|
|
24
24
|
# Your App (runs from project directory)
|
|
25
|
+
# Only starts when you explicitly provide PROJECT_DIR:
|
|
26
|
+
# PROJECT_DIR=/path/to/your/app docker compose -f docker-compose.dev.yml up app
|
|
25
27
|
app:
|
|
26
28
|
image: node:20-alpine
|
|
27
29
|
container_name: tlc-${COMPOSE_PROJECT_NAME:-dev}-app
|
|
28
30
|
working_dir: /app
|
|
31
|
+
profiles:
|
|
32
|
+
- app
|
|
29
33
|
command: >
|
|
30
34
|
sh -c "
|
|
31
35
|
npm install &&
|
|
@@ -51,7 +55,7 @@ services:
|
|
|
51
55
|
ports:
|
|
52
56
|
- "${APP_PORT:-5001}:5001"
|
|
53
57
|
volumes:
|
|
54
|
-
-
|
|
58
|
+
- ${PROJECT_DIR:-.}:/app
|
|
55
59
|
- /app/node_modules
|
|
56
60
|
depends_on:
|
|
57
61
|
db:
|
|
@@ -95,31 +99,33 @@ services:
|
|
|
95
99
|
restart: on-failure
|
|
96
100
|
|
|
97
101
|
# TLC Dashboard (Express API + React SPA on port 3147)
|
|
98
|
-
#
|
|
102
|
+
# Mounts local TLC repo and user's home directory (read-only)
|
|
103
|
+
# so the dashboard setup screen can scan any code folder.
|
|
104
|
+
# Just run: docker compose -f docker-compose.dev.yml up
|
|
99
105
|
dashboard:
|
|
100
106
|
image: node:20-alpine
|
|
101
107
|
container_name: tlc-${COMPOSE_PROJECT_NAME:-dev}-dashboard
|
|
102
|
-
working_dir: /
|
|
108
|
+
working_dir: /tlc
|
|
103
109
|
command: >
|
|
104
110
|
sh -c "
|
|
105
|
-
echo '
|
|
106
|
-
|
|
107
|
-
TLC_DIR=/usr/local/lib/node_modules/tlc-claude-code &&
|
|
108
|
-
echo 'TLC installed at:' $$TLC_DIR &&
|
|
109
|
-
ls $$TLC_DIR/dashboard-web/dist/index.html && echo '[TLC] React SPA ready' || echo '[TLC] WARNING: dashboard-web/dist not found' &&
|
|
110
|
-
cd /project && node $$TLC_DIR/server/index.js --proxy-only --skip-db
|
|
111
|
+
echo '[TLC] Starting dashboard server...' &&
|
|
112
|
+
node server/index.js --proxy-only --skip-db
|
|
111
113
|
"
|
|
112
114
|
environment:
|
|
113
115
|
- TLC_PORT=3147
|
|
114
116
|
- TLC_PROXY_ONLY=true
|
|
115
117
|
- TLC_APP_PORT=5001
|
|
116
118
|
- TLC_AUTH=false
|
|
119
|
+
- TLC_CONFIG_DIR=${HOME}/.tlc
|
|
120
|
+
- DOCKER_SOCKET=/var/run/docker.sock
|
|
117
121
|
ports:
|
|
118
122
|
- "${DASHBOARD_PORT:-3147}:3147"
|
|
119
123
|
volumes:
|
|
120
|
-
-
|
|
121
|
-
|
|
122
|
-
-
|
|
124
|
+
- .:/tlc
|
|
125
|
+
- /var/run/docker.sock:/var/run/docker.sock
|
|
126
|
+
- ${HOME}/.tlc:${HOME}/.tlc
|
|
127
|
+
- ${HOME}:${HOME}:ro
|
|
128
|
+
- ${HOME}/.ssh:${HOME}/.ssh:ro
|
|
123
129
|
restart: on-failure
|
|
124
130
|
|
|
125
131
|
# Playwright E2E Tests (optional - starts on demand)
|
package/package.json
CHANGED
|
@@ -1,15 +1,16 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "tlc-claude-code",
|
|
3
|
-
"version": "2.0
|
|
3
|
+
"version": "2.2.0",
|
|
4
4
|
"description": "TLC - Test Led Coding for Claude Code",
|
|
5
5
|
"bin": {
|
|
6
|
-
"tlc": "./bin/tlc.js",
|
|
7
6
|
"tlc-claude-code": "./bin/install.js",
|
|
8
7
|
"tlc-docs": "./scripts/project-docs.js"
|
|
9
8
|
},
|
|
10
9
|
"files": [
|
|
11
10
|
"bin/",
|
|
11
|
+
".claude/agents/",
|
|
12
12
|
".claude/commands/",
|
|
13
|
+
".claude/hooks/",
|
|
13
14
|
"dashboard/dist/",
|
|
14
15
|
"dashboard/package.json",
|
|
15
16
|
"dashboard-web/dist/",
|
|
@@ -58,6 +59,7 @@
|
|
|
58
59
|
"devDependencies": {
|
|
59
60
|
"@playwright/test": "^1.58.1",
|
|
60
61
|
"playwright": "^1.58.1",
|
|
62
|
+
"supertest": "^7.2.2",
|
|
61
63
|
"text-to-image": "^8.0.1"
|
|
62
64
|
},
|
|
63
65
|
"dependencies": {
|
package/scripts/project-docs.js
CHANGED
package/server/index.js
CHANGED
|
@@ -24,6 +24,8 @@ const { autoProvision, stopDatabase } = require('./lib/auto-database');
|
|
|
24
24
|
const { GlobalConfig } = require('./lib/global-config');
|
|
25
25
|
const { ProjectScanner } = require('./lib/project-scanner');
|
|
26
26
|
const { createWorkspaceRouter } = require('./lib/workspace-api');
|
|
27
|
+
const { createMemoryApi } = require('./lib/memory-api');
|
|
28
|
+
const { createMemoryStoreAdapter } = require('./lib/memory-store-adapter');
|
|
27
29
|
const {
|
|
28
30
|
createUserStore,
|
|
29
31
|
createAuthMiddleware,
|
|
@@ -34,6 +36,12 @@ const {
|
|
|
34
36
|
hasPermission,
|
|
35
37
|
USER_ROLES,
|
|
36
38
|
} = require('./lib/auth-system');
|
|
39
|
+
const { createDockerClient } = require('./lib/docker-client');
|
|
40
|
+
const { createDockerRouter } = require('./lib/docker-api');
|
|
41
|
+
const { createSshClient } = require('./lib/ssh-client');
|
|
42
|
+
const { createVpsRouter } = require('./lib/vps-api');
|
|
43
|
+
const { createVpsMonitor } = require('./lib/vps-monitor');
|
|
44
|
+
const { createCommandRunner } = require('./lib/command-runner');
|
|
37
45
|
|
|
38
46
|
// Handle PGlite WASM crashes gracefully
|
|
39
47
|
process.on('uncaughtException', (err) => {
|
|
@@ -80,11 +88,146 @@ app.use(cors({ origin: true, credentials: true }));
|
|
|
80
88
|
// Workspace API
|
|
81
89
|
const globalConfig = new GlobalConfig();
|
|
82
90
|
const projectScanner = new ProjectScanner();
|
|
83
|
-
const
|
|
91
|
+
const { observeAndRemember } = require('./lib/memory-observer');
|
|
92
|
+
const { createServerMemoryCapture } = require('./lib/memory-hooks');
|
|
93
|
+
const { createMemoryStoreAdapter } = require('./lib/memory-store-adapter');
|
|
94
|
+
const { checkOllamaHealth } = require('./lib/ollama-health');
|
|
95
|
+
|
|
96
|
+
// Initialize memory directory structure (non-blocking)
|
|
97
|
+
(async () => {
|
|
98
|
+
try {
|
|
99
|
+
const { initMemorySystem } = require('./lib/memory-init');
|
|
100
|
+
await initMemorySystem(PROJECT_DIR);
|
|
101
|
+
} catch (err) {
|
|
102
|
+
console.warn('[TLC] Memory directory init skipped:', err.message);
|
|
103
|
+
}
|
|
104
|
+
})();
|
|
105
|
+
|
|
106
|
+
// Initialize server-level memory capture (auto-captures conversations)
|
|
107
|
+
const memoryCapture = createServerMemoryCapture({
|
|
108
|
+
projectRoot: PROJECT_DIR,
|
|
109
|
+
observeAndRemember,
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
// Lazy-initialized memory dependencies (ESM modules loaded async)
|
|
113
|
+
const memoryDeps = {
|
|
114
|
+
observeAndRemember,
|
|
115
|
+
semanticRecall: null,
|
|
116
|
+
vectorIndexer: null,
|
|
117
|
+
embeddingClient: null,
|
|
118
|
+
vectorStore: null,
|
|
119
|
+
};
|
|
120
|
+
|
|
121
|
+
// Lazy init: load ESM memory modules on first use
|
|
122
|
+
let memoryInitPromise = null;
|
|
123
|
+
async function initMemoryPipeline() {
|
|
124
|
+
if (memoryInitPromise) return memoryInitPromise;
|
|
125
|
+
memoryInitPromise = (async () => {
|
|
126
|
+
try {
|
|
127
|
+
const os = require('os');
|
|
128
|
+
const dbPath = path.join(os.homedir(), '.tlc', 'memory', 'vectors.db');
|
|
129
|
+
|
|
130
|
+
// Ensure directory exists
|
|
131
|
+
const dbDir = path.dirname(dbPath);
|
|
132
|
+
if (!fs.existsSync(dbDir)) {
|
|
133
|
+
fs.mkdirSync(dbDir, { recursive: true });
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
const { createEmbeddingClient } = await import('./lib/embedding-client.js');
|
|
137
|
+
const { createVectorStore } = await import('./lib/vector-store.js');
|
|
138
|
+
const { createVectorIndexer } = await import('./lib/vector-indexer.js');
|
|
139
|
+
const { createSemanticRecall } = await import('./lib/semantic-recall.js');
|
|
140
|
+
|
|
141
|
+
memoryDeps.embeddingClient = createEmbeddingClient();
|
|
142
|
+
memoryDeps.vectorStore = await createVectorStore({ dbPath });
|
|
143
|
+
memoryDeps.vectorIndexer = createVectorIndexer({
|
|
144
|
+
vectorStore: memoryDeps.vectorStore,
|
|
145
|
+
embeddingClient: memoryDeps.embeddingClient,
|
|
146
|
+
});
|
|
147
|
+
memoryDeps.semanticRecall = createSemanticRecall({
|
|
148
|
+
vectorStore: memoryDeps.vectorStore,
|
|
149
|
+
embeddingClient: memoryDeps.embeddingClient,
|
|
150
|
+
});
|
|
151
|
+
|
|
152
|
+
console.log('[TLC] Memory pipeline initialized (vector store at', dbPath + ')');
|
|
153
|
+
} catch (err) {
|
|
154
|
+
console.warn('[TLC] Memory pipeline unavailable:', err.message);
|
|
155
|
+
// Non-fatal: server works without vector store
|
|
156
|
+
}
|
|
157
|
+
})();
|
|
158
|
+
return memoryInitPromise;
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
// Start memory init in background (non-blocking)
|
|
162
|
+
initMemoryPipeline();
|
|
163
|
+
|
|
164
|
+
const memoryApi = createMemoryApi({
|
|
165
|
+
semanticRecall: { recall: async (...args) => {
|
|
166
|
+
await initMemoryPipeline();
|
|
167
|
+
return memoryDeps.semanticRecall ? memoryDeps.semanticRecall.recall(...args) : [];
|
|
168
|
+
}},
|
|
169
|
+
vectorIndexer: { indexAll: async (...args) => {
|
|
170
|
+
await initMemoryPipeline();
|
|
171
|
+
return memoryDeps.vectorIndexer ? memoryDeps.vectorIndexer.indexAll(...args) : { indexed: 0 };
|
|
172
|
+
}},
|
|
173
|
+
richCapture: { processChunk: async () => ({ stored: false }) },
|
|
174
|
+
embeddingClient: { embed: async (...args) => {
|
|
175
|
+
await initMemoryPipeline();
|
|
176
|
+
return memoryDeps.embeddingClient ? memoryDeps.embeddingClient.embed(...args) : [];
|
|
177
|
+
}},
|
|
178
|
+
memoryStore: (() => {
|
|
179
|
+
const adapter = createMemoryStoreAdapter(PROJECT_DIR);
|
|
180
|
+
return {
|
|
181
|
+
listConversations: async () => ({ items: [], total: 0 }), // TODO Phase 74: rich conversation capture
|
|
182
|
+
getConversation: async () => null, // TODO Phase 74: rich conversation capture
|
|
183
|
+
listDecisions: async () => adapter.listDecisions(),
|
|
184
|
+
listGotchas: async () => adapter.listGotchas(),
|
|
185
|
+
getStats: async () => adapter.getStats(),
|
|
186
|
+
};
|
|
187
|
+
})(),
|
|
188
|
+
});
|
|
189
|
+
const workspaceRouter = createWorkspaceRouter({
|
|
190
|
+
globalConfig,
|
|
191
|
+
projectScanner,
|
|
192
|
+
memoryApi,
|
|
193
|
+
memoryDeps,
|
|
194
|
+
});
|
|
84
195
|
app.use('/api/workspace', workspaceRouter);
|
|
85
196
|
// Also mount project-level routes at /api/projects for per-project endpoints
|
|
86
197
|
app.use('/api', workspaceRouter);
|
|
87
198
|
|
|
199
|
+
// ============================================
|
|
200
|
+
// Docker + VPS Management (Phase 80)
|
|
201
|
+
// ============================================
|
|
202
|
+
const DOCKER_SOCKET = process.env.DOCKER_SOCKET || '/var/run/docker.sock';
|
|
203
|
+
const TLC_CONFIG_DIR = process.env.TLC_CONFIG_DIR || path.join(require('os').homedir(), '.tlc');
|
|
204
|
+
|
|
205
|
+
// Docker client (graceful when socket unavailable)
|
|
206
|
+
let dockerClient;
|
|
207
|
+
try {
|
|
208
|
+
dockerClient = createDockerClient({ socketPath: DOCKER_SOCKET });
|
|
209
|
+
} catch (err) {
|
|
210
|
+
console.log('[TLC] Docker client init skipped:', err.message);
|
|
211
|
+
dockerClient = null;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
if (dockerClient) {
|
|
215
|
+
const dockerRouter = createDockerRouter({ dockerClient });
|
|
216
|
+
app.use('/api/docker', dockerRouter);
|
|
217
|
+
console.log('[TLC] Docker API mounted at /api/docker');
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
// SSH + VPS (always available, VPS operations fail gracefully)
|
|
221
|
+
const sshClient = createSshClient();
|
|
222
|
+
const vpsRouter = createVpsRouter({ sshClient, configDir: TLC_CONFIG_DIR });
|
|
223
|
+
app.use('/api/vps', vpsRouter);
|
|
224
|
+
|
|
225
|
+
// VPS monitor
|
|
226
|
+
const vpsMonitor = createVpsMonitor({ sshClient });
|
|
227
|
+
|
|
228
|
+
// Command runner
|
|
229
|
+
const commandRunner = createCommandRunner();
|
|
230
|
+
|
|
88
231
|
// ============================================
|
|
89
232
|
// Authentication Setup
|
|
90
233
|
// ============================================
|
|
@@ -478,7 +621,61 @@ wss.on('connection', (ws) => {
|
|
|
478
621
|
// Send recent logs to new client
|
|
479
622
|
ws.send(JSON.stringify({ type: 'init', data: { logs, appPort } }));
|
|
480
623
|
|
|
624
|
+
// Docker streaming state per connection
|
|
625
|
+
const dockerStreams = new Map();
|
|
626
|
+
|
|
627
|
+
ws.on('message', (raw) => {
|
|
628
|
+
try {
|
|
629
|
+
const msg = JSON.parse(raw.toString());
|
|
630
|
+
|
|
631
|
+
// Memory capture: auto-observe assistant responses
|
|
632
|
+
if (msg.type === 'assistant_response' && msg.text) {
|
|
633
|
+
memoryCapture.onAssistantResponse(msg.text);
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
// Memory capture: TLC command events flush capture
|
|
637
|
+
if (msg.type === 'tlc_command' && msg.command) {
|
|
638
|
+
memoryCapture.onTlcCommand(msg.command);
|
|
639
|
+
}
|
|
640
|
+
|
|
641
|
+
// Docker log streaming
|
|
642
|
+
if (msg.type === 'docker:subscribe-logs' && dockerClient && msg.containerId) {
|
|
643
|
+
const abort = dockerClient.streamContainerLogs(msg.containerId, (data) => {
|
|
644
|
+
if (ws.readyState === 1) {
|
|
645
|
+
ws.send(JSON.stringify({ type: 'docker:log', containerId: msg.containerId, data, timestamp: new Date().toISOString() }));
|
|
646
|
+
}
|
|
647
|
+
});
|
|
648
|
+
dockerStreams.set(`logs:${msg.containerId}`, abort);
|
|
649
|
+
}
|
|
650
|
+
|
|
651
|
+
if (msg.type === 'docker:unsubscribe-logs' && msg.containerId) {
|
|
652
|
+
const abort = dockerStreams.get(`logs:${msg.containerId}`);
|
|
653
|
+
if (abort) { abort(); dockerStreams.delete(`logs:${msg.containerId}`); }
|
|
654
|
+
}
|
|
655
|
+
|
|
656
|
+
// Docker stats streaming
|
|
657
|
+
if (msg.type === 'docker:subscribe-stats' && dockerClient && msg.containerId) {
|
|
658
|
+
const abort = dockerClient.streamContainerStats(msg.containerId, (stats) => {
|
|
659
|
+
if (ws.readyState === 1) {
|
|
660
|
+
ws.send(JSON.stringify({ type: 'docker:stats', containerId: msg.containerId, ...stats }));
|
|
661
|
+
}
|
|
662
|
+
});
|
|
663
|
+
dockerStreams.set(`stats:${msg.containerId}`, abort);
|
|
664
|
+
}
|
|
665
|
+
|
|
666
|
+
if (msg.type === 'docker:unsubscribe-stats' && msg.containerId) {
|
|
667
|
+
const abort = dockerStreams.get(`stats:${msg.containerId}`);
|
|
668
|
+
if (abort) { abort(); dockerStreams.delete(`stats:${msg.containerId}`); }
|
|
669
|
+
}
|
|
670
|
+
} catch {}
|
|
671
|
+
});
|
|
672
|
+
|
|
481
673
|
ws.on('close', () => {
|
|
674
|
+
// Clean up Docker streams
|
|
675
|
+
for (const abort of dockerStreams.values()) {
|
|
676
|
+
try { abort(); } catch {}
|
|
677
|
+
}
|
|
678
|
+
dockerStreams.clear();
|
|
482
679
|
wsClients.delete(ws);
|
|
483
680
|
console.log(`[TLC] Client disconnected (${wsClients.size} total)`);
|
|
484
681
|
});
|
|
@@ -1036,6 +1233,9 @@ app.post('/api/commands/:command', (req, res) => {
|
|
|
1036
1233
|
addLog('app', `Executing command: tlc:${command}${args ? ' ' + args : ''}`, 'info');
|
|
1037
1234
|
broadcast('command-started', { id: entry.id, command });
|
|
1038
1235
|
|
|
1236
|
+
// Flush memory capture on TLC command execution
|
|
1237
|
+
memoryCapture.onTlcCommand(command);
|
|
1238
|
+
|
|
1039
1239
|
// Build the CLI command
|
|
1040
1240
|
const cliArgs = ['tlc', command];
|
|
1041
1241
|
if (args) cliArgs.push(args);
|
|
@@ -1115,8 +1315,16 @@ app.put('/api/config', (req, res) => {
|
|
|
1115
1315
|
});
|
|
1116
1316
|
|
|
1117
1317
|
// GET /api/health - System health status
|
|
1118
|
-
app.get('/api/health', (req, res) => {
|
|
1318
|
+
app.get('/api/health', async (req, res) => {
|
|
1119
1319
|
const memUsage = process.memoryUsage();
|
|
1320
|
+
const ollamaHealth = await checkOllamaHealth();
|
|
1321
|
+
const adapter = createMemoryStoreAdapter(PROJECT_DIR);
|
|
1322
|
+
let memoryStats;
|
|
1323
|
+
try {
|
|
1324
|
+
memoryStats = await adapter.getStats();
|
|
1325
|
+
} catch {
|
|
1326
|
+
memoryStats = { decisions: 0, gotchas: 0, total: 0 };
|
|
1327
|
+
}
|
|
1120
1328
|
res.json({
|
|
1121
1329
|
status: 'ok',
|
|
1122
1330
|
timestamp: new Date().toISOString(),
|
|
@@ -1128,6 +1336,14 @@ app.get('/api/health', (req, res) => {
|
|
|
1128
1336
|
},
|
|
1129
1337
|
appRunning: appProcess !== null || appIsDocker,
|
|
1130
1338
|
appPort,
|
|
1339
|
+
tlcMemory: {
|
|
1340
|
+
ollama: ollamaHealth,
|
|
1341
|
+
fileStore: {
|
|
1342
|
+
decisions: memoryStats.decisions,
|
|
1343
|
+
gotchas: memoryStats.gotchas,
|
|
1344
|
+
total: memoryStats.total,
|
|
1345
|
+
},
|
|
1346
|
+
},
|
|
1131
1347
|
});
|
|
1132
1348
|
});
|
|
1133
1349
|
|
|
@@ -1555,6 +1771,16 @@ async function main() {
|
|
|
1555
1771
|
// Initialize authentication
|
|
1556
1772
|
await initializeAuth();
|
|
1557
1773
|
|
|
1774
|
+
// Check for port conflicts before listening
|
|
1775
|
+
const { checkPort } = require('./lib/port-guard');
|
|
1776
|
+
const portCheck = await checkPort(TLC_PORT);
|
|
1777
|
+
if (!portCheck.available) {
|
|
1778
|
+
console.error(`\n Port ${TLC_PORT} is already in use.`);
|
|
1779
|
+
console.error(' Another TLC server may be running, or a different process holds this port.');
|
|
1780
|
+
console.error(' Use TLC_PORT=<port> to choose a different port.\n');
|
|
1781
|
+
process.exit(1);
|
|
1782
|
+
}
|
|
1783
|
+
|
|
1558
1784
|
server.listen(TLC_PORT, () => {
|
|
1559
1785
|
console.log(` Dashboard: http://localhost:${TLC_PORT}`);
|
|
1560
1786
|
console.log(` Share: http://${getLocalIP()}:${TLC_PORT}`);
|
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Capture Bridge - Connects Claude Code Stop hooks to TLC memory capture.
|
|
3
|
+
*
|
|
4
|
+
* Called by the shell hook script (.claude/hooks/tlc-capture-exchange.sh).
|
|
5
|
+
* Reads Stop hook stdin, extracts the exchange, POSTs to the TLC server
|
|
6
|
+
* capture endpoint, and spools to a local JSONL file on failure.
|
|
7
|
+
*
|
|
8
|
+
* @module capture-bridge
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const fs = require('fs');
|
|
12
|
+
const path = require('path');
|
|
13
|
+
|
|
14
|
+
/** Spool filename for failed capture attempts (created inside the spool dir, by default <cwd>/.tlc/memory) */
const SPOOL_FILENAME = '.spool.jsonl';

/** Maximum message size before truncation (10KB); applied separately to the user and assistant text */
const MAX_MESSAGE_SIZE = 10240;

/** Default TLC server port used to build the capture endpoint URL */
const DEFAULT_PORT = 3147;
|
|
22
|
+
|
|
23
|
+
/**
 * Parse the JSON payload a Claude Code Stop hook delivers on stdin.
 * Maps the hook's snake_case fields onto camelCase, substituting null
 * for anything absent or falsy.
 *
 * @param {string} input - Raw JSON string from stdin
 * @returns {{ sessionId: string, assistantMessage: string|null, transcriptPath: string|null, cwd: string|null } | null}
 */
function parseStopHookInput(input) {
  if (!input) return null;

  try {
    const hook = JSON.parse(input);
    const orNull = (value) => value || null;
    return {
      sessionId: orNull(hook.session_id),
      assistantMessage: orNull(hook.last_assistant_message),
      transcriptPath: orNull(hook.transcript_path),
      cwd: orNull(hook.cwd),
    };
  } catch {
    // Malformed JSON (or a non-object payload) — treat as no input.
    return null;
  }
}
|
|
44
|
+
|
|
45
|
+
/**
 * Extract the last user message from a Claude Code transcript JSONL file.
 * Scans every line, remembering the most recent entry with role 'user' and
 * non-empty content. Malformed lines are skipped; read errors yield null.
 *
 * @param {string} transcriptPath - Absolute path to the transcript .jsonl file
 * @returns {string|null} Last user message text, or null
 */
function extractLastUserMessage(transcriptPath) {
  let raw;
  try {
    raw = fs.readFileSync(transcriptPath, 'utf-8').trim();
  } catch {
    // Missing or unreadable transcript.
    return null;
  }
  if (!raw) return null;

  let latest = null;
  for (const line of raw.split('\n')) {
    if (!line) continue;
    try {
      const entry = JSON.parse(line);
      if (entry.role === 'user' && entry.content) {
        latest = entry.content;
      }
    } catch {
      // Skip malformed lines
    }
  }
  return latest;
}
|
|
75
|
+
|
|
76
|
+
/**
 * Detect the project ID for a working directory.
 * Prefers the `project` field of a `.tlc.json` in that directory; otherwise
 * falls back to the directory basename, or 'unknown' if that is empty.
 *
 * @param {string} cwd - Working directory path
 * @returns {string} Project identifier
 */
function detectProjectId(cwd) {
  try {
    const configPath = path.join(cwd, '.tlc.json');
    if (fs.existsSync(configPath)) {
      const { project } = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
      if (project) return project;
    }
  } catch {
    // Unreadable or invalid .tlc.json — fall through to basename
  }
  return path.basename(cwd) || 'unknown';
}
|
|
95
|
+
|
|
96
|
+
/**
 * Truncate a string to MAX_MESSAGE_SIZE, appending a marker if truncated.
 * Null/undefined input normalizes to the empty string.
 *
 * @param {string} text - Input text
 * @returns {string} Possibly truncated text
 */
function truncate(text) {
  const value = text || '';
  return value.length <= MAX_MESSAGE_SIZE
    ? value
    : value.slice(0, MAX_MESSAGE_SIZE) + '... [truncated]';
}
|
|
106
|
+
|
|
107
|
+
/**
 * Append a failed capture to the local spool file for later retry.
 * Best-effort: any filesystem error is swallowed so the hook never breaks.
 *
 * @param {string} spoolDir - Directory containing the spool file
 * @param {object} payload - The capture payload that failed to send
 */
function appendToSpool(spoolDir, payload) {
  try {
    // recursive mkdir is a no-op when the directory already exists
    fs.mkdirSync(spoolDir, { recursive: true });
    const spoolPath = path.join(spoolDir, SPOOL_FILENAME);
    fs.appendFileSync(spoolPath, `${JSON.stringify(payload)}\n`);
  } catch {
    // Spool write failure is non-fatal
  }
}
|
|
124
|
+
|
|
125
|
+
/**
 * Capture an exchange and POST it to the TLC server.
 * On a non-OK response or an unreachable server, the payload is spooled to a
 * local JSONL file so drainSpool() can retry it later.
 * Never throws — all errors are swallowed.
 *
 * @param {object} opts
 * @param {string} opts.cwd - Working directory
 * @param {string} opts.assistantMessage - The assistant's response text
 * @param {string|null} opts.userMessage - The user's prompt text
 * @param {string} opts.sessionId - Session identifier (accepted, currently unused)
 * @param {object} [deps] - Injectable dependencies
 * @param {Function} [deps.fetch] - fetch implementation (default: globalThis.fetch)
 * @param {string} [deps.spoolDir] - Spool directory override
 * @param {number} [deps.port] - Server port override
 */
async function captureExchange(opts, deps = {}) {
  try {
    const { cwd, assistantMessage, userMessage } = opts;
    const fetchFn = deps.fetch || globalThis.fetch;
    const port = deps.port || DEFAULT_PORT;

    // Nothing to capture without an assistant message.
    if (!assistantMessage) return;

    // Normalize cwd once. Previously only detectProjectId guarded against a
    // missing cwd; the spool path did not, so path.join(null, ...) threw a
    // TypeError that the outer catch swallowed — the exchange was neither
    // sent nor spooled, i.e. silently lost.
    const workDir = cwd || '.';
    const projectId = detectProjectId(workDir);
    const spoolDir = deps.spoolDir || path.join(workDir, '.tlc', 'memory');

    const payload = {
      projectId,
      exchanges: [{
        user: truncate(userMessage || ''),
        assistant: truncate(assistantMessage),
        timestamp: Date.now(),
      }],
    };

    try {
      const url = `http://localhost:${port}/api/projects/${encodeURIComponent(projectId)}/memory/capture`;
      const response = await fetchFn(url, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload),
      });

      if (!response.ok) {
        appendToSpool(spoolDir, payload);
      }
    } catch {
      // Server unreachable — spool for later
      appendToSpool(spoolDir, payload);
    }
  } catch {
    // Total failure — silently ignore
  }
}
|
|
180
|
+
|
|
181
|
+
/**
 * Drain the local spool file by POSTing each entry to the capture endpoint.
 * Entries that send successfully are removed; anything that fails to parse
 * or to deliver is kept for the next drain. Never throws.
 *
 * @param {string} spoolDir - Directory containing the spool file
 * @param {object} [deps] - Injectable dependencies
 * @param {Function} [deps.fetch] - fetch implementation
 * @param {number} [deps.port] - Server port override
 */
async function drainSpool(spoolDir, deps = {}) {
  try {
    const fetchFn = deps.fetch || globalThis.fetch;
    const port = deps.port || DEFAULT_PORT;
    const spoolPath = path.join(spoolDir, SPOOL_FILENAME);

    if (!fs.existsSync(spoolPath)) return;

    const content = fs.readFileSync(spoolPath, 'utf-8').trim();
    if (!content) return;

    const remaining = [];
    for (const line of content.split('\n')) {
      if (!line) continue;

      let delivered = false;
      try {
        const entry = JSON.parse(line);
        const projectId = entry.projectId || 'unknown';
        const url = `http://localhost:${port}/api/projects/${encodeURIComponent(projectId)}/memory/capture`;

        const response = await fetchFn(url, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ exchanges: entry.exchanges }),
        });

        delivered = response.ok;
      } catch {
        // Parse or network failure — keep the entry
        delivered = false;
      }

      if (!delivered) remaining.push(line);
    }

    // Rewrite spool with only the entries that still need delivery
    fs.writeFileSync(spoolPath, remaining.length > 0 ? remaining.join('\n') + '\n' : '');
  } catch {
    // Drain failure is non-fatal
  }
}
|
|
231
|
+
|
|
232
|
+
// Public surface: the hook pipeline entry points (parseStopHookInput,
// captureExchange, drainSpool) plus the helpers and constants they use.
module.exports = {
  parseStopHookInput,
  extractLastUserMessage,
  captureExchange,
  drainSpool,
  detectProjectId,
  truncate,
  SPOOL_FILENAME,
  MAX_MESSAGE_SIZE,
  DEFAULT_PORT,
};
|