claude-deck 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. package/LICENSE +201 -0
  2. package/dashboard/assets/index-Cux_zpcb.js +244 -0
  3. package/dashboard/assets/index-Dg_mccmz.css +1 -0
  4. package/dashboard/assets/inter-cyrillic-400-normal-HOLc17fK.woff +0 -0
  5. package/dashboard/assets/inter-cyrillic-400-normal-obahsSVq.woff2 +0 -0
  6. package/dashboard/assets/inter-cyrillic-500-normal-BasfLYem.woff2 +0 -0
  7. package/dashboard/assets/inter-cyrillic-500-normal-CxZf_p3X.woff +0 -0
  8. package/dashboard/assets/inter-cyrillic-600-normal-4D_pXhcN.woff +0 -0
  9. package/dashboard/assets/inter-cyrillic-600-normal-CWCymEST.woff2 +0 -0
  10. package/dashboard/assets/inter-cyrillic-700-normal-CjBOestx.woff2 +0 -0
  11. package/dashboard/assets/inter-cyrillic-700-normal-DrXBdSj3.woff +0 -0
  12. package/dashboard/assets/inter-cyrillic-ext-400-normal-BQZuk6qB.woff2 +0 -0
  13. package/dashboard/assets/inter-cyrillic-ext-400-normal-DQukG94-.woff +0 -0
  14. package/dashboard/assets/inter-cyrillic-ext-500-normal-B0yAr1jD.woff2 +0 -0
  15. package/dashboard/assets/inter-cyrillic-ext-500-normal-BmqWE9Dz.woff +0 -0
  16. package/dashboard/assets/inter-cyrillic-ext-600-normal-Bcila6Z-.woff +0 -0
  17. package/dashboard/assets/inter-cyrillic-ext-600-normal-Dfes3d0z.woff2 +0 -0
  18. package/dashboard/assets/inter-cyrillic-ext-700-normal-BjwYoWNd.woff2 +0 -0
  19. package/dashboard/assets/inter-cyrillic-ext-700-normal-LO58E6JB.woff +0 -0
  20. package/dashboard/assets/inter-greek-400-normal-B4URO6DV.woff2 +0 -0
  21. package/dashboard/assets/inter-greek-400-normal-q2sYcFCs.woff +0 -0
  22. package/dashboard/assets/inter-greek-500-normal-BIZE56-Y.woff2 +0 -0
  23. package/dashboard/assets/inter-greek-500-normal-Xzm54t5V.woff +0 -0
  24. package/dashboard/assets/inter-greek-600-normal-BZpKdvQh.woff +0 -0
  25. package/dashboard/assets/inter-greek-600-normal-plRanbMR.woff2 +0 -0
  26. package/dashboard/assets/inter-greek-700-normal-BUv2fZ6O.woff +0 -0
  27. package/dashboard/assets/inter-greek-700-normal-C3JjAnD8.woff2 +0 -0
  28. package/dashboard/assets/inter-greek-ext-400-normal-DGGRlc-M.woff2 +0 -0
  29. package/dashboard/assets/inter-greek-ext-400-normal-KugGGMne.woff +0 -0
  30. package/dashboard/assets/inter-greek-ext-500-normal-2j5mBUwD.woff +0 -0
  31. package/dashboard/assets/inter-greek-ext-500-normal-C4iEst2y.woff2 +0 -0
  32. package/dashboard/assets/inter-greek-ext-600-normal-B8X0CLgF.woff +0 -0
  33. package/dashboard/assets/inter-greek-ext-600-normal-DRtmH8MT.woff2 +0 -0
  34. package/dashboard/assets/inter-greek-ext-700-normal-BoQ6DsYi.woff +0 -0
  35. package/dashboard/assets/inter-greek-ext-700-normal-qfdV9bQt.woff2 +0 -0
  36. package/dashboard/assets/inter-latin-400-normal-C38fXH4l.woff2 +0 -0
  37. package/dashboard/assets/inter-latin-400-normal-CyCys3Eg.woff +0 -0
  38. package/dashboard/assets/inter-latin-500-normal-BL9OpVg8.woff +0 -0
  39. package/dashboard/assets/inter-latin-500-normal-Cerq10X2.woff2 +0 -0
  40. package/dashboard/assets/inter-latin-600-normal-CiBQ2DWP.woff +0 -0
  41. package/dashboard/assets/inter-latin-600-normal-LgqL8muc.woff2 +0 -0
  42. package/dashboard/assets/inter-latin-700-normal-BLAVimhd.woff +0 -0
  43. package/dashboard/assets/inter-latin-700-normal-Yt3aPRUw.woff2 +0 -0
  44. package/dashboard/assets/inter-latin-ext-400-normal-77YHD8bZ.woff +0 -0
  45. package/dashboard/assets/inter-latin-ext-400-normal-C1nco2VV.woff2 +0 -0
  46. package/dashboard/assets/inter-latin-ext-500-normal-BxGbmqWO.woff +0 -0
  47. package/dashboard/assets/inter-latin-ext-500-normal-CV4jyFjo.woff2 +0 -0
  48. package/dashboard/assets/inter-latin-ext-600-normal-CIVaiw4L.woff +0 -0
  49. package/dashboard/assets/inter-latin-ext-600-normal-D2bJ5OIk.woff2 +0 -0
  50. package/dashboard/assets/inter-latin-ext-700-normal-Ca8adRJv.woff2 +0 -0
  51. package/dashboard/assets/inter-latin-ext-700-normal-TidjK2hL.woff +0 -0
  52. package/dashboard/assets/inter-vietnamese-400-normal-Bbgyi5SW.woff +0 -0
  53. package/dashboard/assets/inter-vietnamese-400-normal-DMkecbls.woff2 +0 -0
  54. package/dashboard/assets/inter-vietnamese-500-normal-DOriooB6.woff2 +0 -0
  55. package/dashboard/assets/inter-vietnamese-500-normal-mJboJaSs.woff +0 -0
  56. package/dashboard/assets/inter-vietnamese-600-normal-BuLX-rYi.woff +0 -0
  57. package/dashboard/assets/inter-vietnamese-600-normal-Cc8MFFhd.woff2 +0 -0
  58. package/dashboard/assets/inter-vietnamese-700-normal-BZaoP0fm.woff +0 -0
  59. package/dashboard/assets/inter-vietnamese-700-normal-DlLaEgI2.woff2 +0 -0
  60. package/dashboard/index.html +13 -0
  61. package/dist/cli.d.ts +2 -0
  62. package/dist/cli.js +101 -0
  63. package/dist/db/index.d.ts +3 -0
  64. package/dist/db/index.js +24 -0
  65. package/dist/db/queries.d.ts +29 -0
  66. package/dist/db/queries.js +388 -0
  67. package/dist/db/schema.d.ts +1 -0
  68. package/dist/db/schema.js +90 -0
  69. package/dist/parser/cost.d.ts +3 -0
  70. package/dist/parser/cost.js +88 -0
  71. package/dist/parser/index.d.ts +9 -0
  72. package/dist/parser/index.js +89 -0
  73. package/dist/parser/session-parser.d.ts +2 -0
  74. package/dist/parser/session-parser.js +229 -0
  75. package/dist/parser/subagent-parser.d.ts +5 -0
  76. package/dist/parser/subagent-parser.js +150 -0
  77. package/dist/server/index.d.ts +2 -0
  78. package/dist/server/index.js +52 -0
  79. package/dist/server/routes/sessions.d.ts +3 -0
  80. package/dist/server/routes/sessions.js +31 -0
  81. package/dist/server/routes/stats.d.ts +3 -0
  82. package/dist/server/routes/stats.js +11 -0
  83. package/dist/server/routes/sync.d.ts +3 -0
  84. package/dist/server/routes/sync.js +11 -0
  85. package/dist/types.d.ts +292 -0
  86. package/dist/types.js +2 -0
  87. package/package.json +56 -0
@@ -0,0 +1,388 @@
1
// ─── Insert/Update ───
/**
 * Writes one parsed session to the database, replacing any previous copy.
 *
 * Runs as a single better-sqlite3 transaction: all child rows (tool_calls,
 * messages, subagents, compactions) and the session row itself are deleted
 * first, then re-inserted from the parsed data — so re-syncing a changed
 * JSONL file fully refreshes the session.
 *
 * @param db      better-sqlite3 Database handle
 * @param session parsed session object (tool calls include subagent calls,
 *                tagged via subagentId — see syncAll)
 */
export function upsertSession(db, session) {
    const txn = db.transaction(() => {
        // Delete existing data for this session (re-insert on sync)
        db.prepare("DELETE FROM tool_calls WHERE session_id = ?").run(session.id);
        db.prepare("DELETE FROM messages WHERE session_id = ?").run(session.id);
        db.prepare("DELETE FROM subagents WHERE session_id = ?").run(session.id);
        db.prepare("DELETE FROM compactions WHERE session_id = ?").run(session.id);
        db.prepare("DELETE FROM sessions WHERE id = ?").run(session.id);
        // Insert session. Status is always stored as 'completed'; synced_at is
        // stamped with the current wall-clock time.
        db.prepare(`
      INSERT INTO sessions (
        id, project, project_hash, first_prompt, model, status,
        input_tokens, output_tokens, cache_read_tokens, cache_create_tokens, estimated_cost_usd,
        message_count, tool_call_count, subagent_count, turn_count, peak_context_tokens,
        started_at, ended_at, duration_ms, synced_at, jsonl_path, jsonl_mtime
      ) VALUES (?, ?, ?, ?, ?, 'completed', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(session.id, session.project, session.projectHash, session.firstPrompt, session.model, session.inputTokens, session.outputTokens, session.cacheReadTokens, session.cacheCreateTokens, session.estimatedCostUsd, session.messageCount, session.toolCallCount, session.subagentCount, session.turnCount, session.peakContextTokens, session.startedAt, session.endedAt, session.durationMs, new Date().toISOString(), session.jsonlPath, session.jsonlMtime);
        // Insert tool calls (subagent tool calls carry a non-null subagent_id)
        const insertTc = db.prepare(`
      INSERT INTO tool_calls (session_id, subagent_id, tool_use_id, tool_name, tool_input, tool_response, status, timestamp)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?)
    `);
        for (const tc of session.toolCalls) {
            insertTc.run(session.id, tc.subagentId, tc.toolUseId, tc.toolName, tc.toolInput, tc.toolResponse, tc.status, tc.timestamp);
        }
        // Insert messages
        const insertMsg = db.prepare(`
      INSERT INTO messages (session_id, role, content, timestamp, model, cost_usd) VALUES (?, ?, ?, ?, ?, ?)
    `);
        for (const msg of session.messages) {
            insertMsg.run(session.id, msg.role, msg.content, msg.timestamp, msg.model, msg.costUsd);
        }
        // Insert subagents
        const insertSub = db.prepare(`
      INSERT INTO subagents (id, session_id, agent_type, model, prompt, input_tokens, output_tokens, cache_read_tokens, cache_create_tokens, estimated_cost_usd, tool_call_count, duration_ms, result_summary)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
        for (const sub of session.subagents) {
            insertSub.run(sub.id, session.id, sub.agentType, sub.model, sub.prompt, sub.inputTokens, sub.outputTokens, sub.cacheReadTokens, sub.cacheCreateTokens, sub.estimatedCostUsd, sub.toolCallCount, sub.durationMs, sub.resultSummary);
        }
        // Insert compactions
        const insertCompact = db.prepare(`
      INSERT INTO compactions (session_id, trigger, pre_tokens, timestamp) VALUES (?, ?, ?, ?)
    `);
        for (const c of session.compactions) {
            insertCompact.run(session.id, c.trigger, c.preTokens, c.timestamp);
        }
    });
    txn();
}
52
// ─── Queries ───
/**
 * Lists sessions with optional filtering, sorting, and pagination.
 *
 * Filters: project/model (substring match), after/before (compared against
 * started_at). Sort keys: cost | tokens | date (default) | duration, all
 * descending. Each row is augmented with its compaction_count and
 * peak_context_pct (percentage of a 200k-token context window).
 *
 * @returns { sessions, total } where total ignores limit/offset
 */
export function listSessions(db, opts = {}) {
    const clauses = [];
    const bind = [];
    const addFilter = (clause, value) => {
        clauses.push(clause);
        bind.push(value);
    };
    if (opts.project)
        addFilter("project LIKE ?", `%${opts.project}%`);
    if (opts.model)
        addFilter("model LIKE ?", `%${opts.model}%`);
    if (opts.after)
        addFilter("started_at >= ?", opts.after);
    if (opts.before)
        addFilter("started_at <= ?", opts.before);
    const where = clauses.length ? `WHERE ${clauses.join(" AND ")}` : "";
    const SORT_COLUMNS = {
        cost: "estimated_cost_usd DESC",
        tokens: "(input_tokens + output_tokens + cache_read_tokens + cache_create_tokens) DESC",
        date: "started_at DESC",
        duration: "duration_ms DESC",
    };
    // Unknown sort keys fall back to date ordering.
    const orderBy = SORT_COLUMNS[opts.sort ?? "date"] ?? SORT_COLUMNS.date;
    const limit = opts.limit ?? 50;
    const offset = opts.offset ?? 0;
    const total = db.prepare(`SELECT COUNT(*) as count FROM sessions ${where}`).get(...bind);
    // The main query aliases sessions as "s", so qualify the filter and sort
    // columns before splicing them in.
    const aliasedWhere = where ? where.replace(/\b(started_at|project|model)\b/g, "s.$1") : "";
    const aliasedOrder = orderBy.replace(/\b(started_at|estimated_cost_usd|input_tokens|output_tokens|cache_read_tokens|cache_create_tokens|duration_ms)\b/g, "s.$1");
    const sessions = db
        .prepare(`SELECT s.*,
      COALESCE((SELECT COUNT(*) FROM compactions c WHERE c.session_id = s.id), 0) as compaction_count,
      ROUND(MAX(s.peak_context_tokens, COALESCE((SELECT MAX(c.pre_tokens) FROM compactions c WHERE c.session_id = s.id), 0)) * 100.0 / 200000, 1) as peak_context_pct
    FROM sessions s ${aliasedWhere} ORDER BY ${aliasedOrder} LIMIT ? OFFSET ?`)
        .all(...bind, limit, offset);
    return { sessions, total: total.count };
}
91
/**
 * Fetches a single session row by ID, augmented with its compaction count and
 * peak context usage as a percentage of a 200k-token window.
 * Returns undefined when no such session exists.
 */
export function getSession(db, id) {
    const sql = `SELECT s.*,
      COALESCE((SELECT COUNT(*) FROM compactions c WHERE c.session_id = s.id), 0) as compaction_count,
      ROUND(MAX(s.peak_context_tokens, COALESCE((SELECT MAX(c.pre_tokens) FROM compactions c WHERE c.session_id = s.id), 0)) * 100.0 / 200000, 1) as peak_context_pct
    FROM sessions s WHERE s.id = ?`;
    return db.prepare(sql).get(id);
}
99
/**
 * Returns the merged session timeline: tool calls and messages interleaved in
 * timestamp order. Each row carries a synthetic _type column ("tool_call" or
 * "message") so the caller can tell them apart.
 */
export function getTimeline(db, sessionId) {
    const byTimestamp = (a, b) => a.timestamp.localeCompare(b.timestamp);
    const toolCalls = db
        .prepare("SELECT *, 'tool_call' as _type FROM tool_calls WHERE session_id = ? ORDER BY timestamp")
        .all(sessionId);
    const messages = db
        .prepare("SELECT *, 'message' as _type FROM messages WHERE session_id = ? ORDER BY timestamp")
        .all(sessionId);
    // Timestamps are ISO-8601 strings, so lexicographic order is time order.
    return toolCalls.concat(messages).sort(byTimestamp);
}
110
/** Returns all subagent rows for a session, ordered by agent ID. */
export function getSubagents(db, sessionId) {
    const stmt = db.prepare("SELECT * FROM subagents WHERE session_id = ? ORDER BY id");
    return stmt.all(sessionId);
}
115
/**
 * Aggregate usage statistics over sessions, optionally filtered by a
 * start-date window (after/before, compared against started_at) and a model
 * substring.
 *
 * Returns overall token/cost totals plus breakdowns by model, by project,
 * by calendar day, and the 15 most-used tools.
 *
 * @param db   better-sqlite3 Database handle
 * @param opts { after?, before?, model? }
 */
export function getStats(db, opts = {}) {
    const conditions = [];
    const params = [];
    if (opts.after) {
        conditions.push("started_at >= ?");
        params.push(opts.after);
    }
    if (opts.before) {
        conditions.push("started_at <= ?");
        params.push(opts.before);
    }
    if (opts.model) {
        conditions.push("model LIKE ?");
        params.push(`%${opts.model}%`);
    }
    const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : "";
    const agg = db
        .prepare(`
    SELECT
      COUNT(*) as totalSessions,
      COALESCE(SUM(estimated_cost_usd), 0) as totalCost,
      COALESCE(SUM(input_tokens + output_tokens + cache_read_tokens + cache_create_tokens), 0) as totalTokens,
      COALESCE(SUM(input_tokens), 0) as inputTokens,
      COALESCE(SUM(output_tokens), 0) as outputTokens,
      COALESCE(SUM(cache_read_tokens), 0) as cacheReadTokens,
      COALESCE(SUM(cache_create_tokens), 0) as cacheCreateTokens
    FROM sessions ${where}
  `)
        .get(...params);
    const byModel = db
        .prepare(`
    SELECT model, COUNT(*) as sessions, SUM(estimated_cost_usd) as cost,
      SUM(input_tokens + output_tokens + cache_read_tokens + cache_create_tokens) as tokens
    FROM sessions ${where}
    GROUP BY model ORDER BY cost DESC
  `)
        .all(...params);
    const byProject = db
        .prepare(`
    SELECT project, COUNT(*) as sessions, SUM(estimated_cost_usd) as cost,
      SUM(input_tokens + output_tokens + cache_read_tokens + cache_create_tokens) as tokens
    FROM sessions ${where}
    GROUP BY project ORDER BY cost DESC
  `)
        .all(...params);
    const byDay = db
        .prepare(`
    SELECT DATE(started_at) as date, COUNT(*) as sessions, SUM(estimated_cost_usd) as cost,
      SUM(input_tokens + output_tokens + cache_read_tokens + cache_create_tokens) as tokens
    FROM sessions ${where}
    GROUP BY DATE(started_at) ORDER BY date
  `)
        .all(...params);
    // The tool query joins sessions, so every filter column must be qualified
    // with the "s." alias. The previous code used
    // where.replace("started_at", "s.started_at"), which (a) rewrites only the
    // FIRST occurrence — with both `after` and `before` set, the second
    // started_at stayed bare — and (b) never qualified `model`; it only worked
    // because tool_calls happens to have no columns with those names. Qualify
    // all filter columns explicitly with a global regex.
    const joinWhere = where ? where.replace(/\b(started_at|model)\b/g, "s.$1") : "";
    const topTools = db
        .prepare(`
    SELECT tool_name as tool, COUNT(*) as count
    FROM tool_calls tc JOIN sessions s ON tc.session_id = s.id ${joinWhere}
    GROUP BY tool_name ORDER BY count DESC LIMIT 15
  `)
        .all(...params);
    return {
        ...agg,
        // Guard the division so an empty result set yields 0, not NaN.
        avgCostPerSession: agg.totalSessions > 0 ? agg.totalCost / agg.totalSessions : 0,
        byModel,
        byProject,
        byDay,
        topTools,
    };
}
184
/**
 * Returns the stored JSONL mtime for a session, or undefined when the session
 * has never been synced. Used by the sync pass to skip unchanged files.
 */
export function getSessionMtime(db, id) {
    const row = db.prepare("SELECT jsonl_mtime FROM sessions WHERE id = ?").get(id);
    if (row == null)
        return undefined;
    return row.jsonl_mtime;
}
188
// ─── Session Insights ───
// Maps tool names to the coarse activity category used for phase analysis.
const TOOL_CATEGORIES = {
    Read: "explore",
    Glob: "explore",
    Grep: "explore",
    Explore: "explore",
    WebFetch: "research",
    WebSearch: "research",
    Write: "write",
    Edit: "write",
    NotebookEdit: "write",
    Bash: "execute",
    Task: "other",
    TaskCreate: "other",
    TaskUpdate: "other",
    TaskList: "other",
    TaskGet: "other",
    EnterPlanMode: "other",
    ExitPlanMode: "other",
    AskUserQuestion: "other",
    Skill: "other",
};
/**
 * Returns the activity category for a tool name, defaulting to "other".
 *
 * Uses an own-property check so inherited Object.prototype members cannot leak
 * through: previously a tool named e.g. "toString" or "constructor" returned
 * the inherited function (truthy, so `??` never applied) instead of "other".
 */
function getCategory(toolName) {
    return Object.prototype.hasOwnProperty.call(TOOL_CATEGORIES, toolName)
        ? TOOL_CATEGORIES[toolName]
        : "other";
}
213
/**
 * Computes derived insights for one session:
 *  - fileReads:        files Read 2+ times (possible re-read churn)
 *  - phases:           tool activity aggregated by category (explore/research/
 *                      write/execute/other), with per-category tool counts and
 *                      wall-clock duration
 *  - webFetches:       WebFetch/WebSearch calls with a heuristic error flag
 *  - toolDistribution: per-tool call counts
 *  - compaction:       context-compaction events
 *
 * @param db        better-sqlite3 Database handle
 * @param sessionId session to analyze
 */
export function getSessionInsights(db, sessionId) {
    // File read patterns — files read 2+ times
    const allReads = db
        .prepare(`SELECT tool_input FROM tool_calls WHERE session_id = ? AND tool_name = 'Read' AND tool_input IS NOT NULL`)
        .all(sessionId);
    const fileCounts = new Map();
    for (const row of allReads) {
        try {
            // tool_input is stored as a JSON string; Read inputs carry file_path.
            const input = JSON.parse(row.tool_input);
            const file = input.file_path;
            if (file)
                fileCounts.set(file, (fileCounts.get(file) ?? 0) + 1);
        }
        catch {
            /* skip unparseable tool_input */
        }
    }
    const fileReads = [...fileCounts.entries()]
        .filter(([, count]) => count >= 2)
        .sort((a, b) => b[1] - a[1])
        .map(([file, count]) => ({ file, count }));
    // Tool distribution
    const toolDistribution = db
        .prepare(`SELECT tool_name as tool, COUNT(*) as count FROM tool_calls WHERE session_id = ? GROUP BY tool_name ORDER BY count DESC`)
        .all(sessionId);
    const totalToolCalls = toolDistribution.reduce((sum, t) => sum + t.count, 0);
    // All main-thread tool calls (subagent calls excluded) ordered by time,
    // used for phase analysis below.
    const allCalls = db
        .prepare(`SELECT tool_name, timestamp FROM tool_calls WHERE session_id = ? AND subagent_id IS NULL ORDER BY timestamp`)
        .all(sessionId);
    // Phase analysis — group consecutive tools of the same category
    const phases = [];
    const CATEGORY_LABELS = {
        explore: "Exploration",
        research: "Research",
        write: "Writing",
        execute: "Execution",
        other: "Other",
    };
    if (allCalls.length > 0) {
        let currentCategory = getCategory(allCalls[0].tool_name);
        let phaseStart = allCalls[0].timestamp;
        let phaseToolCount = 1;
        for (let i = 1; i < allCalls.length; i++) {
            const cat = getCategory(allCalls[i].tool_name);
            if (cat !== currentCategory) {
                // Close current phase; its duration spans from its first to its
                // last call (the gap to the next phase's first call is excluded).
                const endTs = allCalls[i - 1].timestamp;
                phases.push({
                    name: CATEGORY_LABELS[currentCategory],
                    category: currentCategory,
                    toolCount: phaseToolCount,
                    durationMs: new Date(endTs).getTime() - new Date(phaseStart).getTime(),
                });
                currentCategory = cat;
                phaseStart = allCalls[i].timestamp;
                phaseToolCount = 1;
            }
            else {
                phaseToolCount++;
            }
        }
        // Close last phase
        const lastTs = allCalls[allCalls.length - 1].timestamp;
        phases.push({
            name: CATEGORY_LABELS[currentCategory],
            category: currentCategory,
            toolCount: phaseToolCount,
            durationMs: new Date(lastTs).getTime() - new Date(phaseStart).getTime(),
        });
    }
    // Aggregate consecutive-phase runs into per-category totals for the
    // summary. (Per-run detail is discarded; only category sums are returned.)
    const phaseSummary = new Map();
    for (const p of phases) {
        const existing = phaseSummary.get(p.category);
        if (existing) {
            existing.toolCount += p.toolCount;
            existing.durationMs += p.durationMs;
        }
        else {
            phaseSummary.set(p.category, { toolCount: p.toolCount, durationMs: p.durationMs });
        }
    }
    // Web fetch details
    const webRows = db
        .prepare(`SELECT tool_name, tool_input, tool_response, timestamp FROM tool_calls WHERE session_id = ? AND tool_name IN ('WebFetch', 'WebSearch') ORDER BY timestamp`)
        .all(sessionId);
    const webFetchDetails = [];
    let webErrors = 0;
    for (const row of webRows) {
        let url = "";
        let query = "";
        try {
            // WebFetch inputs carry url; WebSearch inputs carry query.
            const input = JSON.parse(row.tool_input ?? "{}");
            url = input.url ?? "";
            query = input.query ?? "";
        }
        catch {
            /* skip unparseable tool_input */
        }
        let isError = false;
        let errorDetail = "";
        if (!row.tool_response) {
            isError = true;
            errorDetail = "No response";
        }
        else {
            // Only flag as error if the response is clearly an error object, not just
            // content that happens to mention error-related words
            try {
                const resp = JSON.parse(row.tool_response);
                if (resp.error || resp.is_error === true) {
                    isError = true;
                    errorDetail = resp.error ?? resp.message ?? "Failed";
                }
            }
            catch {
                // Non-JSON response — check for short error-like responses only
                // (long responses are assumed to be real content).
                if (row.tool_response.length < 200) {
                    const lower = row.tool_response.toLowerCase();
                    if (lower.includes("error") || lower.includes("failed") || lower.includes("timeout")) {
                        isError = true;
                        errorDetail = row.tool_response.slice(0, 100);
                    }
                }
            }
        }
        if (isError)
            webErrors++;
        webFetchDetails.push({
            tool: row.tool_name,
            url: url || query || "(unknown)",
            timestamp: row.timestamp,
            isError,
            errorDetail,
        });
    }
    const session = db.prepare("SELECT duration_ms FROM sessions WHERE id = ?").get(sessionId);
    // Compaction events
    const compactionRows = db
        .prepare(`SELECT trigger, pre_tokens, timestamp FROM compactions WHERE session_id = ? ORDER BY timestamp`)
        .all(sessionId);
    return {
        fileReads,
        phases: [...phaseSummary.entries()].map(([cat, data]) => ({
            name: CATEGORY_LABELS[cat],
            category: cat,
            toolCount: data.toolCount,
            durationMs: data.durationMs,
        })),
        webFetches: {
            total: webRows.length,
            errors: webErrors,
            // successRate defaults to 1 (100%) when there were no web calls.
            successRate: webRows.length > 0 ? (webRows.length - webErrors) / webRows.length : 1,
            details: webFetchDetails,
        },
        toolDistribution,
        totalToolCalls,
        sessionDurationMs: session?.duration_ms ?? 0,
        compaction: {
            count: compactionRows.length,
            compactions: compactionRows.map((r) => ({
                timestamp: r.timestamp,
                trigger: r.trigger,
                preTokens: r.pre_tokens,
            })),
        },
    };
}
383
/** Returns the most recent sync timestamp (MAX(synced_at)) and the total session count. */
export function getSyncStatus(db) {
    return db
        .prepare("SELECT MAX(synced_at) as lastSyncedAt, COUNT(*) as sessionCount FROM sessions")
        .get();
}
@@ -0,0 +1 @@
1
/** Full SQLite DDL (tables + indexes + pragmas) run at startup; generated declaration — the readable source lives in db/schema.js. */
export declare const SCHEMA = "\nPRAGMA journal_mode = WAL;\nPRAGMA busy_timeout = 5000;\nPRAGMA synchronous = NORMAL;\nPRAGMA foreign_keys = ON;\n\nCREATE TABLE IF NOT EXISTS sessions (\n  id TEXT PRIMARY KEY,\n  project TEXT NOT NULL,\n  project_hash TEXT NOT NULL,\n  first_prompt TEXT,\n  model TEXT,\n  status TEXT NOT NULL DEFAULT 'completed',\n  input_tokens INTEGER NOT NULL DEFAULT 0,\n  output_tokens INTEGER NOT NULL DEFAULT 0,\n  cache_read_tokens INTEGER NOT NULL DEFAULT 0,\n  cache_create_tokens INTEGER NOT NULL DEFAULT 0,\n  estimated_cost_usd REAL NOT NULL DEFAULT 0,\n  message_count INTEGER NOT NULL DEFAULT 0,\n  tool_call_count INTEGER NOT NULL DEFAULT 0,\n  subagent_count INTEGER NOT NULL DEFAULT 0,\n  turn_count INTEGER NOT NULL DEFAULT 0,\n  peak_context_tokens INTEGER NOT NULL DEFAULT 0,\n  started_at TEXT,\n  ended_at TEXT,\n  duration_ms INTEGER,\n  synced_at TEXT NOT NULL,\n  jsonl_path TEXT NOT NULL,\n  jsonl_mtime TEXT NOT NULL\n);\n\nCREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project);\nCREATE INDEX IF NOT EXISTS idx_sessions_started ON sessions(started_at);\n\nCREATE TABLE IF NOT EXISTS tool_calls (\n  id INTEGER PRIMARY KEY AUTOINCREMENT,\n  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,\n  subagent_id TEXT,\n  tool_use_id TEXT,\n  tool_name TEXT NOT NULL,\n  tool_input TEXT,\n  tool_response TEXT,\n  status TEXT NOT NULL DEFAULT 'success',\n  timestamp TEXT NOT NULL\n);\n\nCREATE INDEX IF NOT EXISTS idx_tc_session ON tool_calls(session_id);\nCREATE INDEX IF NOT EXISTS idx_tc_tool ON tool_calls(tool_name);\n\nCREATE TABLE IF NOT EXISTS messages (\n  id INTEGER PRIMARY KEY AUTOINCREMENT,\n  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,\n  role TEXT NOT NULL,\n  content TEXT NOT NULL,\n  timestamp TEXT NOT NULL,\n  model TEXT,\n  cost_usd REAL\n);\n\nCREATE INDEX IF NOT EXISTS idx_msg_session ON messages(session_id);\n\nCREATE TABLE IF NOT EXISTS subagents (\n  id TEXT NOT NULL,\n  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,\n  agent_type TEXT,\n  model TEXT,\n  prompt TEXT,\n  input_tokens INTEGER NOT NULL DEFAULT 0,\n  output_tokens INTEGER NOT NULL DEFAULT 0,\n  cache_read_tokens INTEGER NOT NULL DEFAULT 0,\n  cache_create_tokens INTEGER NOT NULL DEFAULT 0,\n  estimated_cost_usd REAL NOT NULL DEFAULT 0,\n  tool_call_count INTEGER NOT NULL DEFAULT 0,\n  duration_ms INTEGER,\n  result_summary TEXT,\n  PRIMARY KEY (id, session_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_sub_session ON subagents(session_id);\n\nCREATE TABLE IF NOT EXISTS compactions (\n  id INTEGER PRIMARY KEY AUTOINCREMENT,\n  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,\n  trigger TEXT NOT NULL DEFAULT 'auto',\n  pre_tokens INTEGER NOT NULL DEFAULT 0,\n  timestamp TEXT NOT NULL\n);\n\nCREATE INDEX IF NOT EXISTS idx_compact_session ON compactions(session_id);\n";
@@ -0,0 +1,90 @@
1
// SQLite DDL for the claude-deck database: connection pragmas (WAL journaling,
// 5s busy timeout, NORMAL sync, enforced foreign keys) plus all tables and
// indexes. All statements are idempotent (IF NOT EXISTS).
// NOTE(review): since pragmas and DDL ship together, this string is presumably
// passed wholesale to db.exec() on open — confirm in db/index.js.
export const SCHEMA = `
PRAGMA journal_mode = WAL;
PRAGMA busy_timeout = 5000;
PRAGMA synchronous = NORMAL;
PRAGMA foreign_keys = ON;

CREATE TABLE IF NOT EXISTS sessions (
  id TEXT PRIMARY KEY,
  project TEXT NOT NULL,
  project_hash TEXT NOT NULL,
  first_prompt TEXT,
  model TEXT,
  status TEXT NOT NULL DEFAULT 'completed',
  input_tokens INTEGER NOT NULL DEFAULT 0,
  output_tokens INTEGER NOT NULL DEFAULT 0,
  cache_read_tokens INTEGER NOT NULL DEFAULT 0,
  cache_create_tokens INTEGER NOT NULL DEFAULT 0,
  estimated_cost_usd REAL NOT NULL DEFAULT 0,
  message_count INTEGER NOT NULL DEFAULT 0,
  tool_call_count INTEGER NOT NULL DEFAULT 0,
  subagent_count INTEGER NOT NULL DEFAULT 0,
  turn_count INTEGER NOT NULL DEFAULT 0,
  peak_context_tokens INTEGER NOT NULL DEFAULT 0,
  started_at TEXT,
  ended_at TEXT,
  duration_ms INTEGER,
  synced_at TEXT NOT NULL,
  jsonl_path TEXT NOT NULL,
  jsonl_mtime TEXT NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project);
CREATE INDEX IF NOT EXISTS idx_sessions_started ON sessions(started_at);

CREATE TABLE IF NOT EXISTS tool_calls (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
  subagent_id TEXT,
  tool_use_id TEXT,
  tool_name TEXT NOT NULL,
  tool_input TEXT,
  tool_response TEXT,
  status TEXT NOT NULL DEFAULT 'success',
  timestamp TEXT NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_tc_session ON tool_calls(session_id);
CREATE INDEX IF NOT EXISTS idx_tc_tool ON tool_calls(tool_name);

CREATE TABLE IF NOT EXISTS messages (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
  role TEXT NOT NULL,
  content TEXT NOT NULL,
  timestamp TEXT NOT NULL,
  model TEXT,
  cost_usd REAL
);

CREATE INDEX IF NOT EXISTS idx_msg_session ON messages(session_id);

CREATE TABLE IF NOT EXISTS subagents (
  id TEXT NOT NULL,
  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
  agent_type TEXT,
  model TEXT,
  prompt TEXT,
  input_tokens INTEGER NOT NULL DEFAULT 0,
  output_tokens INTEGER NOT NULL DEFAULT 0,
  cache_read_tokens INTEGER NOT NULL DEFAULT 0,
  cache_create_tokens INTEGER NOT NULL DEFAULT 0,
  estimated_cost_usd REAL NOT NULL DEFAULT 0,
  tool_call_count INTEGER NOT NULL DEFAULT 0,
  duration_ms INTEGER,
  result_summary TEXT,
  PRIMARY KEY (id, session_id)
);

CREATE INDEX IF NOT EXISTS idx_sub_session ON subagents(session_id);

CREATE TABLE IF NOT EXISTS compactions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
  trigger TEXT NOT NULL DEFAULT 'auto',
  pre_tokens INTEGER NOT NULL DEFAULT 0,
  timestamp TEXT NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_compact_session ON compactions(session_id);
`;
@@ -0,0 +1,3 @@
1
import type { Usage } from "../types.js";
/** Estimates USD cost for one API call's token usage under the given model's per-million-token pricing (unknown models fall back to sonnet pricing). */
export declare function estimateCost(model: string, usage: Usage): number;
/** Collapses a full model ID to its family ("opus" | "sonnet" | "haiku"); IDs matching no family are returned unchanged. */
export declare function shortModelName(model: string): string;
@@ -0,0 +1,88 @@
1
// Prices per million tokens — from https://platform.claude.com/docs/en/about-claude/pricing
// Cache read = 0.1x input, cache write 5min = 1.25x input, cache write 1hr = 2x input
const PRICING = {
    // Current models
    "claude-opus-4-6": { input: 5, output: 25, cacheRead: 0.5, cacheWrite5m: 6.25, cacheWrite1h: 10 },
    "claude-opus-4-5": { input: 5, output: 25, cacheRead: 0.5, cacheWrite5m: 6.25, cacheWrite1h: 10 },
    "claude-sonnet-4-6": { input: 3, output: 15, cacheRead: 0.3, cacheWrite5m: 3.75, cacheWrite1h: 6 },
    "claude-sonnet-4-5": { input: 3, output: 15, cacheRead: 0.3, cacheWrite5m: 3.75, cacheWrite1h: 6 },
    "claude-sonnet-4": { input: 3, output: 15, cacheRead: 0.3, cacheWrite5m: 3.75, cacheWrite1h: 6 },
    "claude-haiku-4-5": { input: 1, output: 5, cacheRead: 0.1, cacheWrite5m: 1.25, cacheWrite1h: 2 },
    // Older models
    "claude-opus-4-1": { input: 15, output: 75, cacheRead: 1.5, cacheWrite5m: 18.75, cacheWrite1h: 30 },
    "claude-opus-4": { input: 15, output: 75, cacheRead: 1.5, cacheWrite5m: 18.75, cacheWrite1h: 30 },
    "claude-sonnet-4-5-20250514": { input: 3, output: 15, cacheRead: 0.3, cacheWrite5m: 3.75, cacheWrite1h: 6 },
    "claude-3-5-sonnet-20241022": { input: 3, output: 15, cacheRead: 0.3, cacheWrite5m: 3.75, cacheWrite1h: 6 },
    "claude-3-5-haiku-20241022": { input: 0.8, output: 4, cacheRead: 0.08, cacheWrite5m: 1, cacheWrite1h: 1.6 },
    "claude-haiku-3": { input: 0.25, output: 1.25, cacheRead: 0.03, cacheWrite5m: 0.3, cacheWrite1h: 0.5 },
};
/** Strip a trailing date suffix like "-20251001" from a model ID. */
function stripDateSuffix(model) {
    return model.replace(/-\d{8,}$/, "");
}
/**
 * Estimates the USD cost of one API call's token usage.
 *
 * Pricing resolution: exact model ID → date-stripped ID → sonnet pricing as
 * the fallback for unknown models. Cache-write cost prefers the granular
 * 5-minute/1-hour breakdown in usage.cache_creation; otherwise the legacy
 * aggregate counter is billed at the 5-minute rate.
 */
export function estimateCost(model, usage) {
    const rates = PRICING[model] ?? PRICING[stripDateSuffix(model)] ?? PRICING["claude-sonnet-4-6"];
    const cacheReads = usage.cache_read_input_tokens ?? 0;
    const writes5m = usage.cache_creation?.ephemeral_5m_input_tokens ?? 0;
    const writes1h = usage.cache_creation?.ephemeral_1h_input_tokens ?? 0;
    let cacheWriteCost;
    if (writes5m + writes1h > 0) {
        cacheWriteCost = writes5m * rates.cacheWrite5m + writes1h * rates.cacheWrite1h;
    }
    else {
        cacheWriteCost = (usage.cache_creation_input_tokens ?? 0) * rates.cacheWrite5m;
    }
    const costPerMTok = usage.input_tokens * rates.input
        + usage.output_tokens * rates.output
        + cacheReads * rates.cacheRead
        + cacheWriteCost;
    return costPerMTok / 1_000_000;
}
80
/**
 * Collapses a full model ID to its family name ("opus", "sonnet", or
 * "haiku"); IDs matching no family are returned unchanged.
 */
export function shortModelName(model) {
    for (const family of ["opus", "sonnet", "haiku"]) {
        if (model.includes(family))
            return family;
    }
    return model;
}
@@ -0,0 +1,9 @@
1
import type Database from "better-sqlite3";
/** Outcome counters for one full sync pass over ~/.claude/projects. */
interface SyncResult {
    // sessions (re)parsed and written to the database this pass
    parsed: number;
    // sessions skipped because their JSONL mtime matched the stored value
    skipped: number;
    // JSONL files that failed to parse (logged, not fatal)
    errors: number;
    // total rows in the sessions table after the pass
    totalSessions: number;
}
export declare function syncAll(db: Database.Database, claudeDir: string): Promise<SyncResult>;
export {};
@@ -0,0 +1,89 @@
1
+ import { readdirSync, statSync, existsSync } from "fs";
2
+ import { join, basename } from "path";
3
+ import { parseSessionJsonl } from "./session-parser.js";
4
+ import { parseSubagentJsonl } from "./subagent-parser.js";
5
+ import { upsertSession } from "../db/queries.js";
6
+ import { getSessionMtime } from "../db/queries.js";
7
/**
 * Derives a short display name from a ~/.claude/projects directory name.
 *
 * Directory names encode the project path with "/" replaced by "-"
 * (e.g. "-Users-alice-work-myproj" → /Users/alice/work/myproj); the last two
 * path segments are returned for display.
 *
 * NOTE(review): hyphens inside real path segments (e.g. "my-proj") are
 * indistinguishable from separators here and decode as extra path levels.
 */
function decodeProjectPath(dirName) {
    const segments = dirName.split("-").filter(Boolean);
    if (segments.length >= 2)
        return segments.slice(-2).join("/");
    return segments.join("/") || dirName;
}
16
/**
 * Scans ~/.claude/projects for session JSONL transcripts and syncs them into
 * the database.
 *
 * Each top-level *.jsonl file inside a project directory is one session (file
 * name = session ID). Files whose mtime matches the stored jsonl_mtime are
 * skipped; others are re-parsed — including any <session>/subagents/*.jsonl
 * transcripts — and upserted. Parse failures are counted and logged but do
 * not abort the scan.
 *
 * @param db        better-sqlite3 Database handle
 * @param claudeDir path to the ~/.claude directory
 * @returns { parsed, skipped, errors, totalSessions }
 */
export async function syncAll(db, claudeDir) {
    const projectsDir = join(claudeDir, "projects");
    if (!existsSync(projectsDir)) {
        return { parsed: 0, skipped: 0, errors: 0, totalSessions: 0 };
    }
    let parsed = 0;
    let skipped = 0;
    let errors = 0;
    const projectDirs = readdirSync(projectsDir, { withFileTypes: true }).filter((d) => d.isDirectory());
    for (const projectDir of projectDirs) {
        const projectPath = join(projectsDir, projectDir.name);
        const project = decodeProjectPath(projectDir.name);
        // The raw directory name doubles as a stable project key.
        const projectHash = projectDir.name;
        // Find JSONL files at depth 1
        let entries;
        try {
            entries = readdirSync(projectPath, { withFileTypes: true });
        }
        catch {
            // Unreadable project directory — skip it silently.
            continue;
        }
        for (const entry of entries) {
            if (!entry.name.endsWith(".jsonl"))
                continue;
            if (!entry.isFile())
                continue;
            const jsonlPath = join(projectPath, entry.name);
            const sessionId = basename(entry.name, ".jsonl");
            try {
                const stat = statSync(jsonlPath);
                const mtime = stat.mtime.toISOString();
                // Skip if not modified since last sync
                const existingMtime = getSessionMtime(db, sessionId);
                if (existingMtime === mtime) {
                    skipped++;
                    continue;
                }
                // Parse session
                const session = await parseSessionJsonl(jsonlPath, sessionId, project, projectHash, mtime);
                // Parse subagents, folding their rows and tool calls into the session
                const subagentDir = join(projectPath, sessionId, "subagents");
                if (existsSync(subagentDir)) {
                    try {
                        const subFiles = readdirSync(subagentDir).filter((f) => f.endsWith(".jsonl"));
                        for (const subFile of subFiles) {
                            const agentId = basename(subFile, ".jsonl");
                            try {
                                const { subagent, toolCalls: subToolCalls } = await parseSubagentJsonl(join(subagentDir, subFile), agentId, sessionId);
                                session.subagents.push(subagent);
                                session.toolCalls.push(...subToolCalls);
                            }
                            catch {
                                // Skip malformed subagent files
                            }
                        }
                    }
                    catch {
                        // Skip if can't read subagent dir
                    }
                }
                session.subagentCount = session.subagents.length;
                // Store in DB (full replace — see upsertSession)
                upsertSession(db, session);
                parsed++;
            }
            catch (err) {
                errors++;
                console.error(`Error parsing ${jsonlPath}:`, err);
            }
        }
    }
    const totalRow = db.prepare("SELECT COUNT(*) as count FROM sessions").get();
    return { parsed, skipped, errors, totalSessions: totalRow.count };
}
@@ -0,0 +1,2 @@
1
import type { ParsedSession } from "../types.js";
/** Parses one session JSONL transcript into a ParsedSession (consumed by upsertSession during sync); jsonlMtime is stored for change detection on later syncs. */
export declare function parseSessionJsonl(jsonlPath: string, sessionId: string, project: string, projectHash: string, jsonlMtime: string): Promise<ParsedSession>;