zozul-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +44 -0
- package/.github/workflows/publish.yml +26 -0
- package/DEVELOPMENT.md +288 -0
- package/LICENSE +201 -0
- package/README.md +178 -0
- package/dist/cli/commands.d.ts +3 -0
- package/dist/cli/commands.d.ts.map +1 -0
- package/dist/cli/commands.js +307 -0
- package/dist/cli/commands.js.map +1 -0
- package/dist/cli/format.d.ts +5 -0
- package/dist/cli/format.d.ts.map +1 -0
- package/dist/cli/format.js +115 -0
- package/dist/cli/format.js.map +1 -0
- package/dist/context/index.d.ts +8 -0
- package/dist/context/index.d.ts.map +1 -0
- package/dist/context/index.js +37 -0
- package/dist/context/index.js.map +1 -0
- package/dist/dashboard/html.d.ts +17 -0
- package/dist/dashboard/html.d.ts.map +1 -0
- package/dist/dashboard/html.js +79 -0
- package/dist/dashboard/html.js.map +1 -0
- package/dist/dashboard/index.html +1245 -0
- package/dist/hooks/config.d.ts +19 -0
- package/dist/hooks/config.d.ts.map +1 -0
- package/dist/hooks/config.js +106 -0
- package/dist/hooks/config.js.map +1 -0
- package/dist/hooks/git.d.ts +6 -0
- package/dist/hooks/git.d.ts.map +1 -0
- package/dist/hooks/git.js +73 -0
- package/dist/hooks/git.js.map +1 -0
- package/dist/hooks/index.d.ts +4 -0
- package/dist/hooks/index.d.ts.map +1 -0
- package/dist/hooks/index.js +3 -0
- package/dist/hooks/index.js.map +1 -0
- package/dist/hooks/server.d.ts +16 -0
- package/dist/hooks/server.d.ts.map +1 -0
- package/dist/hooks/server.js +349 -0
- package/dist/hooks/server.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +6 -0
- package/dist/index.js.map +1 -0
- package/dist/otel/config.d.ts +36 -0
- package/dist/otel/config.d.ts.map +1 -0
- package/dist/otel/config.js +109 -0
- package/dist/otel/config.js.map +1 -0
- package/dist/otel/index.d.ts +4 -0
- package/dist/otel/index.d.ts.map +1 -0
- package/dist/otel/index.js +3 -0
- package/dist/otel/index.js.map +1 -0
- package/dist/otel/receiver.d.ts +10 -0
- package/dist/otel/receiver.d.ts.map +1 -0
- package/dist/otel/receiver.js +155 -0
- package/dist/otel/receiver.js.map +1 -0
- package/dist/parser/index.d.ts +4 -0
- package/dist/parser/index.d.ts.map +1 -0
- package/dist/parser/index.js +3 -0
- package/dist/parser/index.js.map +1 -0
- package/dist/parser/ingest.d.ts +20 -0
- package/dist/parser/ingest.d.ts.map +1 -0
- package/dist/parser/ingest.js +98 -0
- package/dist/parser/ingest.js.map +1 -0
- package/dist/parser/jsonl.d.ts +14 -0
- package/dist/parser/jsonl.d.ts.map +1 -0
- package/dist/parser/jsonl.js +202 -0
- package/dist/parser/jsonl.js.map +1 -0
- package/dist/parser/types.d.ts +81 -0
- package/dist/parser/types.d.ts.map +1 -0
- package/dist/parser/types.js +9 -0
- package/dist/parser/types.js.map +1 -0
- package/dist/parser/watcher.d.ts +16 -0
- package/dist/parser/watcher.d.ts.map +1 -0
- package/dist/parser/watcher.js +103 -0
- package/dist/parser/watcher.js.map +1 -0
- package/dist/pricing/index.d.ts +2 -0
- package/dist/pricing/index.d.ts.map +1 -0
- package/dist/pricing/index.js +37 -0
- package/dist/pricing/index.js.map +1 -0
- package/dist/service/index.d.ts +31 -0
- package/dist/service/index.d.ts.map +1 -0
- package/dist/service/index.js +252 -0
- package/dist/service/index.js.map +1 -0
- package/dist/storage/db.d.ts +75 -0
- package/dist/storage/db.d.ts.map +1 -0
- package/dist/storage/db.js +117 -0
- package/dist/storage/db.js.map +1 -0
- package/dist/storage/index.d.ts +4 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/storage/index.js +3 -0
- package/dist/storage/index.js.map +1 -0
- package/dist/storage/repo.d.ts +162 -0
- package/dist/storage/repo.d.ts.map +1 -0
- package/dist/storage/repo.js +472 -0
- package/dist/storage/repo.js.map +1 -0
- package/dist/sync/client.d.ts +24 -0
- package/dist/sync/client.d.ts.map +1 -0
- package/dist/sync/client.js +41 -0
- package/dist/sync/client.js.map +1 -0
- package/dist/sync/index.d.ts +18 -0
- package/dist/sync/index.d.ts.map +1 -0
- package/dist/sync/index.js +135 -0
- package/dist/sync/index.js.map +1 -0
- package/dist/sync/sync.test.d.ts +2 -0
- package/dist/sync/sync.test.d.ts.map +1 -0
- package/dist/sync/sync.test.js +412 -0
- package/dist/sync/sync.test.js.map +1 -0
- package/dist/sync/transform.d.ts +80 -0
- package/dist/sync/transform.d.ts.map +1 -0
- package/dist/sync/transform.js +90 -0
- package/dist/sync/transform.js.map +1 -0
- package/package.json +50 -0
- package/src/cli/commands.ts +332 -0
- package/src/cli/format.ts +133 -0
- package/src/context/index.ts +42 -0
- package/src/dashboard/html.ts +97 -0
- package/src/dashboard/index.html +1245 -0
- package/src/hooks/config.ts +119 -0
- package/src/hooks/git.ts +77 -0
- package/src/hooks/index.ts +7 -0
- package/src/hooks/server.ts +397 -0
- package/src/index.ts +6 -0
- package/src/otel/config.ts +141 -0
- package/src/otel/index.ts +8 -0
- package/src/otel/receiver.ts +183 -0
- package/src/parser/index.ts +3 -0
- package/src/parser/ingest.ts +119 -0
- package/src/parser/jsonl.ts +241 -0
- package/src/parser/types.ts +89 -0
- package/src/parser/watcher.ts +116 -0
- package/src/pricing/index.ts +51 -0
- package/src/service/index.ts +272 -0
- package/src/storage/db.ts +198 -0
- package/src/storage/index.ts +3 -0
- package/src/storage/repo.ts +601 -0
- package/src/sync/client.ts +63 -0
- package/src/sync/index.ts +207 -0
- package/src/sync/sync.test.ts +447 -0
- package/src/sync/transform.ts +184 -0
- package/tsconfig.json +19 -0
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
|
|
5
|
+
export interface OtelConfig {
|
|
6
|
+
/** OTLP endpoint, e.g. http://localhost:4317 */
|
|
7
|
+
endpoint: string;
|
|
8
|
+
/** Protocol: grpc, http/json, or http/protobuf */
|
|
9
|
+
protocol: "grpc" | "http/json" | "http/protobuf";
|
|
10
|
+
/** Whether to log user prompt content */
|
|
11
|
+
logUserPrompts: boolean;
|
|
12
|
+
/** Whether to log MCP/tool detail names */
|
|
13
|
+
logToolDetails: boolean;
|
|
14
|
+
/** Metrics export interval in ms */
|
|
15
|
+
metricsInterval: number;
|
|
16
|
+
/** Logs export interval in ms */
|
|
17
|
+
logsInterval: number;
|
|
18
|
+
/** Optional auth header */
|
|
19
|
+
authHeader?: string;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
const DEFAULT_CONFIG: OtelConfig = {
|
|
23
|
+
endpoint: "http://localhost:7890",
|
|
24
|
+
protocol: "http/json",
|
|
25
|
+
logUserPrompts: true,
|
|
26
|
+
logToolDetails: true,
|
|
27
|
+
metricsInterval: 60000,
|
|
28
|
+
logsInterval: 5000,
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Generate the set of environment variables needed to enable
|
|
33
|
+
* Claude Code's built-in OpenTelemetry export.
|
|
34
|
+
*/
|
|
35
|
+
export function generateOtelEnvVars(config: Partial<OtelConfig> = {}): Record<string, string> {
|
|
36
|
+
const cfg = { ...DEFAULT_CONFIG, ...config };
|
|
37
|
+
|
|
38
|
+
const env: Record<string, string> = {
|
|
39
|
+
CLAUDE_CODE_ENABLE_TELEMETRY: "1",
|
|
40
|
+
OTEL_METRICS_EXPORTER: "otlp",
|
|
41
|
+
OTEL_LOGS_EXPORTER: "otlp",
|
|
42
|
+
OTEL_EXPORTER_OTLP_PROTOCOL: cfg.protocol,
|
|
43
|
+
OTEL_EXPORTER_OTLP_ENDPOINT: cfg.endpoint,
|
|
44
|
+
OTEL_METRIC_EXPORT_INTERVAL: String(cfg.metricsInterval),
|
|
45
|
+
OTEL_LOGS_EXPORT_INTERVAL: String(cfg.logsInterval),
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
if (cfg.logUserPrompts) {
|
|
49
|
+
env.OTEL_LOG_USER_PROMPTS = "1";
|
|
50
|
+
}
|
|
51
|
+
if (cfg.logToolDetails) {
|
|
52
|
+
env.OTEL_LOG_TOOL_DETAILS = "1";
|
|
53
|
+
}
|
|
54
|
+
if (cfg.authHeader) {
|
|
55
|
+
env.OTEL_EXPORTER_OTLP_HEADERS = cfg.authHeader;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
return env;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Generate a shell script that exports all required OTEL env vars.
|
|
63
|
+
*/
|
|
64
|
+
export function generateOtelShellExports(config: Partial<OtelConfig> = {}): string {
|
|
65
|
+
const env = generateOtelEnvVars(config);
|
|
66
|
+
const lines = Object.entries(env).map(
|
|
67
|
+
([key, value]) => `export ${key}="${value}"`,
|
|
68
|
+
);
|
|
69
|
+
return lines.join("\n") + "\n";
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Write the OTEL env vars into Claude Code's settings.json under the "env" key.
|
|
74
|
+
*/
|
|
75
|
+
export function installOtelToSettings(config: Partial<OtelConfig> = {}): { path: string } {
|
|
76
|
+
const settingsPath = path.join(os.homedir(), ".claude", "settings.json");
|
|
77
|
+
let existing: Record<string, unknown> = {};
|
|
78
|
+
|
|
79
|
+
if (fs.existsSync(settingsPath)) {
|
|
80
|
+
try {
|
|
81
|
+
existing = JSON.parse(fs.readFileSync(settingsPath, "utf-8"));
|
|
82
|
+
} catch {
|
|
83
|
+
existing = {};
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
const envVars = generateOtelEnvVars(config);
|
|
88
|
+
const existingEnv = (existing.env ?? {}) as Record<string, string>;
|
|
89
|
+
existing.env = { ...existingEnv, ...envVars };
|
|
90
|
+
|
|
91
|
+
fs.mkdirSync(path.dirname(settingsPath), { recursive: true });
|
|
92
|
+
fs.writeFileSync(settingsPath, JSON.stringify(existing, null, 2) + "\n");
|
|
93
|
+
|
|
94
|
+
return { path: settingsPath };
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
/**
|
|
98
|
+
* Remove zozul OTEL env vars from Claude Code's settings.json.
|
|
99
|
+
*/
|
|
100
|
+
export function uninstallOtelFromSettings(): boolean {
|
|
101
|
+
const settingsPath = path.join(os.homedir(), ".claude", "settings.json");
|
|
102
|
+
if (!fs.existsSync(settingsPath)) return false;
|
|
103
|
+
|
|
104
|
+
let existing: Record<string, unknown>;
|
|
105
|
+
try {
|
|
106
|
+
existing = JSON.parse(fs.readFileSync(settingsPath, "utf-8"));
|
|
107
|
+
} catch {
|
|
108
|
+
return false;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
const env = existing.env as Record<string, string> | undefined;
|
|
112
|
+
if (!env) return false;
|
|
113
|
+
|
|
114
|
+
const otelKeys = [
|
|
115
|
+
"CLAUDE_CODE_ENABLE_TELEMETRY",
|
|
116
|
+
"OTEL_METRICS_EXPORTER",
|
|
117
|
+
"OTEL_LOGS_EXPORTER",
|
|
118
|
+
"OTEL_EXPORTER_OTLP_PROTOCOL",
|
|
119
|
+
"OTEL_EXPORTER_OTLP_ENDPOINT",
|
|
120
|
+
"OTEL_METRIC_EXPORT_INTERVAL",
|
|
121
|
+
"OTEL_LOGS_EXPORT_INTERVAL",
|
|
122
|
+
"OTEL_LOG_USER_PROMPTS",
|
|
123
|
+
"OTEL_LOG_TOOL_DETAILS",
|
|
124
|
+
"OTEL_EXPORTER_OTLP_HEADERS",
|
|
125
|
+
];
|
|
126
|
+
|
|
127
|
+
let removed = false;
|
|
128
|
+
for (const key of otelKeys) {
|
|
129
|
+
if (key in env) {
|
|
130
|
+
delete env[key];
|
|
131
|
+
removed = true;
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
if (Object.keys(env).length === 0) {
|
|
136
|
+
delete existing.env;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
fs.writeFileSync(settingsPath, JSON.stringify(existing, null, 2) + "\n");
|
|
140
|
+
return removed;
|
|
141
|
+
}
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import type { SessionRepo } from "../storage/repo.js";
|
|
2
|
+
|
|
3
|
+
/**
 * Parse OTLP JSON metrics export and store in SQLite.
 *
 * Claude Code sends metrics in the standard OTLP JSON format:
 * { resourceMetrics: [{ scopeMetrics: [{ metrics: [...] }] }] }
 *
 * Each metric has data points with attributes, timestamps, and values.
 * (This header documents handleOtlpMetrics below; SessionDelta is the
 * per-session accumulator it builds while walking the data points.)
 */
type SessionDelta = {
  // Increment to the session's total cost (USD), from claude_code.cost.usage.
  costDelta: number;
  // Token increments by type, from claude_code.token.usage data points.
  inputDelta: number;
  outputDelta: number;
  cacheReadDelta: number;
  cacheCreationDelta: number;
  // Active-time increment in milliseconds (metric reports seconds).
  durationMsDelta: number;
  // Latest ISO-8601 timestamp seen for this session in the batch.
  latestTimestamp: string;
  // Most recent model attribute observed, if any.
  model: string | null;
};
|
|
21
|
+
|
|
22
|
+
/**
 * Ingest one OTLP/JSON metrics request body: store every data point via
 * repo.insertOtelMetricBatch and roll per-session deltas (cost, tokens,
 * active time) into the sessions table via repo.updateSessionFromOtel.
 *
 * @param body    raw request body; must be OTLP JSON. NOTE(review):
 *                JSON.parse throws on malformed input — presumably the
 *                HTTP handler catches this; confirm at the call site.
 * @param repo    storage repository receiving the batch and deltas.
 * @param verbose when true, writes a summary line to stderr.
 * @returns number of metric data points stored.
 */
export function handleOtlpMetrics(body: string, repo: SessionRepo, verbose?: boolean): number {
  const payload = JSON.parse(body);
  const resourceMetrics = payload.resourceMetrics ?? [];
  const batch: Parameters<typeof repo.insertOtelMetricBatch>[0] = [];
  const sessionDeltas = new Map<string, SessionDelta>();

  for (const rm of resourceMetrics) {
    // Resource-level attributes apply to every data point beneath them.
    const resourceAttrs = flattenAttributes(rm.resource?.attributes);

    for (const sm of rm.scopeMetrics ?? []) {
      for (const metric of sm.metrics ?? []) {
        const metricName: string = metric.name ?? "unknown";
        const dataPoints = extractDataPoints(metric);

        for (const dp of dataPoints) {
          // Data-point attributes override resource attributes on key clash.
          const attrs = flattenAttributes(dp.attributes as unknown[] | undefined);
          const merged = { ...resourceAttrs, ...attrs };

          // OTLP encodes the value as asDouble or asInt (possibly a string).
          const rawValue = dp.asDouble ?? dp.asInt ?? dp.value ?? 0;
          const value = typeof rawValue === "string" ? parseFloat(rawValue) : Number(rawValue);
          const ts = nanoToIso(dp.timeUnixNano as string | number | undefined ?? dp.startTimeUnixNano as string | number | undefined);

          batch.push({
            name: metricName,
            value,
            attributes: JSON.stringify(merged),
            session_id: merged["session.id"] ?? null,
            model: merged["model"] ?? null,
            timestamp: ts,
          });

          // Accumulate per-session deltas to keep sessions table current
          const sid = merged["session.id"];
          if (sid) {
            if (!sessionDeltas.has(sid)) {
              sessionDeltas.set(sid, {
                costDelta: 0, inputDelta: 0, outputDelta: 0,
                cacheReadDelta: 0, cacheCreationDelta: 0, durationMsDelta: 0,
                latestTimestamp: ts, model: merged["model"] ?? null,
              });
            }
            const d = sessionDeltas.get(sid)!;
            // String comparison is safe: nanoToIso emits ISO-8601 UTC strings,
            // which sort lexicographically in chronological order.
            if (ts > d.latestTimestamp) d.latestTimestamp = ts;
            if (merged["model"]) d.model = merged["model"];

            if (metricName === "claude_code.cost.usage") {
              d.costDelta += value;
            } else if (metricName === "claude_code.token.usage") {
              // "type" attribute distinguishes the four token counters.
              const type = merged["type"];
              if (type === "input") d.inputDelta += value;
              else if (type === "output") d.outputDelta += value;
              else if (type === "cacheRead") d.cacheReadDelta += value;
              else if (type === "cacheCreation") d.cacheCreationDelta += value;
            } else if (metricName === "claude_code.active_time.total") {
              d.durationMsDelta += value * 1000; // seconds → ms
            }
          }
        }
      }
    }
  }

  if (batch.length > 0) {
    repo.insertOtelMetricBatch(batch);
    for (const [sessionId, deltas] of sessionDeltas) {
      repo.updateSessionFromOtel(sessionId, deltas);
    }
    if (verbose) {
      process.stderr.write(` otel: stored ${batch.length} metric data points across ${sessionDeltas.size} session(s)\n`);
    }
  }

  return batch.length;
}
|
|
96
|
+
|
|
97
|
+
/**
|
|
98
|
+
* Parse OTLP JSON logs/events export and store in SQLite.
|
|
99
|
+
*
|
|
100
|
+
* Claude Code sends events in the standard OTLP JSON logs format:
|
|
101
|
+
* { resourceLogs: [{ scopeLogs: [{ logRecords: [...] }] }] }
|
|
102
|
+
*/
|
|
103
|
+
export function handleOtlpLogs(body: string, repo: SessionRepo, verbose?: boolean): number {
|
|
104
|
+
const payload = JSON.parse(body);
|
|
105
|
+
const resourceLogs = payload.resourceLogs ?? [];
|
|
106
|
+
const batch: Parameters<typeof repo.insertOtelEventBatch>[0] = [];
|
|
107
|
+
|
|
108
|
+
for (const rl of resourceLogs) {
|
|
109
|
+
const resourceAttrs = flattenAttributes(rl.resource?.attributes);
|
|
110
|
+
|
|
111
|
+
for (const sl of rl.scopeLogs ?? []) {
|
|
112
|
+
for (const record of sl.logRecords ?? []) {
|
|
113
|
+
const attrs = flattenAttributes(record.attributes);
|
|
114
|
+
const merged = { ...resourceAttrs, ...attrs };
|
|
115
|
+
|
|
116
|
+
const eventName = merged["event.name"]
|
|
117
|
+
?? record.body?.stringValue
|
|
118
|
+
?? "unknown";
|
|
119
|
+
const ts = nanoToIso(record.timeUnixNano ?? record.observedTimeUnixNano);
|
|
120
|
+
|
|
121
|
+
batch.push({
|
|
122
|
+
event_name: eventName,
|
|
123
|
+
attributes: JSON.stringify(merged),
|
|
124
|
+
session_id: merged["session.id"] ?? null,
|
|
125
|
+
prompt_id: merged["prompt.id"] ?? null,
|
|
126
|
+
timestamp: ts,
|
|
127
|
+
});
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
if (batch.length > 0) {
|
|
133
|
+
repo.insertOtelEventBatch(batch);
|
|
134
|
+
if (verbose) {
|
|
135
|
+
process.stderr.write(` otel: stored ${batch.length} log events\n`);
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
return batch.length;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
/**
|
|
143
|
+
* OTLP metrics contain data inside different structures depending on type.
|
|
144
|
+
* Extract the data points array from whichever wrapper is present.
|
|
145
|
+
*/
|
|
146
|
+
function extractDataPoints(metric: Record<string, unknown>): Record<string, unknown>[] {
|
|
147
|
+
if (metric.sum) return (metric.sum as Record<string, unknown>).dataPoints as Record<string, unknown>[] ?? [];
|
|
148
|
+
if (metric.gauge) return (metric.gauge as Record<string, unknown>).dataPoints as Record<string, unknown>[] ?? [];
|
|
149
|
+
if (metric.histogram) return (metric.histogram as Record<string, unknown>).dataPoints as Record<string, unknown>[] ?? [];
|
|
150
|
+
if (metric.summary) return (metric.summary as Record<string, unknown>).dataPoints as Record<string, unknown>[] ?? [];
|
|
151
|
+
return [];
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
/**
|
|
155
|
+
* OTLP attributes are arrays of { key, value: { stringValue|intValue|... } }.
|
|
156
|
+
* Flatten them into a plain { key: value } object.
|
|
157
|
+
*/
|
|
158
|
+
function flattenAttributes(attrs: unknown[] | undefined): Record<string, string> {
|
|
159
|
+
const result: Record<string, string> = {};
|
|
160
|
+
if (!Array.isArray(attrs)) return result;
|
|
161
|
+
|
|
162
|
+
for (const attr of attrs) {
|
|
163
|
+
const a = attr as { key: string; value?: Record<string, unknown> };
|
|
164
|
+
if (!a.key || !a.value) continue;
|
|
165
|
+
|
|
166
|
+
const v = a.value.stringValue
|
|
167
|
+
?? a.value.intValue
|
|
168
|
+
?? a.value.doubleValue
|
|
169
|
+
?? a.value.boolValue;
|
|
170
|
+
|
|
171
|
+
if (v !== undefined && v !== null) {
|
|
172
|
+
result[a.key] = String(v);
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
return result;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
function nanoToIso(nanos: string | number | undefined): string {
|
|
180
|
+
if (!nanos) return new Date().toISOString();
|
|
181
|
+
const ms = typeof nanos === "string" ? parseInt(nanos, 10) / 1_000_000 : nanos / 1_000_000;
|
|
182
|
+
return new Date(ms).toISOString();
|
|
183
|
+
}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import path from "node:path";

import { getActiveContext } from "../context/index.js";
import type { SessionRepo } from "../storage/repo.js";
import { discoverSessionFiles, parseSessionFile } from "./jsonl.js";
import type { ParsedSession } from "./types.js";
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Ingest all discovered session JSONL files into the database.
|
|
8
|
+
* Skips sessions that already exist unless force=true.
|
|
9
|
+
*/
|
|
10
|
+
export async function ingestAllSessions(
|
|
11
|
+
repo: SessionRepo,
|
|
12
|
+
opts: { force?: boolean; noTag?: boolean } = {},
|
|
13
|
+
): Promise<{ ingested: number; skipped: number }> {
|
|
14
|
+
const files = discoverSessionFiles();
|
|
15
|
+
let ingested = 0;
|
|
16
|
+
let skipped = 0;
|
|
17
|
+
|
|
18
|
+
for (const { filePath, projectPath } of files) {
|
|
19
|
+
const parsed = await parseSessionFile(filePath, projectPath);
|
|
20
|
+
|
|
21
|
+
if (!opts.force) {
|
|
22
|
+
const existing = repo.getSession(parsed.sessionId);
|
|
23
|
+
if (existing) {
|
|
24
|
+
skipped++;
|
|
25
|
+
continue;
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
persistSession(repo, parsed, { noTag: opts.noTag });
|
|
30
|
+
ingested++;
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
return { ingested, skipped };
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Ingest a single session file by path.
|
|
38
|
+
*/
|
|
39
|
+
export async function ingestSessionFile(
|
|
40
|
+
repo: SessionRepo,
|
|
41
|
+
filePath: string,
|
|
42
|
+
projectPath?: string,
|
|
43
|
+
opts: { noTag?: boolean } = {},
|
|
44
|
+
): Promise<ParsedSession> {
|
|
45
|
+
const parsed = await parseSessionFile(filePath, projectPath);
|
|
46
|
+
persistSession(repo, parsed, opts);
|
|
47
|
+
return parsed;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/**
 * Write one parsed session into storage: upsert the session row, insert
 * every turn, replace each turn's tool uses, and (unless opts.noTag)
 * tag turns with the currently active context tags.
 */
function persistSession(repo: SessionRepo, parsed: ParsedSession, opts: { noTag?: boolean } = {}): void {
  // Session-level aggregates map 1:1 from the parsed totals.
  repo.upsertSession({
    id: parsed.sessionId,
    project_path: parsed.projectPath,
    started_at: parsed.startedAt,
    ended_at: parsed.endedAt,
    total_input_tokens: parsed.totalInputTokens,
    total_output_tokens: parsed.totalOutputTokens,
    total_cache_read_tokens: parsed.totalCacheReadTokens,
    total_cache_creation_tokens: parsed.totalCacheCreationTokens,
    total_cost_usd: parsed.totalCostUsd,
    total_turns: parsed.turns.length,
    total_duration_ms: parsed.totalDurationMs,
    model: parsed.model,
  });

  // Collected in parsed.turns order so index i corresponds to parsed.turns[i]
  // (relied upon by the tagging filter below).
  const turnIds: number[] = [];

  for (const turn of parsed.turns) {
    const turnId = repo.insertTurn({
      session_id: parsed.sessionId,
      turn_index: turn.turnIndex,
      role: turn.role,
      timestamp: turn.timestamp,
      input_tokens: turn.inputTokens,
      output_tokens: turn.outputTokens,
      cache_read_tokens: turn.cacheReadTokens,
      cache_creation_tokens: turn.cacheCreationTokens,
      cost_usd: turn.costUsd,
      duration_ms: turn.durationMs,
      model: turn.model,
      content_text: turn.contentText,
      // Tool calls are also denormalized onto the turn row as JSON.
      tool_calls: turn.toolCalls.length > 0 ? JSON.stringify(turn.toolCalls) : null,
      is_real_user: turn.isRealUser ? 1 : 0,
    });

    turnIds.push(turnId);

    if (turn.toolCalls.length > 0) {
      // Replace tool uses for this turn atomically to avoid duplicates on re-ingest
      repo.replaceToolUsesForTurn(turnId, turn.toolCalls.map(tc => ({
        session_id: parsed.sessionId,
        turn_id: turnId,
        tool_name: tc.toolName,
        tool_input: JSON.stringify(tc.toolInput),
        tool_result: tc.toolResult ?? null,
        // Per-call success/duration are not recoverable from the JSONL here.
        success: null,
        duration_ms: 0,
        timestamp: turn.timestamp,
      })));
    }
  }

  if (!opts.noTag) {
    const context = getActiveContext();
    if (context?.active && context.active.length > 0 && turnIds.length > 0) {
      // Only tag turns that occurred after the context was set
      // (set_at missing ⇒ contextTime 0 ⇒ every turn is eligible).
      const contextTime = context.set_at ? new Date(context.set_at).getTime() : 0;
      const eligibleIds = turnIds.filter((_, i) => {
        const turnTime = new Date(parsed.turns[i].timestamp).getTime();
        return turnTime >= contextTime;
      });
      if (eligibleIds.length > 0) {
        for (const tag of context.active) {
          repo.tagTurnsBatch(eligibleIds, tag);
        }
      }
    }
  }
}
|
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
import readline from "node:readline";
|
|
5
|
+
import type {
|
|
6
|
+
SessionEntry,
|
|
7
|
+
ParsedSession,
|
|
8
|
+
ParsedTurn,
|
|
9
|
+
ToolCallInfo,
|
|
10
|
+
ContentBlock,
|
|
11
|
+
} from "./types.js";
|
|
12
|
+
import { computeTurnCost } from "../pricing/index.js";
|
|
13
|
+
|
|
14
|
+
// Root of Claude Code's on-disk state for the current user.
const CLAUDE_DIR = path.join(os.homedir(), ".claude");
// Per-project subdirectories live here; each holds that project's sessions.
const PROJECTS_DIR = path.join(CLAUDE_DIR, "projects");

// Claude Code stores session files as:
// ~/.claude/projects/<encoded-path>/<session-uuid>.jsonl
// Session UUIDs match the standard UUID v4 format.
// Used to filter directory listings down to genuine session files.
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\.jsonl$/i;
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Discover all session JSONL files across all projects.
|
|
24
|
+
* Files are stored directly in each project directory (not in a sessions/ subdir).
|
|
25
|
+
*/
|
|
26
|
+
export function discoverSessionFiles(): { filePath: string; projectPath: string }[] {
|
|
27
|
+
const results: { filePath: string; projectPath: string }[] = [];
|
|
28
|
+
|
|
29
|
+
if (!fs.existsSync(PROJECTS_DIR)) return results;
|
|
30
|
+
|
|
31
|
+
for (const projectDir of fs.readdirSync(PROJECTS_DIR)) {
|
|
32
|
+
const projectDirPath = path.join(PROJECTS_DIR, projectDir);
|
|
33
|
+
const stat = fs.statSync(projectDirPath);
|
|
34
|
+
if (!stat.isDirectory()) continue;
|
|
35
|
+
|
|
36
|
+
const decodedProject = decodeProjectPath(projectDir);
|
|
37
|
+
|
|
38
|
+
for (const file of fs.readdirSync(projectDirPath)) {
|
|
39
|
+
if (!UUID_RE.test(file)) continue;
|
|
40
|
+
results.push({
|
|
41
|
+
filePath: path.join(projectDirPath, file),
|
|
42
|
+
projectPath: decodedProject,
|
|
43
|
+
});
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
return results.sort((a, b) => {
|
|
48
|
+
const aStat = fs.statSync(a.filePath);
|
|
49
|
+
const bStat = fs.statSync(b.filePath);
|
|
50
|
+
return bStat.mtimeMs - aStat.mtimeMs;
|
|
51
|
+
});
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/**
 * Parse a single session JSONL file into a structured ParsedSession.
 *
 * Walks every entry with a message, accumulating per-turn token/cost data
 * and session totals, then runs a second pass that assigns a processing
 * duration to each real user prompt (gap to the last assistant reply
 * before the next real user prompt).
 *
 * @param filePath    path to the <uuid>.jsonl session file; the session id
 *                    is the file's basename.
 * @param projectPath decoded project path to record, if known.
 */
export async function parseSessionFile(
  filePath: string,
  projectPath?: string,
): Promise<ParsedSession> {
  const entries = await readJsonlFile(filePath);
  const sessionId = path.basename(filePath, ".jsonl");

  const turns: ParsedTurn[] = [];
  let turnIndex = 0;
  let model: string | null = null;
  let startedAt: string | null = null;
  let endedAt: string | null = null;

  let totalInputTokens = 0;
  let totalOutputTokens = 0;
  let totalCacheReadTokens = 0;
  let totalCacheCreationTokens = 0;
  let totalCostUsd = 0;
  let totalDurationMs = 0;

  // Track whether each entry is a real user prompt or an automatic tool-result.
  // Indexed in lockstep with `turns` (both only grow for entries with a message).
  const entryIsRealUser: boolean[] = [];

  for (const entry of entries) {
    // Entries without a message (e.g. metadata lines) contribute nothing.
    if (!entry.message) continue;
    const msg = entry.message;
    const timestamp = entry.timestamp ?? new Date().toISOString();

    // First/last message timestamps bound the session.
    if (!startedAt) startedAt = timestamp;
    endedAt = timestamp;

    // Last model seen wins as the session-level model.
    if (msg.model) model = msg.model;

    const usage = msg.usage ?? {};
    const inputTokens = usage.input_tokens ?? 0;
    const outputTokens = usage.output_tokens ?? 0;
    const cacheRead = usage.cache_read_input_tokens ?? 0;
    const cacheCreation = usage.cache_creation_input_tokens ?? 0;
    // Prefer the cost recorded in the entry; otherwise derive it from
    // token counts and the (current or last-seen) model's pricing.
    const costUsd = entry.costUSD ?? computeTurnCost(
      msg.model ?? model,
      inputTokens,
      outputTokens,
      cacheRead,
      cacheCreation,
    );
    const durationMs = entry.durationMs ?? 0;

    totalInputTokens += inputTokens;
    totalOutputTokens += outputTokens;
    totalCacheReadTokens += cacheRead;
    totalCacheCreationTokens += cacheCreation;
    totalCostUsd += costUsd;

    const { text, toolCalls } = extractContent(msg.content);

    // A user entry carrying sourceToolAssistantUUID is an automatic
    // tool-result echo, not a human prompt.
    const realUser = msg.role === "user" && !entry.sourceToolAssistantUUID;
    entryIsRealUser.push(realUser);

    turns.push({
      turnIndex,
      role: msg.role,
      timestamp,
      inputTokens,
      outputTokens,
      cacheReadTokens: cacheRead,
      cacheCreationTokens: cacheCreation,
      costUsd,
      durationMs,
      model: msg.model ?? null,
      contentText: text,
      toolCalls,
      isRealUser: realUser,
    });

    turnIndex++;
  }

  // Compute processing duration: for each real user prompt, find the last
  // assistant turn before the next real user prompt. Duration = that gap.
  // Only assigned to the real user turn (the one that triggered processing).
  const realUserTurnIndices: number[] = [];
  for (let i = 0; i < turns.length; i++) {
    if (entryIsRealUser[i]) realUserTurnIndices.push(i);
  }

  for (let ri = 0; ri < realUserTurnIndices.length; ri++) {
    const userIdx = realUserTurnIndices[ri];
    // Exclusive upper bound: the next real user prompt, or end of session.
    const nextUserIdx = ri + 1 < realUserTurnIndices.length
      ? realUserTurnIndices[ri + 1]
      : turns.length;

    let lastAssistantTs: string | null = null;
    for (let j = userIdx + 1; j < nextUserIdx; j++) {
      if (turns[j].role === "assistant") {
        lastAssistantTs = turns[j].timestamp;
      }
    }

    // Only fill in a duration when the entry did not already record one.
    if (lastAssistantTs && turns[userIdx].durationMs === 0) {
      const userTime = new Date(turns[userIdx].timestamp).getTime();
      const assistantTime = new Date(lastAssistantTs).getTime();
      const gap = assistantTime - userTime;
      if (gap > 0) turns[userIdx].durationMs = gap;
    }
  }

  totalDurationMs = turns.reduce((sum, t) => sum + t.durationMs, 0);

  return {
    sessionId,
    projectPath: projectPath ?? null,
    startedAt: startedAt ?? new Date().toISOString(),
    endedAt,
    model,
    turns,
    totalInputTokens,
    totalOutputTokens,
    totalCacheReadTokens,
    totalCacheCreationTokens,
    totalCostUsd,
    totalDurationMs,
  };
}
|
|
180
|
+
|
|
181
|
+
/**
|
|
182
|
+
* Extract text content and tool calls from a message's content field.
|
|
183
|
+
*/
|
|
184
|
+
function extractContent(
|
|
185
|
+
content: string | ContentBlock[] | undefined,
|
|
186
|
+
): { text: string; toolCalls: ToolCallInfo[] } {
|
|
187
|
+
if (!content) return { text: "", toolCalls: [] };
|
|
188
|
+
if (typeof content === "string") return { text: content, toolCalls: [] };
|
|
189
|
+
|
|
190
|
+
const textParts: string[] = [];
|
|
191
|
+
const toolCalls: ToolCallInfo[] = [];
|
|
192
|
+
|
|
193
|
+
for (const block of content) {
|
|
194
|
+
if (block.type === "text" && block.text) {
|
|
195
|
+
textParts.push(block.text);
|
|
196
|
+
} else if (block.type === "tool_use" && block.name) {
|
|
197
|
+
toolCalls.push({
|
|
198
|
+
toolName: block.name,
|
|
199
|
+
toolInput: (block.input as Record<string, unknown>) ?? {},
|
|
200
|
+
});
|
|
201
|
+
} else if (block.type === "tool_result") {
|
|
202
|
+
const resultText = typeof block.content === "string"
|
|
203
|
+
? block.content
|
|
204
|
+
: Array.isArray(block.content)
|
|
205
|
+
? block.content
|
|
206
|
+
.filter((b) => b.type === "text" && b.text)
|
|
207
|
+
.map((b) => b.text)
|
|
208
|
+
.join("\n")
|
|
209
|
+
: "";
|
|
210
|
+
|
|
211
|
+
const matchingCall = toolCalls.find((tc) => !tc.toolResult);
|
|
212
|
+
if (matchingCall) {
|
|
213
|
+
matchingCall.toolResult = resultText;
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
return { text: textParts.join("\n"), toolCalls };
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
async function readJsonlFile(filePath: string): Promise<SessionEntry[]> {
|
|
222
|
+
const entries: SessionEntry[] = [];
|
|
223
|
+
const stream = fs.createReadStream(filePath, { encoding: "utf-8" });
|
|
224
|
+
const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });
|
|
225
|
+
|
|
226
|
+
for await (const line of rl) {
|
|
227
|
+
const trimmed = line.trim();
|
|
228
|
+
if (!trimmed) continue;
|
|
229
|
+
try {
|
|
230
|
+
entries.push(JSON.parse(trimmed));
|
|
231
|
+
} catch {
|
|
232
|
+
// Skip malformed lines
|
|
233
|
+
}
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
return entries;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
function decodeProjectPath(encoded: string): string {
|
|
240
|
+
return encoded.replace(/-/g, "/");
|
|
241
|
+
}
|