claude-code-tracker 1.2.3 → 1.4.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -2
- package/bin/claude-tracker-cost.js +20 -0
- package/bin/claude-tracker-setup +10 -0
- package/install.js +21 -0
- package/install.sh +36 -2
- package/package.json +7 -3
- package/skills/view-tracking/SKILL.md +54 -0
- package/src/__pycache__/cost.cpython-312.pyc +0 -0
- package/src/__pycache__/parse_friction.cpython-312.pyc +0 -0
- package/src/__pycache__/parse_skills.cpython-312.pyc +0 -0
- package/src/__pycache__/platform_utils.cpython-312.pyc +0 -0
- package/src/__pycache__/storage.cpython-312.pyc +0 -0
- package/src/__pycache__/write-agent.cpython-312.pyc +0 -0
- package/src/__pycache__/write-turns.cpython-312.pyc +0 -0
- package/src/backfill.py +32 -52
- package/src/cost-summary.py +48 -11
- package/src/cost.py +7 -0
- package/src/export-json.py +27 -0
- package/src/generate-charts.py +567 -12
- package/src/init-templates.py +26 -0
- package/src/init-templates.sh +3 -3
- package/src/parse_friction.py +286 -0
- package/src/parse_skills.py +133 -0
- package/src/patch-durations.py +14 -114
- package/src/platform_utils.py +36 -0
- package/src/stop-hook.js +26 -0
- package/src/stop-hook.sh +21 -153
- package/src/storage.py +397 -0
- package/src/subagent-stop-hook.sh +37 -0
- package/src/update-prompts-index.py +177 -20
- package/src/write-agent.py +113 -0
- package/src/write-turns.py +130 -0
- package/uninstall.js +20 -0
- package/uninstall.sh +17 -0
#!/usr/bin/env node
// package/src/stop-hook.js (ADDED) — Stop-hook shim: reads the hook payload
// from stdin and forwards it to stop-hook.sh via bash, propagating the exit
// status back to the caller.
'use strict';

const { spawnSync } = require('child_process'); // execFileSync was imported but never used
const path = require('path');
const fs = require('fs');

const scriptDir = path.dirname(path.resolve(__filename));
const bashScript = path.join(scriptDir, 'stop-hook.sh');

// Read the entire payload up front so it can be piped to the child via `input`.
const input = fs.readFileSync(process.stdin.fd, 'utf8');

// Both platforms invoke the script through `bash` (on Windows this requires
// Git Bash or WSL on PATH). The original win32/else branches were byte-for-byte
// identical — contradicting the "run directly on Unix" comment — so they are
// collapsed into a single call.
const result = spawnSync('bash', [bashScript], {
  input,
  stdio: ['pipe', 'inherit', 'inherit'],
  shell: false,
});

if (result.error) {
  // spawn itself failed (e.g. bash not on PATH). The old `result.status || 0`
  // exited 0 in this case (status is null), silently masking the failure.
  console.error(`stop-hook: failed to run bash: ${result.error.message}`);
  process.exit(1);
}
// status is null when the child was killed by a signal; preserve the original
// behavior of exiting 0 in that case.
process.exit(result.status ?? 0);
package/src/stop-hook.sh
CHANGED
|
@@ -50,161 +50,29 @@ if [[ ! -d "$TRACKING_DIR" ]]; then
|
|
|
50
50
|
python3 "$SCRIPT_DIR/backfill.py" "$PROJECT_ROOT" 2>/dev/null || true
|
|
51
51
|
fi
|
|
52
52
|
|
|
53
|
-
# Parse token usage from JSONL
|
|
54
|
-
python3 - "$TRANSCRIPT" "$TRACKING_DIR
|
|
55
|
-
import sys, json, os
|
|
56
|
-
from datetime import datetime, date
|
|
53
|
+
# Parse token usage from JSONL and write to SQLite
|
|
54
|
+
python3 "$SCRIPT_DIR/write-turns.py" "$TRANSCRIPT" "$TRACKING_DIR" "$SESSION_ID" "$(basename "$PROJECT_ROOT")"
|
|
57
55
|
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
project_name = sys.argv[4]
|
|
56
|
+
# Parse friction events from JSONL
|
|
57
|
+
python3 "$SCRIPT_DIR/parse_friction.py" "$TRANSCRIPT" "$TRACKING_DIR/friction.json" \
|
|
58
|
+
"$SESSION_ID" "$(basename "$PROJECT_ROOT")" "main" 2>/dev/null || true
|
|
62
59
|
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
with open(transcript_path) as f:
|
|
68
|
-
for line in f:
|
|
69
|
-
try:
|
|
70
|
-
obj = json.loads(line)
|
|
71
|
-
t = obj.get('type')
|
|
72
|
-
ts = obj.get('timestamp')
|
|
73
|
-
if t == 'user' and not obj.get('isSidechain') and ts:
|
|
74
|
-
msgs.append(('user', ts))
|
|
75
|
-
elif t == 'assistant' and ts:
|
|
76
|
-
msgs.append(('assistant', ts))
|
|
77
|
-
msg = obj.get('message', {})
|
|
78
|
-
if isinstance(msg, dict) and msg.get('role') == 'assistant':
|
|
79
|
-
usage = msg.get('usage', {})
|
|
80
|
-
if usage:
|
|
81
|
-
usages.append(usage)
|
|
82
|
-
m = msg.get('model', '')
|
|
83
|
-
if m:
|
|
84
|
-
model = m
|
|
85
|
-
except:
|
|
86
|
-
pass
|
|
87
|
-
|
|
88
|
-
# Build per-turn entries
|
|
89
|
-
turn_entries = []
|
|
90
|
-
turn_index = 0
|
|
91
|
-
usage_index = 0
|
|
92
|
-
i = 0
|
|
93
|
-
while i < len(msgs):
|
|
94
|
-
if msgs[i][0] == 'user':
|
|
95
|
-
user_ts = msgs[i][1]
|
|
96
|
-
j = i + 1
|
|
97
|
-
while j < len(msgs) and msgs[j][0] != 'assistant':
|
|
98
|
-
j += 1
|
|
99
|
-
if j < len(msgs):
|
|
100
|
-
asst_ts = msgs[j][1]
|
|
101
|
-
usage = {}
|
|
102
|
-
if usage_index < len(usages):
|
|
103
|
-
usage = usages[usage_index]
|
|
104
|
-
usage_index += 1
|
|
105
|
-
|
|
106
|
-
inp = usage.get('input_tokens', 0)
|
|
107
|
-
out = usage.get('output_tokens', 0)
|
|
108
|
-
cache_create = usage.get('cache_creation_input_tokens', 0)
|
|
109
|
-
cache_read = usage.get('cache_read_input_tokens', 0)
|
|
110
|
-
total = inp + cache_create + cache_read + out
|
|
111
|
-
|
|
112
|
-
if total > 0:
|
|
113
|
-
duration = 0
|
|
114
|
-
try:
|
|
115
|
-
t0 = datetime.fromisoformat(user_ts.replace('Z', '+00:00'))
|
|
116
|
-
t1 = datetime.fromisoformat(asst_ts.replace('Z', '+00:00'))
|
|
117
|
-
duration = max(0, int((t1 - t0).total_seconds()))
|
|
118
|
-
except:
|
|
119
|
-
pass
|
|
120
|
-
|
|
121
|
-
if 'opus' in model:
|
|
122
|
-
cost = inp * 15 / 1e6 + cache_create * 18.75 / 1e6 + cache_read * 1.50 / 1e6 + out * 75 / 1e6
|
|
123
|
-
else:
|
|
124
|
-
cost = inp * 3 / 1e6 + cache_create * 3.75 / 1e6 + cache_read * 0.30 / 1e6 + out * 15 / 1e6
|
|
125
|
-
|
|
126
|
-
try:
|
|
127
|
-
turn_ts = datetime.fromisoformat(user_ts.replace('Z', '+00:00')).strftime('%Y-%m-%dT%H:%M:%SZ')
|
|
128
|
-
turn_date = datetime.fromisoformat(user_ts.replace('Z', '+00:00')).strftime('%Y-%m-%d')
|
|
129
|
-
except:
|
|
130
|
-
turn_ts = user_ts
|
|
131
|
-
turn_date = date.today().isoformat()
|
|
132
|
-
|
|
133
|
-
turn_entries.append({
|
|
134
|
-
'date': turn_date,
|
|
135
|
-
'project': project_name,
|
|
136
|
-
'session_id': session_id,
|
|
137
|
-
'turn_index': turn_index,
|
|
138
|
-
'turn_timestamp': turn_ts,
|
|
139
|
-
'input_tokens': inp,
|
|
140
|
-
'cache_creation_tokens': cache_create,
|
|
141
|
-
'cache_read_tokens': cache_read,
|
|
142
|
-
'output_tokens': out,
|
|
143
|
-
'total_tokens': total,
|
|
144
|
-
'estimated_cost_usd': round(cost, 4),
|
|
145
|
-
'model': model,
|
|
146
|
-
'duration_seconds': duration,
|
|
147
|
-
})
|
|
148
|
-
turn_index += 1
|
|
149
|
-
i = j + 1
|
|
150
|
-
else:
|
|
151
|
-
i += 1
|
|
152
|
-
else:
|
|
153
|
-
i += 1
|
|
154
|
-
|
|
155
|
-
if not turn_entries:
|
|
156
|
-
sys.exit(0)
|
|
157
|
-
|
|
158
|
-
# Load existing data
|
|
159
|
-
data = []
|
|
160
|
-
if os.path.exists(tokens_file):
|
|
161
|
-
try:
|
|
162
|
-
with open(tokens_file) as f:
|
|
163
|
-
data = json.load(f)
|
|
164
|
-
except:
|
|
165
|
-
data = []
|
|
166
|
-
|
|
167
|
-
# Build index of existing (session_id, turn_index) -> position
|
|
168
|
-
existing_idx = {}
|
|
169
|
-
for pos, e in enumerate(data):
|
|
170
|
-
key = (e.get('session_id'), e.get('turn_index'))
|
|
171
|
-
existing_idx[key] = pos
|
|
172
|
-
|
|
173
|
-
# Check if anything actually changed
|
|
174
|
-
changed = False
|
|
175
|
-
for entry in turn_entries:
|
|
176
|
-
key = (entry['session_id'], entry['turn_index'])
|
|
177
|
-
if key not in existing_idx:
|
|
178
|
-
changed = True
|
|
179
|
-
break
|
|
180
|
-
existing = data[existing_idx[key]]
|
|
181
|
-
if (existing.get('total_tokens') != entry['total_tokens'] or
|
|
182
|
-
existing.get('output_tokens') != entry['output_tokens']):
|
|
183
|
-
changed = True
|
|
184
|
-
break
|
|
185
|
-
|
|
186
|
-
if not changed:
|
|
187
|
-
sys.exit(0)
|
|
188
|
-
|
|
189
|
-
# Upsert: update existing entries or append new ones
|
|
190
|
-
for entry in turn_entries:
|
|
191
|
-
key = (entry['session_id'], entry['turn_index'])
|
|
192
|
-
if key in existing_idx:
|
|
193
|
-
data[existing_idx[key]] = entry
|
|
194
|
-
else:
|
|
195
|
-
data.append(entry)
|
|
196
|
-
existing_idx[key] = len(data) - 1
|
|
197
|
-
|
|
198
|
-
# Sort by (date, session_id, turn_index)
|
|
199
|
-
data.sort(key=lambda x: (x.get('date', ''), x.get('session_id', ''), x.get('turn_index', 0)))
|
|
200
|
-
|
|
201
|
-
with open(tokens_file, 'w') as f:
|
|
202
|
-
json.dump(data, f, indent=2)
|
|
203
|
-
f.write('\n')
|
|
204
|
-
PYEOF
|
|
60
|
+
# Parse skill invocations from JSONL
|
|
61
|
+
python3 "$SCRIPT_DIR/parse_skills.py" "$TRANSCRIPT" "$TRACKING_DIR" \
|
|
62
|
+
"$SESSION_ID" "$(basename "$PROJECT_ROOT")" 2>/dev/null || true
|
|
205
63
|
|
|
206
64
|
# Regenerate charts
|
|
207
|
-
python3 "$SCRIPT_DIR/generate-charts.py" "$TRACKING_DIR
|
|
65
|
+
python3 "$SCRIPT_DIR/generate-charts.py" "$TRACKING_DIR" "$TRACKING_DIR/charts.html" 2>/dev/null || true
|
|
66
|
+
|
|
67
|
+
# Regenerate key-prompts index + shadow to OpenMemory
|
|
68
|
+
OM_DB="$HOME/.claude/.claude/openmemory.sqlite"
|
|
69
|
+
LEARNINGS="$HOME/.claude/tool-learnings.md"
|
|
70
|
+
OM_ARGS=""
|
|
71
|
+
if [[ -f "$OM_DB" ]]; then
|
|
72
|
+
OM_ARGS="--om-db $OM_DB"
|
|
73
|
+
if [[ -f "$LEARNINGS" ]]; then
|
|
74
|
+
OM_ARGS="$OM_ARGS --learnings $LEARNINGS"
|
|
75
|
+
fi
|
|
76
|
+
fi
|
|
77
|
+
python3 "$SCRIPT_DIR/update-prompts-index.py" "$TRACKING_DIR" $OM_ARGS 2>/dev/null || true
|
|
208
78
|
|
|
209
|
-
# Regenerate key-prompts index
|
|
210
|
-
python3 "$SCRIPT_DIR/update-prompts-index.py" "$TRACKING_DIR" 2>/dev/null || true
|
# package/src/storage.py (ADDED)
#!/usr/bin/env python3
"""SQLite storage module for Claude Code Tracker.

Flat-function API for all DB operations. Replaces flat JSON file storage
with a WAL-mode SQLite backend. Handles auto-migration from existing
tokens.json / agents.json on first access.
"""
import json
import os
import sqlite3
import sys
from datetime import datetime, timezone
from typing import Optional

# SQLite file name created inside each project's tracking directory.
DB_NAME = "tracking.db"

# Column order used by the INSERT statements against `turns`
# (must stay in sync with SCHEMA_SQL below).
TURN_COLS = [
    "session_id", "turn_index", "date", "project", "turn_timestamp",
    "input_tokens", "cache_creation_tokens", "cache_read_tokens",
    "output_tokens", "total_tokens", "estimated_cost_usd", "model",
    "duration_seconds",
]

# Fallbacks applied when an entry dict omits a column; columns not listed
# here (the NOT NULL key columns) fall back to None via .get().
TURN_DEFAULTS = {
    "turn_timestamp": None,
    "input_tokens": 0,
    "cache_creation_tokens": 0,
    "cache_read_tokens": 0,
    "output_tokens": 0,
    "total_tokens": 0,
    "estimated_cost_usd": 0,
    "model": "unknown",
    "duration_seconds": 0,
}

# Column order for INSERTs against `agents` (the autoincrement id is omitted).
AGENT_COLS = [
    "timestamp", "session_id", "agent_id", "agent_type",
    "input_tokens", "output_tokens", "cache_creation_tokens",
    "cache_read_tokens", "total_tokens", "turns",
    "estimated_cost_usd", "model",
]

# Fallbacks for optional agent columns.
AGENT_DEFAULTS = {
    "agent_type": "unknown",
    "input_tokens": 0,
    "output_tokens": 0,
    "cache_creation_tokens": 0,
    "cache_read_tokens": 0,
    "total_tokens": 0,
    "turns": 0,
    "estimated_cost_usd": 0,
    "model": "unknown",
}

# Column order for INSERTs against `skills` (autoincrement id omitted).
SKILL_COLS = [
    "session_id", "date", "project", "skill_name", "args",
    "tool_use_id", "timestamp", "duration_seconds",
    "success", "error_message",
]

# Fallbacks for optional skill columns; `success` stored as 0/1 integer.
SKILL_DEFAULTS = {
    "args": None,
    "tool_use_id": None,
    "timestamp": None,
    "duration_seconds": 0,
    "success": 1,
    "error_message": None,
}

# Full schema; every statement is IF NOT EXISTS so executescript is
# safe to run on every connection open.
SCHEMA_SQL = """\
CREATE TABLE IF NOT EXISTS turns (
    session_id TEXT NOT NULL,
    turn_index INTEGER NOT NULL,
    date TEXT NOT NULL,
    project TEXT NOT NULL,
    turn_timestamp TEXT,
    input_tokens INTEGER DEFAULT 0,
    cache_creation_tokens INTEGER DEFAULT 0,
    cache_read_tokens INTEGER DEFAULT 0,
    output_tokens INTEGER DEFAULT 0,
    total_tokens INTEGER DEFAULT 0,
    estimated_cost_usd REAL DEFAULT 0,
    model TEXT DEFAULT 'unknown',
    duration_seconds INTEGER DEFAULT 0,
    PRIMARY KEY (session_id, turn_index)
);
CREATE INDEX IF NOT EXISTS idx_turns_date ON turns(date);

CREATE TABLE IF NOT EXISTS agents (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    timestamp TEXT NOT NULL,
    session_id TEXT NOT NULL,
    agent_id TEXT NOT NULL,
    agent_type TEXT DEFAULT 'unknown',
    input_tokens INTEGER DEFAULT 0,
    output_tokens INTEGER DEFAULT 0,
    cache_creation_tokens INTEGER DEFAULT 0,
    cache_read_tokens INTEGER DEFAULT 0,
    total_tokens INTEGER DEFAULT 0,
    turns INTEGER DEFAULT 0,
    estimated_cost_usd REAL DEFAULT 0,
    model TEXT DEFAULT 'unknown'
);
CREATE INDEX IF NOT EXISTS idx_agents_session ON agents(session_id);

CREATE TABLE IF NOT EXISTS skills (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    session_id TEXT NOT NULL,
    date TEXT NOT NULL,
    project TEXT NOT NULL,
    skill_name TEXT NOT NULL,
    args TEXT,
    tool_use_id TEXT,
    timestamp TEXT,
    duration_seconds INTEGER DEFAULT 0,
    success INTEGER DEFAULT 1,
    error_message TEXT
);
CREATE INDEX IF NOT EXISTS idx_skills_session ON skills(session_id);
CREATE INDEX IF NOT EXISTS idx_skills_name ON skills(skill_name);

CREATE TABLE IF NOT EXISTS metadata (
    key TEXT PRIMARY KEY,
    value TEXT
);
"""


# ---------------------------------------------------------------------------
# Internal helpers
# ---------------------------------------------------------------------------
|
|
132
|
+
|
|
133
|
+
def _db_path(tracking_dir: str) -> str:
    """Path of the SQLite database file inside *tracking_dir*."""
    return os.path.join(tracking_dir, DB_NAME)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def _insert_turns(conn: sqlite3.Connection, entries: list[dict]) -> None:
    """INSERT OR REPLACE turns via executemany."""
    cols = ", ".join(TURN_COLS)
    placeholders = ", ".join("?" for _ in TURN_COLS)
    sql = f"INSERT OR REPLACE INTO turns ({cols}) VALUES ({placeholders})"
    # Missing keys fall back to TURN_DEFAULTS (None for unlisted columns).
    rows = [
        tuple(entry.get(col, TURN_DEFAULTS.get(col)) for col in TURN_COLS)
        for entry in entries
    ]
    if rows:
        conn.executemany(sql, rows)
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def _insert_agents(conn: sqlite3.Connection, entries: list[dict]) -> None:
    """INSERT agents via executemany (always appends — autoincrement id)."""
    cols = ", ".join(AGENT_COLS)
    placeholders = ", ".join("?" for _ in AGENT_COLS)
    sql = f"INSERT INTO agents ({cols}) VALUES ({placeholders})"
    # Missing keys fall back to AGENT_DEFAULTS (None for unlisted columns).
    rows = [
        tuple(entry.get(col, AGENT_DEFAULTS.get(col)) for col in AGENT_COLS)
        for entry in entries
    ]
    if rows:
        conn.executemany(sql, rows)
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _insert_skills(conn: sqlite3.Connection, entries: list[dict]) -> None:
    """INSERT skills via executemany (always appends — autoincrement id)."""
    cols = ", ".join(SKILL_COLS)
    placeholders = ", ".join("?" for _ in SKILL_COLS)
    sql = f"INSERT INTO skills ({cols}) VALUES ({placeholders})"
    # Missing keys fall back to SKILL_DEFAULTS (None for unlisted columns).
    rows = [
        tuple(entry.get(col, SKILL_DEFAULTS.get(col)) for col in SKILL_COLS)
        for entry in entries
    ]
    if rows:
        conn.executemany(sql, rows)
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def _maybe_migrate(conn: sqlite3.Connection, tracking_dir: str) -> None:
    """One-time migration from JSON files to SQLite.

    Checks metadata for 'migrated_at'. If absent and JSON files exist,
    imports their data, stamps metadata, and renames .json -> .json.migrated.
    """
    # Fast exit: the 'migrated_at' stamp means migration already ran.
    row = conn.execute(
        "SELECT value FROM metadata WHERE key = 'migrated_at'"
    ).fetchone()
    if row is not None:
        return

    tokens_path = os.path.join(tracking_dir, "tokens.json")
    agents_path = os.path.join(tracking_dir, "agents.json")

    # Best-effort import: a missing, unreadable, or corrupt tokens.json is
    # simply skipped rather than aborting the hook.
    if os.path.exists(tokens_path):
        try:
            with open(tokens_path, encoding="utf-8") as f:
                data = json.load(f)
            if data:
                _insert_turns(conn, data)
        except (json.JSONDecodeError, OSError):
            pass

    # Same best-effort treatment for agents.json.
    if os.path.exists(agents_path):
        try:
            with open(agents_path, encoding="utf-8") as f:
                data = json.load(f)
            if data:
                _insert_agents(conn, data)
        except (json.JSONDecodeError, OSError):
            pass

    # Stamp the metadata (and commit) BEFORE renaming the source files, so a
    # crash between the two steps can at worst re-run a no-op rename.
    now = datetime.now(timezone.utc).isoformat()
    conn.execute(
        "INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)",
        ("schema_version", "1"),
    )
    conn.execute(
        "INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)",
        ("migrated_at", now),
    )
    conn.commit()

    # Rename originals so migration won't re-run even without the metadata check
    for path in (tokens_path, agents_path):
        if os.path.exists(path):
            try:
                os.rename(path, path + ".migrated")
            except OSError:
                # e.g. permissions — the metadata stamp still prevents re-runs.
                pass
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
# ---------------------------------------------------------------------------
|
|
224
|
+
# Core DB access
|
|
225
|
+
# ---------------------------------------------------------------------------
|
|
226
|
+
|
|
227
|
+
def init_db(tracking_dir: str) -> None:
    """Create the database file and schema. Safe to call repeatedly."""
    os.makedirs(tracking_dir, exist_ok=True)
    conn = sqlite3.connect(_db_path(tracking_dir))
    try:
        try:
            conn.execute("PRAGMA journal_mode=WAL")
        except sqlite3.OperationalError:
            # WAL can be unavailable (e.g. some network filesystems);
            # fall back to the default rollback journal.
            conn.execute("PRAGMA journal_mode=DELETE")
        conn.execute("PRAGMA synchronous=NORMAL")
        # Every schema statement is IF NOT EXISTS, so this is idempotent.
        conn.executescript(SCHEMA_SQL)
        conn.commit()
    finally:
        conn.close()
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def get_db(tracking_dir: str) -> sqlite3.Connection:
    """Open (or create) the database and return a connection.

    Sets row_factory=Row, WAL mode, synchronous=NORMAL, ensures the schema
    exists, and runs the one-time JSON migration if needed.

    Note: ``with get_db(d) as conn:`` scopes a *transaction* only —
    sqlite3's connection context manager commits/rolls back but does NOT
    close the connection. Callers are responsible for closing it.
    """
    path = _db_path(tracking_dir)
    if not os.path.exists(path):
        init_db(tracking_dir)
    conn = sqlite3.connect(path)
    # Row factory so callers can build dicts from rows.
    conn.row_factory = sqlite3.Row
    try:
        conn.execute("PRAGMA journal_mode=WAL")
    except sqlite3.OperationalError:
        # WAL unavailable on this filesystem — fall back.
        conn.execute("PRAGMA journal_mode=DELETE")
    conn.execute("PRAGMA synchronous=NORMAL")
    # Re-applied on every open; harmless because the schema is IF NOT EXISTS.
    conn.executescript(SCHEMA_SQL)
    _maybe_migrate(conn, tracking_dir)
    return conn
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
# ---------------------------------------------------------------------------
|
|
266
|
+
# Public API
|
|
267
|
+
# ---------------------------------------------------------------------------
|
|
268
|
+
|
|
269
|
+
def upsert_turns(tracking_dir: str, entries: list[dict]) -> int:
    """INSERT OR REPLACE turn entries. Returns number of rows affected.

    Args:
        tracking_dir: Directory containing (or to contain) the tracking DB.
        entries: Turn dicts keyed by TURN_COLS; missing keys use defaults.
    """
    conn = get_db(tracking_dir)
    try:
        # `with conn:` scopes a transaction — commit on success, rollback on error.
        with conn:
            _insert_turns(conn, entries)
        return len(entries)
    finally:
        # sqlite3's context manager does not close the connection; close it
        # explicitly so the WAL handle is released (the old code leaked it).
        conn.close()
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def append_agent(tracking_dir: str, entry: dict) -> None:
    """Append a single agent entry (autoincrement id; never replaces)."""
    conn = get_db(tracking_dir)
    try:
        # Transaction scope: commit on success, rollback on error.
        with conn:
            _insert_agents(conn, [entry])
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def get_all_turns(tracking_dir: str) -> list[dict]:
    """Return all turn rows as dicts, ordered by date, session_id, turn_index."""
    conn = get_db(tracking_dir)
    try:
        rows = conn.execute(
            "SELECT * FROM turns ORDER BY date, session_id, turn_index"
        ).fetchall()
        return [dict(r) for r in rows]
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
def get_all_agents(tracking_dir: str) -> list[dict]:
    """Return all agent rows as dicts (without the autoincrement id), ordered by id."""
    conn = get_db(tracking_dir)
    try:
        rows = conn.execute("SELECT * FROM agents ORDER BY id").fetchall()
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
    result = []
    for r in rows:
        d = dict(r)
        # Drop the surrogate key; it is not part of the public record shape.
        d.pop("id", None)
        result.append(d)
    return result
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
def replace_session_skills(
    tracking_dir: str, session_id: str, entries: list[dict]
) -> None:
    """Delete all skills for a session and insert replacements atomically."""
    conn = get_db(tracking_dir)
    try:
        # Single transaction: if the insert fails, the rollback leaves the
        # session's old rows intact instead of losing them.
        with conn:
            conn.execute("DELETE FROM skills WHERE session_id = ?", (session_id,))
            _insert_skills(conn, entries)
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
def get_all_skills(tracking_dir: str) -> list[dict]:
    """Return all skill rows as dicts (without the autoincrement id), ordered by id."""
    conn = get_db(tracking_dir)
    try:
        rows = conn.execute("SELECT * FROM skills ORDER BY id").fetchall()
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
    result = []
    for r in rows:
        d = dict(r)
        # Drop the surrogate key; it is not part of the public record shape.
        d.pop("id", None)
        result.append(d)
    return result
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def count_turns_for_session(tracking_dir: str, session_id: str) -> int:
    """Return the number of turns for a given session."""
    conn = get_db(tracking_dir)
    try:
        row = conn.execute(
            "SELECT COUNT(*) FROM turns WHERE session_id = ?", (session_id,)
        ).fetchone()
        return row[0]
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
def replace_session_turns(
    tracking_dir: str, session_id: str, entries: list[dict]
) -> None:
    """Delete all turns for a session and insert replacements atomically."""
    conn = get_db(tracking_dir)
    try:
        # Single transaction: a failed insert rolls back the delete, so the
        # session's old turns are never lost to a partial replace.
        with conn:
            conn.execute("DELETE FROM turns WHERE session_id = ?", (session_id,))
            _insert_turns(conn, entries)
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
def patch_turn_duration(
    tracking_dir: str, session_id: str, turn_index: int, duration: int
) -> None:
    """Update duration_seconds for a specific turn (no-op if the turn is absent)."""
    conn = get_db(tracking_dir)
    try:
        with conn:
            conn.execute(
                "UPDATE turns SET duration_seconds = ? "
                "WHERE session_id = ? AND turn_index = ?",
                (duration, session_id, turn_index),
            )
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
def get_session_turns(tracking_dir: str, session_id: str) -> list[dict]:
    """Return turns for a specific session, ordered by turn_index."""
    conn = get_db(tracking_dir)
    try:
        rows = conn.execute(
            "SELECT * FROM turns WHERE session_id = ? ORDER BY turn_index",
            (session_id,),
        ).fetchall()
        return [dict(r) for r in rows]
    finally:
        # Close explicitly — the previous `with get_db(...)` form leaked the
        # connection (sqlite3's context manager does not close).
        conn.close()
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
def export_json(
    tracking_dir: str,
    tokens_path: Optional[str] = None,
    agents_path: Optional[str] = None,
) -> None:
    """Export turns and/or agents tables to JSON files.

    Each target is written only when its path argument is provided;
    output is pretty-printed with a trailing newline.
    """
    targets = (
        (tokens_path, get_all_turns),
        (agents_path, get_all_agents),
    )
    for out_path, fetch in targets:
        if out_path is None:
            continue
        with open(out_path, "w", encoding="utf-8") as fh:
            json.dump(fetch(tracking_dir), fh, indent=2)
            fh.write("\n")
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
# ---------------------------------------------------------------------------
|
|
389
|
+
# CLI
|
|
390
|
+
# ---------------------------------------------------------------------------
|
|
391
|
+
|
|
392
|
+
if __name__ == "__main__":
    # Minimal CLI: only `--init <tracking_dir>` is supported.
    args = sys.argv[1:]
    if len(args) < 2 or args[0] != "--init":
        print(f"Usage: {sys.argv[0]} --init <tracking_dir>", file=sys.stderr)
        sys.exit(1)
    target = args[1]
    init_db(target)
    print(f"Initialized {os.path.join(target, DB_NAME)}")
|
|
#!/usr/bin/env bash
# package/src/subagent-stop-hook.sh (ADDED) — SubagentStop hook: records
# subagent token usage and friction events, then regenerates charts.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

INPUT="$(cat)"

# json_field KEY [DEFAULT] — extract one string field from the JSON payload.
# Best-effort: prints DEFAULT (or nothing) on malformed JSON. printf replaces
# the original echo so payloads beginning with "-" or containing backslashes
# are passed through verbatim. Keys are fixed literals below, so interpolating
# $1 into the Python snippet is safe.
json_field() {
  printf '%s' "$INPUT" | python3 -c "import sys,json; print(json.load(sys.stdin).get('$1','${2:-}'))" 2>/dev/null || true
}

# Extract fields from SubagentStop payload (was five copy-pasted pipelines).
CWD="$(json_field cwd)"
TRANSCRIPT="$(json_field agent_transcript_path)"
SESSION_ID="$(json_field session_id)"
AGENT_ID="$(json_field agent_id)"
AGENT_TYPE="$(json_field agent_type unknown)"

if [[ -z "$CWD" || -z "$TRANSCRIPT" || ! -f "$TRANSCRIPT" ]]; then exit 0; fi

# Find project root (walk up to .git)
PROJECT_ROOT="$CWD"
while [[ "$PROJECT_ROOT" != "/" ]]; do
  [[ -e "$PROJECT_ROOT/.git" ]] && break
  PROJECT_ROOT="$(dirname "$PROJECT_ROOT")"
done
if [[ "$PROJECT_ROOT" == "/" ]]; then exit 0; fi

TRACKING_DIR="$PROJECT_ROOT/.claude/tracking"
# Only run if tracking is already initialized — don't auto-init from subagent hook
if [[ ! -d "$TRACKING_DIR" ]]; then exit 0; fi

# Parse token usage from subagent JSONL and write to SQLite
python3 "$SCRIPT_DIR/write-agent.py" "$TRANSCRIPT" "$TRACKING_DIR" "$SESSION_ID" "$AGENT_ID" "$AGENT_TYPE"

# Parse friction events from subagent JSONL (best-effort)
python3 "$SCRIPT_DIR/parse_friction.py" "$TRANSCRIPT" "$TRACKING_DIR/friction.json" \
    "$SESSION_ID" "$(basename "$PROJECT_ROOT")" "subagent" \
    --agent-type "$AGENT_TYPE" --agent-id "$AGENT_ID" 2>/dev/null || true

# Regenerate charts (best-effort)
python3 "$SCRIPT_DIR/generate-charts.py" "$TRACKING_DIR" "$TRACKING_DIR/charts.html" 2>/dev/null || true