@aladac/hu 0.1.0-a1 → 0.1.0-a2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +54 -29
- package/HOOKS.md +146 -0
- package/commands/reinstall.md +6 -3
- package/hooks/session-start.sh +85 -0
- package/hooks/stop.sh +51 -0
- package/hooks/user-prompt-submit.sh +74 -0
- package/package.json +5 -2
- package/plans/gleaming-crunching-bear.md +179 -0
- package/src/commands/data.ts +877 -0
- package/src/commands/plugin.ts +216 -0
- package/src/index.ts +5 -1
- package/src/lib/claude-paths.ts +136 -0
- package/src/lib/config.ts +244 -0
- package/src/lib/db.ts +59 -0
- package/src/lib/hook-io.ts +128 -0
- package/src/lib/jsonl.ts +95 -0
- package/src/lib/schema.ts +164 -0
- package/src/lib/sync.ts +300 -0
- package/tests/lib/claude-paths.test.ts +73 -0
- package/tests/lib/config.test.ts +163 -0
- package/tests/lib/db.test.ts +230 -0
- package/tests/lib/escaping.test.ts +257 -0
- package/tests/lib/hook-io.test.ts +151 -0
- package/tests/lib/jsonl.test.ts +166 -0
- package/HOOKS-DATA-INTEGRATION.md +0 -457
- package/SAMPLE.md +0 -378
- package/TODO.md +0 -25
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Hook I/O utilities for safe data exchange with Claude Code hooks
|
|
3
|
+
* Uses temp files to avoid shell escaping issues
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import * as fs from 'node:fs';
|
|
7
|
+
import * as path from 'node:path';
|
|
8
|
+
import * as crypto from 'node:crypto';
|
|
9
|
+
import { getConfig } from './config.ts';
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Generate a unique temp file path for hook data
|
|
13
|
+
*/
|
|
14
|
+
function getTempPath(prefix: string = 'hu-hook'): string {
|
|
15
|
+
const config = getConfig();
|
|
16
|
+
const uuid = crypto.randomUUID();
|
|
17
|
+
return path.join(config.hooks.temp_dir, `${prefix}-${uuid}.json`);
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Write data to a temp file as JSON
|
|
22
|
+
* Returns the path to the created file
|
|
23
|
+
*/
|
|
24
|
+
export function writeTempJson(data: unknown, prefix?: string): string {
|
|
25
|
+
const filePath = getTempPath(prefix);
|
|
26
|
+
const content = JSON.stringify(data, null, 2);
|
|
27
|
+
fs.writeFileSync(filePath, content, 'utf-8');
|
|
28
|
+
return filePath;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Read and parse JSON from a temp file
|
|
33
|
+
*/
|
|
34
|
+
export function readTempJson<T = unknown>(filePath: string): T {
|
|
35
|
+
const content = fs.readFileSync(filePath, 'utf-8');
|
|
36
|
+
return JSON.parse(content) as T;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Delete a temp file
|
|
41
|
+
*/
|
|
42
|
+
export function deleteTempFile(filePath: string): void {
|
|
43
|
+
try {
|
|
44
|
+
fs.unlinkSync(filePath);
|
|
45
|
+
} catch {
|
|
46
|
+
// Ignore if file doesn't exist
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* Clean up old temp files based on TTL from config
|
|
52
|
+
*/
|
|
53
|
+
export function cleanupTempFiles(): number {
|
|
54
|
+
const config = getConfig();
|
|
55
|
+
const tempDir = config.hooks.temp_dir;
|
|
56
|
+
const ttlMs = config.hooks.temp_file_ttl * 1000;
|
|
57
|
+
const cutoff = Date.now() - ttlMs;
|
|
58
|
+
|
|
59
|
+
if (!fs.existsSync(tempDir)) {
|
|
60
|
+
return 0;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
let cleaned = 0;
|
|
64
|
+
const files = fs.readdirSync(tempDir);
|
|
65
|
+
|
|
66
|
+
for (const file of files) {
|
|
67
|
+
if (!file.startsWith('hu-hook-') || !file.endsWith('.json')) {
|
|
68
|
+
continue;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
const filePath = path.join(tempDir, file);
|
|
72
|
+
|
|
73
|
+
try {
|
|
74
|
+
const stat = fs.statSync(filePath);
|
|
75
|
+
if (stat.mtimeMs < cutoff) {
|
|
76
|
+
fs.unlinkSync(filePath);
|
|
77
|
+
cleaned++;
|
|
78
|
+
}
|
|
79
|
+
} catch {
|
|
80
|
+
// Ignore errors for individual files
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
return cleaned;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
/**
|
|
88
|
+
* Read hook input from stdin and write to temp file
|
|
89
|
+
* Returns the path to the temp file
|
|
90
|
+
*/
|
|
91
|
+
export async function readStdinToTemp(): Promise<string> {
|
|
92
|
+
return new Promise((resolve, reject) => {
|
|
93
|
+
let data = '';
|
|
94
|
+
|
|
95
|
+
process.stdin.setEncoding('utf-8');
|
|
96
|
+
process.stdin.on('data', (chunk) => {
|
|
97
|
+
data += chunk;
|
|
98
|
+
});
|
|
99
|
+
process.stdin.on('end', () => {
|
|
100
|
+
try {
|
|
101
|
+
const filePath = writeTempJson(JSON.parse(data), 'hu-stdin');
|
|
102
|
+
resolve(filePath);
|
|
103
|
+
} catch (err) {
|
|
104
|
+
reject(err);
|
|
105
|
+
}
|
|
106
|
+
});
|
|
107
|
+
process.stdin.on('error', reject);
|
|
108
|
+
});
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* Write hook response to stdout
|
|
113
|
+
*/
|
|
114
|
+
export function writeHookResponse(response: {
|
|
115
|
+
additionalContext?: string;
|
|
116
|
+
userMessage?: string;
|
|
117
|
+
continueSession?: boolean;
|
|
118
|
+
}): void {
|
|
119
|
+
console.log(JSON.stringify(response));
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
/**
|
|
123
|
+
* Check if hooks are enabled in config
|
|
124
|
+
*/
|
|
125
|
+
export function hooksEnabled(): boolean {
|
|
126
|
+
const config = getConfig();
|
|
127
|
+
return config.hooks.enabled;
|
|
128
|
+
}
|
package/src/lib/jsonl.ts
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* JSONL parser for Claude Code data files
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import * as fs from 'node:fs';
|
|
6
|
+
import * as readline from 'node:readline';
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Parse a JSONL file line by line (generator)
|
|
10
|
+
* Skips malformed lines with a warning
|
|
11
|
+
*/
|
|
12
|
+
export async function* parseJsonl<T = unknown>(filePath: string): AsyncGenerator<T> {
|
|
13
|
+
if (!fs.existsSync(filePath)) {
|
|
14
|
+
return;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
const fileStream = fs.createReadStream(filePath, { encoding: 'utf-8' });
|
|
18
|
+
const rl = readline.createInterface({
|
|
19
|
+
input: fileStream,
|
|
20
|
+
crlfDelay: Infinity,
|
|
21
|
+
});
|
|
22
|
+
|
|
23
|
+
let lineNum = 0;
|
|
24
|
+
for await (const line of rl) {
|
|
25
|
+
lineNum++;
|
|
26
|
+
const trimmed = line.trim();
|
|
27
|
+
if (!trimmed) {
|
|
28
|
+
continue; // Skip empty lines
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
try {
|
|
32
|
+
yield JSON.parse(trimmed) as T;
|
|
33
|
+
} catch (error) {
|
|
34
|
+
// Skip malformed lines, log warning
|
|
35
|
+
console.warn(`Warning: Malformed JSON at ${filePath}:${lineNum}`);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Read entire JSONL file into array
|
|
42
|
+
* Skips malformed lines with a warning
|
|
43
|
+
*/
|
|
44
|
+
export async function readJsonlFile<T = unknown>(filePath: string): Promise<T[]> {
|
|
45
|
+
const results: T[] = [];
|
|
46
|
+
for await (const item of parseJsonl<T>(filePath)) {
|
|
47
|
+
results.push(item);
|
|
48
|
+
}
|
|
49
|
+
return results;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Synchronous JSONL parser for smaller files
|
|
54
|
+
* Skips malformed lines with a warning
|
|
55
|
+
*/
|
|
56
|
+
export function readJsonlFileSync<T = unknown>(filePath: string): T[] {
|
|
57
|
+
if (!fs.existsSync(filePath)) {
|
|
58
|
+
return [];
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const content = fs.readFileSync(filePath, 'utf-8');
|
|
62
|
+
const lines = content.split('\n');
|
|
63
|
+
const results: T[] = [];
|
|
64
|
+
|
|
65
|
+
for (let i = 0; i < lines.length; i++) {
|
|
66
|
+
const line = lines[i].trim();
|
|
67
|
+
if (!line) {
|
|
68
|
+
continue;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
try {
|
|
72
|
+
results.push(JSON.parse(line) as T);
|
|
73
|
+
} catch (error) {
|
|
74
|
+
console.warn(`Warning: Malformed JSON at ${filePath}:${i + 1}`);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
return results;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
/**
|
|
82
|
+
* Append a single item to a JSONL file
|
|
83
|
+
*/
|
|
84
|
+
export function appendToJsonl<T>(filePath: string, item: T): void {
|
|
85
|
+
const line = JSON.stringify(item) + '\n';
|
|
86
|
+
fs.appendFileSync(filePath, line, 'utf-8');
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
/**
|
|
90
|
+
* Write array to JSONL file
|
|
91
|
+
*/
|
|
92
|
+
export function writeJsonlFile<T>(filePath: string, items: T[]): void {
|
|
93
|
+
const content = items.map(item => JSON.stringify(item)).join('\n') + '\n';
|
|
94
|
+
fs.writeFileSync(filePath, content, 'utf-8');
|
|
95
|
+
}
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database schema and migrations for hu
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import type { Database as DatabaseType } from 'better-sqlite3';
|
|
6
|
+
|
|
7
|
+
// A single, numbered schema migration. Migrations are applied in array
// order by runMigrations; each `up` runs inside a transaction together
// with its schema_version record.
interface Migration {
  version: number;
  up: (db: DatabaseType) => void;
}

const migrations: Migration[] = [
  {
    version: 1,
    up: (db) => {
      // Schema version tracking: one row per applied migration.
      db.exec(`
        CREATE TABLE IF NOT EXISTS schema_version (
          version INTEGER PRIMARY KEY,
          applied_at INTEGER NOT NULL
        );
      `);

      // Sessions index. message_count and total_cost_usd are
      // denormalized aggregates refreshed during sync (see sync.ts).
      db.exec(`
        CREATE TABLE IF NOT EXISTS sessions (
          id TEXT PRIMARY KEY,
          project TEXT NOT NULL,
          display TEXT,
          started_at INTEGER NOT NULL,
          message_count INTEGER DEFAULT 0,
          total_cost_usd REAL DEFAULT 0
        );
        CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project);
        CREATE INDEX IF NOT EXISTS idx_sessions_started_at ON sessions(started_at);
      `);

      // Messages with threading: parent_id links a message to its
      // parent in the conversation tree.
      db.exec(`
        CREATE TABLE IF NOT EXISTS messages (
          id TEXT PRIMARY KEY,
          session_id TEXT NOT NULL,
          parent_id TEXT,
          role TEXT NOT NULL,
          content TEXT,
          model TEXT,
          input_tokens INTEGER,
          output_tokens INTEGER,
          cost_usd REAL,
          duration_ms INTEGER,
          created_at INTEGER NOT NULL,
          FOREIGN KEY (session_id) REFERENCES sessions(id)
        );
        CREATE INDEX IF NOT EXISTS idx_messages_session ON messages(session_id);
        CREATE INDEX IF NOT EXISTS idx_messages_created ON messages(created_at);
      `);

      // Full-text search on message content. This is an FTS5
      // external-content table (content= / content_rowid=): the index
      // stores no duplicate copy of the text and must be kept in sync
      // via the triggers below.
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS messages_fts USING fts5(
          content,
          content=messages,
          content_rowid=rowid
        );
      `);

      // Triggers to keep FTS in sync with messages. The INSERT with the
      // 'delete' sentinel value is the documented FTS5 idiom for
      // removing a row from an external-content index.
      db.exec(`
        CREATE TRIGGER IF NOT EXISTS messages_ai AFTER INSERT ON messages BEGIN
          INSERT INTO messages_fts(rowid, content) VALUES (NEW.rowid, NEW.content);
        END;

        CREATE TRIGGER IF NOT EXISTS messages_ad AFTER DELETE ON messages BEGIN
          INSERT INTO messages_fts(messages_fts, rowid, content) VALUES('delete', OLD.rowid, OLD.content);
        END;

        CREATE TRIGGER IF NOT EXISTS messages_au AFTER UPDATE ON messages BEGIN
          INSERT INTO messages_fts(messages_fts, rowid, content) VALUES('delete', OLD.rowid, OLD.content);
          INSERT INTO messages_fts(rowid, content) VALUES (NEW.rowid, NEW.content);
        END;
      `);

      // Todos with status ('pending' | 'in_progress' | 'completed' at
      // the application level; not enforced by the schema).
      db.exec(`
        CREATE TABLE IF NOT EXISTS todos (
          id INTEGER PRIMARY KEY AUTOINCREMENT,
          session_id TEXT NOT NULL,
          content TEXT NOT NULL,
          status TEXT NOT NULL DEFAULT 'pending',
          active_form TEXT,
          FOREIGN KEY (session_id) REFERENCES sessions(id)
        );
        CREATE INDEX IF NOT EXISTS idx_todos_session ON todos(session_id);
        CREATE INDEX IF NOT EXISTS idx_todos_status ON todos(status);
      `);

      // Tool usage tracking: one row per tool invocation observed in a
      // transcript. output_json/duration_ms are nullable — sync only
      // populates session_id, message_id, tool_name, input_json,
      // created_at (see sync.ts).
      db.exec(`
        CREATE TABLE IF NOT EXISTS tool_usage (
          id INTEGER PRIMARY KEY AUTOINCREMENT,
          session_id TEXT NOT NULL,
          message_id TEXT,
          tool_name TEXT NOT NULL,
          input_json TEXT,
          output_json TEXT,
          duration_ms INTEGER,
          created_at INTEGER NOT NULL,
          FOREIGN KEY (session_id) REFERENCES sessions(id)
        );
        CREATE INDEX IF NOT EXISTS idx_tool_usage_session ON tool_usage(session_id);
        CREATE INDEX IF NOT EXISTS idx_tool_usage_tool ON tool_usage(tool_name);
      `);

      // Sync tracking: per-source bookkeeping consumed by sync.ts.
      db.exec(`
        CREATE TABLE IF NOT EXISTS sync_state (
          source TEXT PRIMARY KEY,
          last_sync_at INTEGER NOT NULL,
          last_modified_at INTEGER,
          checksum TEXT
        );
      `);
    },
  },
];
|
|
126
|
+
|
|
127
|
+
/**
|
|
128
|
+
* Get current schema version
|
|
129
|
+
*/
|
|
130
|
+
export function getSchemaVersion(db: DatabaseType): number {
|
|
131
|
+
try {
|
|
132
|
+
const row = db.prepare('SELECT MAX(version) as version FROM schema_version').get() as { version: number | null } | undefined;
|
|
133
|
+
return row?.version ?? 0;
|
|
134
|
+
} catch {
|
|
135
|
+
// Table doesn't exist yet
|
|
136
|
+
return 0;
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* Run pending migrations
|
|
142
|
+
*/
|
|
143
|
+
export function runMigrations(db: DatabaseType): void {
|
|
144
|
+
const currentVersion = getSchemaVersion(db);
|
|
145
|
+
|
|
146
|
+
for (const migration of migrations) {
|
|
147
|
+
if (migration.version > currentVersion) {
|
|
148
|
+
db.transaction(() => {
|
|
149
|
+
migration.up(db);
|
|
150
|
+
db.prepare('INSERT INTO schema_version (version, applied_at) VALUES (?, ?)').run(
|
|
151
|
+
migration.version,
|
|
152
|
+
Date.now()
|
|
153
|
+
);
|
|
154
|
+
})();
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
/**
|
|
160
|
+
* Initialize database with schema
|
|
161
|
+
*/
|
|
162
|
+
export function initializeSchema(db: DatabaseType): void {
|
|
163
|
+
runMigrations(db);
|
|
164
|
+
}
|
package/src/lib/sync.ts
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sync Claude Code data files to SQLite database
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import type { Database as DatabaseType } from 'better-sqlite3';
|
|
6
|
+
import * as fs from 'node:fs';
|
|
7
|
+
import * as path from 'node:path';
|
|
8
|
+
import {
|
|
9
|
+
getHistoryPath,
|
|
10
|
+
getProjectsDir,
|
|
11
|
+
getTodosDir,
|
|
12
|
+
listProjects,
|
|
13
|
+
listSessionsInProject,
|
|
14
|
+
decodeProjectPath,
|
|
15
|
+
} from './claude-paths.ts';
|
|
16
|
+
import { readJsonlFileSync } from './jsonl.ts';
|
|
17
|
+
import { getConfig } from './config.ts';
|
|
18
|
+
|
|
19
|
+
// Types for Claude Code data structures

// One line of history.jsonl: a record of a session launch.
interface HistoryEntry {
  display: string;    // prompt text stored in sessions.display
  timestamp: number;  // stored as sessions.started_at — presumably epoch ms; TODO confirm
  project: string;    // project identifier stored in sessions.project
  sessionId: string;  // primary key for the sessions table
}

// One line of a session transcript (<projectDir>/<sessionId>.jsonl).
// Only entries carrying `message` and `timestamp` are imported; the
// other `type` values are bookkeeping records that sync skips.
interface MessageEntry {
  uuid: string;           // primary key for the messages table
  parentUuid?: string;    // threading: parent message in the conversation tree
  sessionId?: string;     // may differ from the file's session id; sync prefers the file name
  type: 'user' | 'assistant' | 'summary' | 'file-history-snapshot' | 'queue-operation';
  timestamp?: string;     // parsed with `new Date(...)` — presumably ISO-8601; TODO confirm
  message?: {
    role: string;
    // Plain text, or an array of content blocks (e.g. tool_use) that
    // sync serializes to JSON for storage.
    content: string | Array<{ type: string; [key: string]: unknown }>;
    model?: string;
    usage?: {
      input_tokens?: number;
      output_tokens?: number;
    };
  };
  costUSD?: number;       // stored in messages.cost_usd
  durationMs?: number;    // stored in messages.duration_ms
}

// One element of a per-session todos JSON file (the file is an array
// of these).
interface TodoEntry {
  content: string;
  status: 'pending' | 'in_progress' | 'completed';
  activeForm?: string;  // stored in todos.active_form; semantics not shown here — verify against writer
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Get last sync time for a source
|
|
54
|
+
*/
|
|
55
|
+
export function getLastSyncTime(db: DatabaseType, source: string): number {
|
|
56
|
+
const row = db.prepare('SELECT last_sync_at FROM sync_state WHERE source = ?').get(source) as { last_sync_at: number } | undefined;
|
|
57
|
+
return row?.last_sync_at ?? 0;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Update sync state for a source
|
|
62
|
+
*/
|
|
63
|
+
export function updateSyncState(db: DatabaseType, source: string, lastModified?: number): void {
|
|
64
|
+
db.prepare(`
|
|
65
|
+
INSERT OR REPLACE INTO sync_state (source, last_sync_at, last_modified_at)
|
|
66
|
+
VALUES (?, ?, ?)
|
|
67
|
+
`).run(source, Date.now(), lastModified ?? null);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Check if auto-sync is needed based on config interval
|
|
72
|
+
*/
|
|
73
|
+
export function needsSync(db: DatabaseType, source: string): boolean {
|
|
74
|
+
const config = getConfig();
|
|
75
|
+
const interval = config.sync.auto_sync_interval * 1000; // Convert to ms
|
|
76
|
+
|
|
77
|
+
if (interval === 0) {
|
|
78
|
+
return false; // Manual sync only
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
const lastSync = getLastSyncTime(db, source);
|
|
82
|
+
return Date.now() - lastSync > interval;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
|
|
86
|
+
* Sync history.jsonl to sessions table
|
|
87
|
+
*/
|
|
88
|
+
export function syncHistory(db: DatabaseType): number {
|
|
89
|
+
const historyPath = getHistoryPath();
|
|
90
|
+
|
|
91
|
+
if (!fs.existsSync(historyPath)) {
|
|
92
|
+
return 0;
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
const entries = readJsonlFileSync<HistoryEntry>(historyPath);
|
|
96
|
+
let count = 0;
|
|
97
|
+
|
|
98
|
+
const insertSession = db.prepare(`
|
|
99
|
+
INSERT OR IGNORE INTO sessions (id, project, display, started_at)
|
|
100
|
+
VALUES (?, ?, ?, ?)
|
|
101
|
+
`);
|
|
102
|
+
|
|
103
|
+
const transaction = db.transaction(() => {
|
|
104
|
+
for (const entry of entries) {
|
|
105
|
+
const result = insertSession.run(
|
|
106
|
+
entry.sessionId,
|
|
107
|
+
entry.project,
|
|
108
|
+
entry.display,
|
|
109
|
+
entry.timestamp
|
|
110
|
+
);
|
|
111
|
+
if (result.changes > 0) {
|
|
112
|
+
count++;
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
});
|
|
116
|
+
|
|
117
|
+
transaction();
|
|
118
|
+
updateSyncState(db, 'history');
|
|
119
|
+
|
|
120
|
+
return count;
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
/**
|
|
124
|
+
* Sync session transcripts from projects directory
|
|
125
|
+
*/
|
|
126
|
+
export function syncSessions(db: DatabaseType): number {
|
|
127
|
+
const projects = listProjects();
|
|
128
|
+
let totalMessages = 0;
|
|
129
|
+
|
|
130
|
+
const insertMessage = db.prepare(`
|
|
131
|
+
INSERT OR IGNORE INTO messages (
|
|
132
|
+
id, session_id, parent_id, role, content, model,
|
|
133
|
+
input_tokens, output_tokens, cost_usd, duration_ms, created_at
|
|
134
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
135
|
+
`);
|
|
136
|
+
|
|
137
|
+
const updateSessionStats = db.prepare(`
|
|
138
|
+
UPDATE sessions SET
|
|
139
|
+
message_count = (SELECT COUNT(*) FROM messages WHERE session_id = ?),
|
|
140
|
+
total_cost_usd = (SELECT COALESCE(SUM(cost_usd), 0) FROM messages WHERE session_id = ?)
|
|
141
|
+
WHERE id = ?
|
|
142
|
+
`);
|
|
143
|
+
|
|
144
|
+
const insertToolUsage = db.prepare(`
|
|
145
|
+
INSERT OR IGNORE INTO tool_usage (
|
|
146
|
+
session_id, message_id, tool_name, input_json, created_at
|
|
147
|
+
) VALUES (?, ?, ?, ?, ?)
|
|
148
|
+
`);
|
|
149
|
+
|
|
150
|
+
for (const project of projects) {
|
|
151
|
+
const sessionIds = listSessionsInProject(project.dir);
|
|
152
|
+
|
|
153
|
+
for (const sessionId of sessionIds) {
|
|
154
|
+
const sessionPath = path.join(project.dir, `${sessionId}.jsonl`);
|
|
155
|
+
const messages = readJsonlFileSync<MessageEntry>(sessionPath);
|
|
156
|
+
|
|
157
|
+
const transaction = db.transaction(() => {
|
|
158
|
+
// Ensure session exists
|
|
159
|
+
db.prepare(`
|
|
160
|
+
INSERT OR IGNORE INTO sessions (id, project, started_at)
|
|
161
|
+
VALUES (?, ?, ?)
|
|
162
|
+
`).run(sessionId, project.path, Date.now());
|
|
163
|
+
|
|
164
|
+
for (const msg of messages) {
|
|
165
|
+
// Skip entries without message property (summaries, file-history-snapshot, queue-operation)
|
|
166
|
+
if (!msg.message || !msg.timestamp) {
|
|
167
|
+
continue;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
// Extract content as string
|
|
171
|
+
let content: string;
|
|
172
|
+
if (typeof msg.message.content === 'string') {
|
|
173
|
+
content = msg.message.content;
|
|
174
|
+
} else {
|
|
175
|
+
content = JSON.stringify(msg.message.content);
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
// Use the session ID from the file name, not from the message
|
|
179
|
+
// (msg.sessionId might reference a different session that doesn't exist)
|
|
180
|
+
const effectiveSessionId = sessionId;
|
|
181
|
+
|
|
182
|
+
const result = insertMessage.run(
|
|
183
|
+
msg.uuid,
|
|
184
|
+
effectiveSessionId,
|
|
185
|
+
msg.parentUuid || null,
|
|
186
|
+
msg.message.role,
|
|
187
|
+
content,
|
|
188
|
+
msg.message.model || null,
|
|
189
|
+
msg.message.usage?.input_tokens || null,
|
|
190
|
+
msg.message.usage?.output_tokens || null,
|
|
191
|
+
msg.costUSD || null,
|
|
192
|
+
msg.durationMs || null,
|
|
193
|
+
new Date(msg.timestamp).getTime()
|
|
194
|
+
);
|
|
195
|
+
|
|
196
|
+
if (result.changes > 0) {
|
|
197
|
+
totalMessages++;
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
// Extract tool usage from assistant messages
|
|
201
|
+
if (msg.message.role === 'assistant' && Array.isArray(msg.message.content)) {
|
|
202
|
+
for (const block of msg.message.content) {
|
|
203
|
+
if (block.type === 'tool_use') {
|
|
204
|
+
insertToolUsage.run(
|
|
205
|
+
effectiveSessionId,
|
|
206
|
+
msg.uuid,
|
|
207
|
+
(block as { name?: string }).name || 'unknown',
|
|
208
|
+
JSON.stringify((block as { input?: unknown }).input || {}),
|
|
209
|
+
new Date(msg.timestamp).getTime()
|
|
210
|
+
);
|
|
211
|
+
}
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
// Update session stats
|
|
217
|
+
updateSessionStats.run(sessionId, sessionId, sessionId);
|
|
218
|
+
});
|
|
219
|
+
|
|
220
|
+
transaction();
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
updateSyncState(db, 'sessions');
|
|
225
|
+
return totalMessages;
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
/**
|
|
229
|
+
* Sync todos from todos directory
|
|
230
|
+
*/
|
|
231
|
+
export function syncTodos(db: DatabaseType): number {
|
|
232
|
+
const todosDir = getTodosDir();
|
|
233
|
+
|
|
234
|
+
if (!fs.existsSync(todosDir)) {
|
|
235
|
+
return 0;
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
const todoFiles = fs.readdirSync(todosDir).filter(f => f.endsWith('.json'));
|
|
239
|
+
let count = 0;
|
|
240
|
+
|
|
241
|
+
const deleteTodos = db.prepare('DELETE FROM todos WHERE session_id = ?');
|
|
242
|
+
const insertTodo = db.prepare(`
|
|
243
|
+
INSERT INTO todos (session_id, content, status, active_form)
|
|
244
|
+
VALUES (?, ?, ?, ?)
|
|
245
|
+
`);
|
|
246
|
+
|
|
247
|
+
const transaction = db.transaction(() => {
|
|
248
|
+
for (const file of todoFiles) {
|
|
249
|
+
const sessionId = file.replace('.json', '');
|
|
250
|
+
const filePath = path.join(todosDir, file);
|
|
251
|
+
|
|
252
|
+
try {
|
|
253
|
+
const content = fs.readFileSync(filePath, 'utf-8');
|
|
254
|
+
const todos = JSON.parse(content) as TodoEntry[];
|
|
255
|
+
|
|
256
|
+
// Clear existing todos for this session
|
|
257
|
+
deleteTodos.run(sessionId);
|
|
258
|
+
|
|
259
|
+
for (const todo of todos) {
|
|
260
|
+
insertTodo.run(
|
|
261
|
+
sessionId,
|
|
262
|
+
todo.content,
|
|
263
|
+
todo.status,
|
|
264
|
+
todo.activeForm || null
|
|
265
|
+
);
|
|
266
|
+
count++;
|
|
267
|
+
}
|
|
268
|
+
} catch (error) {
|
|
269
|
+
console.warn(`Warning: Failed to parse todos file ${filePath}`);
|
|
270
|
+
}
|
|
271
|
+
}
|
|
272
|
+
});
|
|
273
|
+
|
|
274
|
+
transaction();
|
|
275
|
+
updateSyncState(db, 'todos');
|
|
276
|
+
|
|
277
|
+
return count;
|
|
278
|
+
}
|
|
279
|
+
|
|
280
|
+
/**
|
|
281
|
+
* Run full sync of all data sources
|
|
282
|
+
*/
|
|
283
|
+
export function syncAll(db: DatabaseType): { history: number; messages: number; todos: number } {
|
|
284
|
+
const history = syncHistory(db);
|
|
285
|
+
const messages = syncSessions(db);
|
|
286
|
+
const todos = syncTodos(db);
|
|
287
|
+
|
|
288
|
+
return { history, messages, todos };
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
/**
|
|
292
|
+
* Run sync if needed (based on config interval)
|
|
293
|
+
*/
|
|
294
|
+
export function syncIfNeeded(db: DatabaseType): boolean {
|
|
295
|
+
if (needsSync(db, 'history') || needsSync(db, 'sessions') || needsSync(db, 'todos')) {
|
|
296
|
+
syncAll(db);
|
|
297
|
+
return true;
|
|
298
|
+
}
|
|
299
|
+
return false;
|
|
300
|
+
}
|