agentgui 1.0.831 → 1.0.833

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,4 +1,13 @@
1
+ ## [Unreleased]
2
+
3
+ ### Refactor
4
+ - Split database.js (651L) into database.js (81L) + database-schema.js (176L) + database-migrations.js (150L) + database-migrations-acp.js (134L); all files ≤200L; no circular imports; migration functions receive db as parameter
5
+ - Split claude-runner.js (1267L) into claude-runner.js (56L, AgentRunner class+helpers), claude-runner-direct.js (117L, runDirect method), claude-runner-acp.js (156L, runACP+_runACPOnce methods), claude-runner-agents.js (105L, AgentRegistry+registrations using acp-protocol.js), claude-runner-run.js (50L, runClaudeWithStreaming export); server.js updated to import from claude-runner-run.js
6
+
1
7
  ## 2026-04-12
8
+ - refactor: reduce speech-manager.js from 207L to 200L by removing excess blank lines
9
+ - refactor: split ws-handlers-session.js (209L) into ws-handlers-session.js (94L) + ws-handlers-session2.js (106L); agent.auth/authstat/update handlers moved to session2; server.js imports and calls both
10
+ - refactor: split ws-handlers-conv.js (283L) into ws-handlers-conv.js (139L, conv.ls/tags/new/get/upd/del/del.all/full/chunks/chunks.earlier/reorder) and ws-handlers-conv2.js (169L, conv.export/import/sync/search/prune/cancel/inject/steer); removed 2 comment lines; server.js imports and calls both
2
11
  - refactor: split client.js (3212L) into 22 files ≤200L each; all 122 methods preserved including 2 extracted helpers (_setupUIButtonEvents, _setupUIWindowEvents, _loadConvRender); index.html updated with correct load order
3
12
  - refactor: split streaming-renderer.js (2193L) into 15 files ≤200L each; all 75 prototype methods and 8 static methods preserved; index.html updated with correct load order
4
13
 
@@ -0,0 +1,133 @@
1
+ import fs from 'fs';
2
+
3
/**
 * Adds the ACP (agent protocol) schema on top of an existing database:
 * a `metadata` column on conversations, run-related TEXT columns on sessions,
 * and the thread_states / checkpoints / run_metadata tables with indexes.
 * Idempotent — every ALTER is guarded by a PRAGMA table_info check and every
 * CREATE uses IF NOT EXISTS. Errors are logged and swallowed so a failed
 * migration does not abort startup.
 *
 * @param {object} db - database handle exposing prepare()/exec()
 *   (better-sqlite3 style — assumed from usage; confirm in database.js).
 */
export function migrateACPSchema(db) {
  try {
    console.log('[Migration] Running ACP schema migration...');

    // conversations.metadata — free-form JSON blob, defaults to "{}".
    const convCols = db.prepare("PRAGMA table_info(conversations)").all().map(c => c.name);
    if (!convCols.includes('metadata')) {
      db.exec('ALTER TABLE conversations ADD COLUMN metadata TEXT DEFAULT "{}"');
      console.log('[Migration] Added metadata column to conversations');
    }

    // Run-model columns on sessions; all TEXT, added only when missing.
    // (Plain array iteration — the original wrapped each name in a
    // single-element array purely to destructure it back out.)
    const sessCols = db.prepare("PRAGMA table_info(sessions)").all().map(c => c.name);
    for (const col of ['run_id', 'input', 'config', 'interrupt', 'claudeSessionId']) {
      if (!sessCols.includes(col)) {
        db.exec(`ALTER TABLE sessions ADD COLUMN ${col} TEXT`);
        console.log(`[Migration] Added ${col} column to sessions`);
      }
    }

    // ACP bookkeeping tables + supporting indexes.
    db.exec(`
      CREATE TABLE IF NOT EXISTS thread_states (
        id TEXT PRIMARY KEY, thread_id TEXT NOT NULL,
        checkpoint_id TEXT NOT NULL, state_data TEXT NOT NULL, created_at INTEGER NOT NULL,
        FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
      );
      CREATE INDEX IF NOT EXISTS idx_thread_states_thread ON thread_states(thread_id);
      CREATE INDEX IF NOT EXISTS idx_thread_states_checkpoint ON thread_states(checkpoint_id);
      CREATE INDEX IF NOT EXISTS idx_thread_states_created ON thread_states(created_at);
      CREATE TABLE IF NOT EXISTS checkpoints (
        id TEXT PRIMARY KEY, thread_id TEXT NOT NULL,
        checkpoint_name TEXT, sequence INTEGER NOT NULL, created_at INTEGER NOT NULL,
        FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
      );
      CREATE INDEX IF NOT EXISTS idx_checkpoints_thread ON checkpoints(thread_id);
      CREATE INDEX IF NOT EXISTS idx_checkpoints_sequence ON checkpoints(thread_id, sequence);
      CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoints_unique ON checkpoints(thread_id, sequence);
      CREATE TABLE IF NOT EXISTS run_metadata (
        run_id TEXT PRIMARY KEY, thread_id TEXT, agent_id TEXT NOT NULL,
        status TEXT NOT NULL, input TEXT, config TEXT, webhook_url TEXT,
        created_at INTEGER NOT NULL, updated_at INTEGER NOT NULL,
        FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE,
        FOREIGN KEY (run_id) REFERENCES sessions(id) ON DELETE CASCADE
      );
      CREATE INDEX IF NOT EXISTS idx_run_metadata_thread ON run_metadata(thread_id);
      CREATE INDEX IF NOT EXISTS idx_run_metadata_agent ON run_metadata(agent_id);
      CREATE INDEX IF NOT EXISTS idx_run_metadata_status ON run_metadata(status);
      CREATE INDEX IF NOT EXISTS idx_run_metadata_created ON run_metadata(created_at);
    `);
    console.log('[Migration] ACP schema migration complete');
  } catch (err) {
    // Deliberately non-fatal: log and let startup continue.
    console.error('[Migration] ACP schema migration error:', err.message);
  }
}
52
+
53
/**
 * Backfills the messages table for imported conversations that reference a
 * JSONL transcript file (sourcePath) but have zero rows in messages.
 * Best-effort by design: missing transcript files and malformed JSONL lines
 * are skipped silently; per-conversation read errors are logged and do not
 * abort the transaction.
 *
 * @param {object} db - database handle exposing prepare()/transaction()
 *   (better-sqlite3 style — assumed from usage; confirm in database.js).
 */
export function migrateBackfillMessages(db) {
  try {
    // Imported, non-deleted conversations with no messages at all.
    const emptyImported = db.prepare(`
      SELECT c.id, c.sourcePath FROM conversations c
      LEFT JOIN messages m ON c.id = m.conversationId
      WHERE c.sourcePath IS NOT NULL AND c.status != 'deleted'
      GROUP BY c.id HAVING COUNT(m.id) = 0
    `).all();
    if (emptyImported.length === 0) return;
    console.log(`[Migration] Backfilling messages for ${emptyImported.length} imported conversation(s)`);
    const insertMsg = db.prepare(`INSERT OR IGNORE INTO messages (id, conversationId, role, content, created_at) VALUES (?, ?, ?, ?, ?)`);
    const backfill = db.transaction(() => {
      for (const conv of emptyImported) {
        if (!fs.existsSync(conv.sourcePath)) continue; // transcript moved or deleted
        try {
          const lines = fs.readFileSync(conv.sourcePath, 'utf-8').split('\n');
          let count = 0;
          for (const line of lines) {
            if (!line.trim()) continue;
            try {
              const obj = JSON.parse(line);
              // slice(2, 11) replaces the deprecated substr(2, 9): same 9 chars.
              const msgId = obj.uuid || `msg-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
              const ts = obj.timestamp ? new Date(obj.timestamp).getTime() : Date.now();
              if (obj.type === 'user' && obj.message?.content) {
                const raw = obj.message.content;
                const text = typeof raw === 'string' ? raw
                  : Array.isArray(raw) ? raw.filter(c => c.type === 'text').map(c => c.text).join('\n')
                  : JSON.stringify(raw);
                // Skip tool-result payloads that appear as "user" entries.
                if (text && !text.startsWith('[{"tool_use_id"')) { insertMsg.run(msgId, conv.id, 'user', text, ts); count++; }
              } else if (obj.type === 'assistant' && obj.message?.content) {
                const raw = obj.message.content;
                const text = Array.isArray(raw)
                  ? raw.filter(c => c.type === 'text' && c.text).map(c => c.text).join('\n\n')
                  : typeof raw === 'string' ? raw : '';
                if (text) { insertMsg.run(msgId, conv.id, 'assistant', text, ts); count++; }
              }
            } catch (_) {} // malformed JSONL line — intentionally ignored
          }
          if (count > 0) console.log(`[Migration] Backfilled ${count} messages for conversation ${conv.id}`);
        } catch (e) {
          console.error(`[Migration] Error backfilling ${conv.id}:`, e.message);
        }
      }
    });
    backfill();
  } catch (err) {
    console.error('[Migration] Backfill error:', err.message);
  }
}
102
+
103
/**
 * Creates the FTS5 full-text index over messages and seeds it from the
 * existing messages table. Runs only once: if messages_fts already exists
 * this is a no-op. Errors (e.g. an SQLite build without FTS5) are logged,
 * never thrown.
 *
 * @param {object} db - database handle exposing prepare()/exec()/transaction().
 */
export function migrateFTS(db) {
  try {
    const ftsTable = db
      .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='messages_fts'")
      .get();
    if (ftsTable) return; // index already built — nothing to do

    db.exec("CREATE VIRTUAL TABLE messages_fts USING fts5(content, conversationId UNINDEXED, role UNINDEXED, content_rowid='rowid')");
    const existing = db.prepare("SELECT rowid, content, conversationId, role FROM messages").all();
    if (existing.length === 0) return; // nothing to seed

    const insert = db.prepare("INSERT INTO messages_fts(rowid, content, conversationId, role) VALUES (?, ?, ?, ?)");
    const seed = db.transaction(() => {
      for (const row of existing) insert.run(row.rowid, row.content, row.conversationId, row.role);
    });
    seed();
    console.log(`[Migration] FTS5 index created with ${existing.length} messages`);
  } catch (err) {
    console.error('[Migration] FTS5 error:', err.message);
  }
}
120
+
121
/**
 * Switches the database's auto_vacuum mode to INCREMENTAL (mode 2) if it is
 * not already. Errors are logged and swallowed.
 *
 * NOTE(review): per SQLite docs, changing auto_vacuum on an existing
 * non-empty database only takes effect after a full VACUUM, which this
 * function deliberately skips (see the log message) — confirm that a VACUUM
 * happens elsewhere, or the pragma may not apply until the next one.
 *
 * @param {object} db - database handle exposing prepare()/exec().
 */
export function migrateAutoVacuum(db) {
  try {
    const row = db.prepare('PRAGMA auto_vacuum').get();
    // better-sqlite3 returns { auto_vacuum: n }; tolerate a bare value too.
    const current = row?.auto_vacuum ?? row;
    if (current === 2) return; // already INCREMENTAL
    console.log('[Migration] Enabling incremental auto_vacuum (one-time VACUUM)...');
    db.exec('PRAGMA auto_vacuum = INCREMENTAL');
    console.log('[Migration] auto_vacuum = INCREMENTAL enabled (VACUUM skipped)');
  } catch (err) {
    console.error('[Migration] auto_vacuum setup error:', err.message);
  }
}
@@ -0,0 +1,149 @@
1
+ import fs from 'fs';
2
+
3
/**
 * One-time migration from the legacy JSON datastore into SQLite.
 * Reads oldJsonPath, inserts every record inside a single transaction, then
 * renames the file to "<path>.migrated" so the migration never runs twice.
 * Missing file is a no-op; any error is logged and swallowed (the rename is
 * then skipped, so a failed migration is retried on the next startup).
 *
 * @param {object} db - database handle exposing prepare()/transaction().
 * @param {string} oldJsonPath - path to the legacy JSON file.
 */
export function migrateFromJson(db, oldJsonPath) {
  if (!fs.existsSync(oldJsonPath)) return;
  try {
    const data = JSON.parse(fs.readFileSync(oldJsonPath, 'utf-8'));
    const runMigration = db.transaction(() => {
      // Each statement is prepared ONCE per table (the original re-prepared
      // the identical statement for every row inside the loops).
      if (data.conversations) {
        const stmt = db.prepare(
          `INSERT OR REPLACE INTO conversations (id, agentId, title, created_at, updated_at, status) VALUES (?, ?, ?, ?, ?, ?)`
        );
        for (const id in data.conversations) {
          const conv = data.conversations[id];
          stmt.run(conv.id, conv.agentId, conv.title || null, conv.created_at, conv.updated_at, conv.status || 'active');
        }
      }
      if (data.messages) {
        const stmt = db.prepare(
          `INSERT OR REPLACE INTO messages (id, conversationId, role, content, created_at) VALUES (?, ?, ?, ?, ?)`
        );
        for (const id in data.messages) {
          const msg = data.messages[id];
          const contentStr = typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content);
          stmt.run(msg.id, msg.conversationId, msg.role, contentStr, msg.created_at);
        }
      }
      if (data.sessions) {
        const stmt = db.prepare(
          `INSERT OR REPLACE INTO sessions (id, conversationId, status, started_at, completed_at, response, error) VALUES (?, ?, ?, ?, ?, ?, ?)`
        );
        for (const id in data.sessions) {
          const sess = data.sessions[id];
          const responseStr = sess.response ? (typeof sess.response === 'string' ? sess.response : JSON.stringify(sess.response)) : null;
          const errorStr = sess.error ? (typeof sess.error === 'string' ? sess.error : JSON.stringify(sess.error)) : null;
          stmt.run(sess.id, sess.conversationId, sess.status, sess.started_at, sess.completed_at || null, responseStr, errorStr);
        }
      }
      if (data.events) {
        const stmt = db.prepare(
          `INSERT OR REPLACE INTO events (id, type, conversationId, sessionId, data, created_at) VALUES (?, ?, ?, ?, ?, ?)`
        );
        for (const id in data.events) {
          const evt = data.events[id];
          const dataStr = typeof evt.data === 'string' ? evt.data : JSON.stringify(evt.data || {});
          stmt.run(evt.id, evt.type, evt.conversationId || null, evt.sessionId || null, dataStr, evt.created_at);
        }
      }
      if (data.idempotencyKeys) {
        const stmt = db.prepare(
          `INSERT OR REPLACE INTO idempotencyKeys (key, value, created_at, ttl) VALUES (?, ?, ?, ?)`
        );
        for (const key in data.idempotencyKeys) {
          const entry = data.idempotencyKeys[key];
          const valueStr = typeof entry.value === 'string' ? entry.value : JSON.stringify(entry.value || {});
          // Explicit radix: parseInt without one is a classic footgun.
          const ttl = typeof entry.ttl === 'number' ? entry.ttl : (entry.ttl ? Number.parseInt(entry.ttl, 10) : 0);
          stmt.run(key, valueStr, entry.created_at, ttl);
        }
      }
    });
    runMigration();
    // Rename only after a successful commit, so the source survives a failure.
    fs.renameSync(oldJsonPath, `${oldJsonPath}.migrated`);
    console.log('Migrated data from JSON to SQLite');
  } catch (e) {
    console.error('Error during migration:', e.message);
  }
}
63
+
64
/**
 * Creates the ACP tables (thread_states, checkpoints, run_metadata), adds the
 * ACP columns to conversations/sessions, and builds the supporting indexes.
 * Everything runs inside one transaction; errors are logged, never thrown.
 *
 * @param {object} db - database handle exposing prepare()/exec()/transaction().
 */
export function migrateToACP(db) {
  try {
    const applyAll = db.transaction(() => {
      // Thread state snapshots, optionally linked to a checkpoint.
      db.exec(`
        CREATE TABLE IF NOT EXISTS thread_states (
          id TEXT PRIMARY KEY, thread_id TEXT NOT NULL, checkpoint_id TEXT,
          state_data TEXT NOT NULL, created_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE,
          FOREIGN KEY (checkpoint_id) REFERENCES checkpoints(id) ON DELETE SET NULL
        )
      `);
      // Named, ordered checkpoints per thread.
      db.exec(`
        CREATE TABLE IF NOT EXISTS checkpoints (
          id TEXT PRIMARY KEY, thread_id TEXT NOT NULL, checkpoint_name TEXT NOT NULL,
          sequence INTEGER NOT NULL, created_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
        )
      `);
      // Per-run bookkeeping (status, input/config payloads, webhook).
      db.exec(`
        CREATE TABLE IF NOT EXISTS run_metadata (
          id TEXT PRIMARY KEY, run_id TEXT NOT NULL UNIQUE, thread_id TEXT,
          agent_id TEXT NOT NULL, status TEXT NOT NULL DEFAULT 'pending',
          input TEXT, config TEXT, webhook_url TEXT,
          created_at INTEGER NOT NULL, updated_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
        )
      `);
      // Column additions are guarded so reruns are harmless.
      const convHas = new Set(db.prepare("PRAGMA table_info(conversations)").all().map(c => c.name));
      if (!convHas.has('metadata')) db.exec('ALTER TABLE conversations ADD COLUMN metadata TEXT');
      const sessHas = new Set(db.prepare("PRAGMA table_info(sessions)").all().map(c => c.name));
      for (const col of ['run_id', 'input', 'config', 'interrupt']) {
        if (!sessHas.has(col)) db.exec(`ALTER TABLE sessions ADD COLUMN ${col} TEXT`);
      }
      db.exec(`
        CREATE INDEX IF NOT EXISTS idx_thread_states_thread ON thread_states(thread_id);
        CREATE INDEX IF NOT EXISTS idx_thread_states_checkpoint ON thread_states(checkpoint_id);
        CREATE INDEX IF NOT EXISTS idx_thread_states_created ON thread_states(created_at);
        CREATE INDEX IF NOT EXISTS idx_checkpoints_thread ON checkpoints(thread_id);
        CREATE INDEX IF NOT EXISTS idx_checkpoints_sequence ON checkpoints(thread_id, sequence);
        CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoints_unique_seq ON checkpoints(thread_id, sequence);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_run_id ON run_metadata(run_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_thread ON run_metadata(thread_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_status ON run_metadata(status);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_agent ON run_metadata(agent_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_created ON run_metadata(created_at);
        CREATE INDEX IF NOT EXISTS idx_sessions_run_id ON sessions(run_id);
      `);
    });
    applyAll();
  } catch (err) {
    console.error('[Migration] ACP schema migration error:', err.message);
  }
}
117
+
118
/**
 * Brings an older conversations table up to date by adding any missing
 * columns (import metadata, streaming state, pinning/tag/sort fields).
 * Indexes are only (re)created when at least one column was added.
 * Errors are logged and swallowed.
 *
 * @param {object} db - database handle exposing prepare()/exec().
 */
export function migrateConversationColumns(db) {
  try {
    const existing = new Set(db.prepare("PRAGMA table_info(conversations)").all().map(r => r.name));
    // [name, SQL definition] pairs, in the order they should be added.
    const wanted = [
      ['agentType', 'TEXT'], ['source', 'TEXT DEFAULT "gui"'], ['externalId', 'TEXT'],
      ['firstPrompt', 'TEXT'], ['messageCount', 'INTEGER DEFAULT 0'], ['projectPath', 'TEXT'],
      ['gitBranch', 'TEXT'], ['sourcePath', 'TEXT'], ['lastSyncedAt', 'INTEGER'],
      ['workingDirectory', 'TEXT'], ['claudeSessionId', 'TEXT'], ['isStreaming', 'INTEGER DEFAULT 0'],
      ['model', 'TEXT'], ['subAgent', 'TEXT'], ['pinned', 'INTEGER DEFAULT 0'],
      ['tags', 'TEXT'], ['sortOrder', 'INTEGER DEFAULT 0'],
    ];
    let added = false;
    for (const [name, def] of wanted) {
      if (existing.has(name)) continue;
      db.exec(`ALTER TABLE conversations ADD COLUMN ${name} ${def}`);
      console.log(`[Migration] Added column ${name} to conversations table`);
      added = true;
    }
    if (!added) return;
    try {
      db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_external ON conversations(externalId)`);
      db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_agent_type ON conversations(agentType)`);
      db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_source ON conversations(source)`);
    } catch (e) {
      // Index failures are non-fatal — columns are already in place.
      console.warn('[Migration] Index creation warning:', e.message);
    }
  } catch (err) {
    console.error('[Migration] Error:', err.message);
  }
}
@@ -0,0 +1,175 @@
1
+ import fs from 'fs';
2
+
3
/**
 * Creates the full base schema for the application database in one batch.
 * Every CREATE uses IF NOT EXISTS, so this is safe to call on every startup
 * against an already-initialized database as well as a fresh one.
 *
 * Tables created (with their indexes):
 *   conversations, messages, sessions, events, idempotencyKeys,
 *   stream_updates, chunks, voice_cache, tool_installations,
 *   tool_install_history, workflow_runs, oauth_tokens.
 *
 * ACP-specific tables/columns are NOT created here — see the separate
 * migration functions (database-migrations*.js).
 *
 * @param {object} db - database handle exposing exec() (better-sqlite3
 *   style — assumed from usage; confirm against database.js).
 */
export function initSchema(db) {
  // Single exec: SQLite runs the statements sequentially; foreign-key
  // targets are created before the tables that reference them.
  db.exec(`
    CREATE TABLE IF NOT EXISTS conversations (
      id TEXT PRIMARY KEY,
      agentId TEXT NOT NULL,
      title TEXT,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL,
      status TEXT DEFAULT 'active'
    );

    CREATE INDEX IF NOT EXISTS idx_conversations_agent ON conversations(agentId);
    CREATE INDEX IF NOT EXISTS idx_conversations_updated ON conversations(updated_at DESC);

    CREATE TABLE IF NOT EXISTS messages (
      id TEXT PRIMARY KEY,
      conversationId TEXT NOT NULL,
      role TEXT NOT NULL,
      content TEXT NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (conversationId) REFERENCES conversations(id)
    );

    CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(conversationId);

    CREATE TABLE IF NOT EXISTS sessions (
      id TEXT PRIMARY KEY,
      conversationId TEXT NOT NULL,
      status TEXT NOT NULL,
      started_at INTEGER NOT NULL,
      completed_at INTEGER,
      response TEXT,
      error TEXT,
      FOREIGN KEY (conversationId) REFERENCES conversations(id)
    );

    CREATE INDEX IF NOT EXISTS idx_sessions_conversation ON sessions(conversationId);
    CREATE INDEX IF NOT EXISTS idx_sessions_status ON sessions(conversationId, status);

    CREATE TABLE IF NOT EXISTS events (
      id TEXT PRIMARY KEY,
      type TEXT NOT NULL,
      conversationId TEXT,
      sessionId TEXT,
      data TEXT NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (conversationId) REFERENCES conversations(id),
      FOREIGN KEY (sessionId) REFERENCES sessions(id)
    );

    CREATE INDEX IF NOT EXISTS idx_events_conversation ON events(conversationId);

    CREATE TABLE IF NOT EXISTS idempotencyKeys (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL,
      created_at INTEGER NOT NULL,
      ttl INTEGER NOT NULL
    );

    CREATE INDEX IF NOT EXISTS idx_idempotency_created ON idempotencyKeys(created_at);

    CREATE TABLE IF NOT EXISTS stream_updates (
      id TEXT PRIMARY KEY,
      sessionId TEXT NOT NULL,
      conversationId TEXT NOT NULL,
      updateType TEXT NOT NULL,
      content TEXT NOT NULL,
      sequence INTEGER NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (sessionId) REFERENCES sessions(id),
      FOREIGN KEY (conversationId) REFERENCES conversations(id)
    );

    CREATE INDEX IF NOT EXISTS idx_stream_updates_session ON stream_updates(sessionId);
    CREATE INDEX IF NOT EXISTS idx_stream_updates_created ON stream_updates(created_at);

    CREATE TABLE IF NOT EXISTS chunks (
      id TEXT PRIMARY KEY,
      sessionId TEXT NOT NULL,
      conversationId TEXT NOT NULL,
      sequence INTEGER NOT NULL,
      type TEXT NOT NULL,
      data BLOB NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (sessionId) REFERENCES sessions(id),
      FOREIGN KEY (conversationId) REFERENCES conversations(id)
    );

    CREATE INDEX IF NOT EXISTS idx_chunks_session ON chunks(sessionId, sequence);
    CREATE INDEX IF NOT EXISTS idx_chunks_conversation ON chunks(conversationId, sequence);
    CREATE UNIQUE INDEX IF NOT EXISTS idx_chunks_unique ON chunks(sessionId, sequence);
    CREATE INDEX IF NOT EXISTS idx_chunks_conv_created ON chunks(conversationId, created_at);
    CREATE INDEX IF NOT EXISTS idx_chunks_sess_created ON chunks(sessionId, created_at);

    CREATE TABLE IF NOT EXISTS voice_cache (
      id TEXT PRIMARY KEY,
      conversationId TEXT NOT NULL,
      text TEXT NOT NULL,
      audioBlob BLOB,
      byteSize INTEGER NOT NULL,
      created_at INTEGER NOT NULL,
      expires_at INTEGER NOT NULL,
      FOREIGN KEY (conversationId) REFERENCES conversations(id)
    );

    CREATE INDEX IF NOT EXISTS idx_voice_cache_conv ON voice_cache(conversationId);
    CREATE INDEX IF NOT EXISTS idx_voice_cache_expires ON voice_cache(expires_at);

    CREATE TABLE IF NOT EXISTS tool_installations (
      id TEXT PRIMARY KEY,
      tool_id TEXT NOT NULL UNIQUE,
      version TEXT,
      installed_at INTEGER,
      status TEXT NOT NULL DEFAULT 'not_installed',
      last_check_at INTEGER,
      error_message TEXT,
      update_available INTEGER DEFAULT 0,
      latest_version TEXT,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL
    );

    CREATE INDEX IF NOT EXISTS idx_tool_installations_status ON tool_installations(status);
    CREATE INDEX IF NOT EXISTS idx_tool_installations_last_check ON tool_installations(last_check_at);

    CREATE TABLE IF NOT EXISTS tool_install_history (
      id TEXT PRIMARY KEY,
      tool_id TEXT NOT NULL,
      action TEXT NOT NULL,
      started_at INTEGER NOT NULL,
      completed_at INTEGER,
      status TEXT NOT NULL,
      error_message TEXT,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (tool_id) REFERENCES tool_installations(tool_id)
    );

    CREATE INDEX IF NOT EXISTS idx_tool_install_history_tool ON tool_install_history(tool_id);
    CREATE INDEX IF NOT EXISTS idx_tool_install_history_completed ON tool_install_history(completed_at);

    CREATE TABLE IF NOT EXISTS workflow_runs (
      id TEXT PRIMARY KEY,
      workflowName TEXT NOT NULL,
      workflowId TEXT,
      runId TEXT,
      sha TEXT,
      branch TEXT,
      status TEXT,
      conclusion TEXT,
      htmlUrl TEXT,
      triggeredAt INTEGER NOT NULL,
      completedAt INTEGER,
      created_at INTEGER NOT NULL
    );

    CREATE INDEX IF NOT EXISTS idx_workflow_runs_name ON workflow_runs(workflowName);
    CREATE INDEX IF NOT EXISTS idx_workflow_runs_sha ON workflow_runs(sha);
    CREATE INDEX IF NOT EXISTS idx_workflow_runs_completed ON workflow_runs(completedAt);

    CREATE TABLE IF NOT EXISTS oauth_tokens (
      id TEXT PRIMARY KEY,
      provider TEXT NOT NULL,
      token TEXT NOT NULL,
      email TEXT,
      expires_at INTEGER,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL
    );

    CREATE INDEX IF NOT EXISTS idx_oauth_tokens_provider ON oauth_tokens(provider);

  `);
}