agentgui 1.0.279 → 1.0.281

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/database.js +83 -80
  2. package/package.json +1 -1
  3. package/server.js +74 -74
package/database.js CHANGED
@@ -1,28 +1,28 @@
1
- import fs from 'fs';
2
- import path from 'path';
3
- import os from 'os';
4
- import { createRequire } from 'module';
5
-
6
- const require = createRequire(import.meta.url);
7
-
8
- function getDataDir() {
9
- if (process.env.PORTABLE_DATA_DIR) {
10
- return process.env.PORTABLE_DATA_DIR;
11
- }
12
- const exeDir = process.pkg?.path ? path.dirname(process.pkg.path) : null;
13
- if (exeDir) {
14
- return path.join(exeDir, 'data');
15
- }
16
- if (process.env.BUN_BE_BUN && process.argv[1]) {
17
- return path.join(path.dirname(process.argv[1]), 'data');
18
- }
19
- return path.join(os.homedir(), '.gmgui');
20
- }
21
-
22
- export const dataDir = getDataDir();
23
- const dbDir = dataDir;
24
- const dbFilePath = path.join(dbDir, 'data.db');
25
- const oldJsonPath = path.join(dbDir, 'data.json');
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import os from 'os';
4
+ import { createRequire } from 'module';
5
+
6
+ const require = createRequire(import.meta.url);
7
+
8
+ function getDataDir() {
9
+ if (process.env.PORTABLE_DATA_DIR) {
10
+ return process.env.PORTABLE_DATA_DIR;
11
+ }
12
+ const exeDir = process.pkg?.path ? path.dirname(process.pkg.path) : null;
13
+ if (exeDir) {
14
+ return path.join(exeDir, 'data');
15
+ }
16
+ if (process.env.BUN_BE_BUN && process.argv[1]) {
17
+ return path.join(path.dirname(process.argv[1]), 'data');
18
+ }
19
+ return path.join(os.homedir(), '.gmgui');
20
+ }
21
+
22
+ export const dataDir = getDataDir();
23
+ const dbDir = dataDir;
24
+ const dbFilePath = path.join(dbDir, 'data.db');
25
+ const oldJsonPath = path.join(dbDir, 'data.json');
26
26
 
27
27
  if (!fs.existsSync(dbDir)) {
28
28
  fs.mkdirSync(dbDir, { recursive: true });
@@ -326,6 +326,63 @@ try {
326
326
  console.error('[Migration] IPFS schema update warning:', err.message);
327
327
  }
328
328
 
329
+ // Migration: Backfill messages for conversations imported without message content
330
+ try {
331
+ const emptyImported = db.prepare(`
332
+ SELECT c.id, c.sourcePath FROM conversations c
333
+ LEFT JOIN messages m ON c.id = m.conversationId
334
+ WHERE c.sourcePath IS NOT NULL AND c.status != 'deleted'
335
+ GROUP BY c.id HAVING COUNT(m.id) = 0
336
+ `).all();
337
+
338
+ if (emptyImported.length > 0) {
339
+ console.log(`[Migration] Backfilling messages for ${emptyImported.length} imported conversation(s)`);
340
+ const insertMsg = db.prepare(`INSERT OR IGNORE INTO messages (id, conversationId, role, content, created_at) VALUES (?, ?, ?, ?, ?)`);
341
+ const backfill = db.transaction(() => {
342
+ for (const conv of emptyImported) {
343
+ if (!fs.existsSync(conv.sourcePath)) continue;
344
+ try {
345
+ const lines = fs.readFileSync(conv.sourcePath, 'utf-8').split('\n');
346
+ let count = 0;
347
+ for (const line of lines) {
348
+ if (!line.trim()) continue;
349
+ try {
350
+ const obj = JSON.parse(line);
351
+ const msgId = obj.uuid || `msg-${Date.now()}-${Math.random().toString(36).substr(2,9)}`;
352
+ const ts = obj.timestamp ? new Date(obj.timestamp).getTime() : Date.now();
353
+ if (obj.type === 'user' && obj.message?.content) {
354
+ const raw = obj.message.content;
355
+ const text = typeof raw === 'string' ? raw
356
+ : Array.isArray(raw) ? raw.filter(c => c.type === 'text').map(c => c.text).join('\n')
357
+ : JSON.stringify(raw);
358
+ if (text && !text.startsWith('[{"tool_use_id"')) {
359
+ insertMsg.run(msgId, conv.id, 'user', text, ts);
360
+ count++;
361
+ }
362
+ } else if (obj.type === 'assistant' && obj.message?.content) {
363
+ const raw = obj.message.content;
364
+ const text = Array.isArray(raw)
365
+ ? raw.filter(c => c.type === 'text' && c.text).map(c => c.text).join('\n\n')
366
+ : typeof raw === 'string' ? raw : '';
367
+ if (text) {
368
+ insertMsg.run(msgId, conv.id, 'assistant', text, ts);
369
+ count++;
370
+ }
371
+ }
372
+ } catch (_) {}
373
+ }
374
+ if (count > 0) console.log(`[Migration] Backfilled ${count} messages for conversation ${conv.id}`);
375
+ } catch (e) {
376
+ console.error(`[Migration] Error backfilling ${conv.id}:`, e.message);
377
+ }
378
+ }
379
+ });
380
+ backfill();
381
+ }
382
+ } catch (err) {
383
+ console.error('[Migration] Backfill error:', err.message);
384
+ }
385
+
329
386
  // Register official IPFS CIDs for voice models
330
387
  try {
331
388
  const LIGHTHOUSE_GATEWAY = 'https://gateway.lighthouse.storage/ipfs';
@@ -942,7 +999,7 @@ export const queries = {
942
999
 
943
1000
  for (const conv of discovered) {
944
1001
  try {
945
- const existingConv = prep('SELECT id, status FROM conversations WHERE id = ?').get(conv.id);
1002
+ const existingConv = prep('SELECT id, status FROM conversations WHERE id = ? OR externalId = ?').get(conv.id, conv.id);
946
1003
  if (existingConv) {
947
1004
  imported.push({ id: conv.id, status: 'skipped', reason: existingConv.status === 'deleted' ? 'deleted' : 'exists' });
948
1005
  continue;
@@ -1078,60 +1135,6 @@ export const queries = {
1078
1135
  return stmt.all('imported', 'deleted');
1079
1136
  },
1080
1137
 
1081
- importClaudeCodeConversations() {
1082
- const projectsDir = path.join(os.homedir(), '.claude', 'projects');
1083
- if (!fs.existsSync(projectsDir)) return [];
1084
-
1085
- const imported = [];
1086
- const projects = fs.readdirSync(projectsDir);
1087
-
1088
- for (const projectName of projects) {
1089
- const indexPath = path.join(projectsDir, projectName, 'sessions-index.json');
1090
- if (!fs.existsSync(indexPath)) continue;
1091
-
1092
- try {
1093
- const index = JSON.parse(fs.readFileSync(indexPath, 'utf-8'));
1094
- const entries = index.entries || [];
1095
-
1096
- for (const entry of entries) {
1097
- try {
1098
- const existing = this.getConversationByExternalId('claude-code', entry.sessionId);
1099
- if (existing) {
1100
- imported.push({ status: 'skipped', id: existing.id });
1101
- continue;
1102
- }
1103
-
1104
- this.createImportedConversation({
1105
- externalId: entry.sessionId,
1106
- agentType: 'claude-code',
1107
- title: entry.summary || entry.firstPrompt || `Conversation ${entry.sessionId.slice(0, 8)}`,
1108
- firstPrompt: entry.firstPrompt,
1109
- messageCount: entry.messageCount || 0,
1110
- created: new Date(entry.created).getTime(),
1111
- modified: new Date(entry.modified).getTime(),
1112
- projectPath: entry.projectPath,
1113
- gitBranch: entry.gitBranch,
1114
- sourcePath: entry.fullPath,
1115
- source: 'imported'
1116
- });
1117
-
1118
- imported.push({
1119
- status: 'imported',
1120
- id: entry.sessionId,
1121
- title: entry.summary || entry.firstPrompt
1122
- });
1123
- } catch (err) {
1124
- console.error(`[DB] Error importing session ${entry.sessionId}:`, err.message);
1125
- }
1126
- }
1127
- } catch (err) {
1128
- console.error(`[DB] Error reading ${indexPath}:`, err.message);
1129
- }
1130
- }
1131
-
1132
- return imported;
1133
- },
1134
-
1135
1138
  createChunk(sessionId, conversationId, sequence, type, data) {
1136
1139
  const id = generateId('chunk');
1137
1140
  const now = Date.now();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "agentgui",
3
- "version": "1.0.279",
3
+ "version": "1.0.281",
4
4
  "description": "Multi-agent ACP client with real-time communication",
5
5
  "type": "module",
6
6
  "main": "server.js",
package/server.js CHANGED
@@ -1,4 +1,5 @@
1
1
  import http from 'http';
2
+ import https from 'https';
2
3
  import fs from 'fs';
3
4
  import path from 'path';
4
5
  import os from 'os';
@@ -63,6 +64,45 @@ function broadcastModelProgress(progress) {
63
64
  broadcastSync(broadcastData);
64
65
  }
65
66
 
67
+ const LIGHTHOUSE_STT_BASE = 'https://gateway.lighthouse.storage/ipfs/bafybeidyw252ecy4vs46bbmezrtw325gl2ymdltosmzqgx4edjsc3fbofy/stt/onnx-community/whisper-base/';
68
+ const LIGHTHOUSE_STT_ONNX_BASE = 'https://gateway.lighthouse.storage/ipfs/bafybeidyw252ecy4vs46bbmezrtw325gl2ymdltosmzqgx4edjsc3fbofy/stt/onnx-community/whisper-base/onnx/';
69
+ const LIGHTHOUSE_TTS_BASE = 'https://gateway.lighthouse.storage/ipfs/bafybeidyw252ecy4vs46bbmezrtw325gl2ymdltosmzqgx4edjsc3fbofy/tts/';
70
+
71
+ const STT_BASE_FILES = ['config.json', 'preprocessor_config.json', 'tokenizer.json', 'tokenizer_config.json', 'vocab.json', 'merges.txt', 'model_quantized.onnx'];
72
+ const STT_ONNX_FILES = ['encoder_model.onnx', 'decoder_model_merged_q4.onnx', 'decoder_model_merged.onnx'];
73
+ const TTS_FILES = ['mimi_encoder.onnx', 'text_conditioner.onnx', 'flow_lm_main_int8.onnx', 'flow_lm_flow_int8.onnx', 'mimi_decoder_int8.onnx', 'tokenizer.model'];
74
+
75
+ function lighthouseDownload(url, dest, retries = 3, attempt = 1) {
76
+ return new Promise((resolve, reject) => {
77
+ const req = https.get(url, { timeout: 120000 }, (res) => {
78
+ if ([301, 302, 307, 308].includes(res.statusCode)) {
79
+ return lighthouseDownload(res.headers.location, dest, retries, attempt).then(resolve).catch(reject);
80
+ }
81
+ if (res.statusCode !== 200) {
82
+ res.resume();
83
+ const err = new Error(`Lighthouse HTTP ${res.statusCode} for ${url}`);
84
+ if (attempt < retries) return setTimeout(() => lighthouseDownload(url, dest, retries, attempt + 1).then(resolve).catch(reject), 2000 * attempt);
85
+ return reject(err);
86
+ }
87
+ fs.mkdirSync(path.dirname(dest), { recursive: true });
88
+ const file = fs.createWriteStream(dest);
89
+ res.pipe(file);
90
+ file.on('finish', () => { file.close(); resolve(); });
91
+ file.on('error', (e) => { try { fs.unlinkSync(dest); } catch (_) {} reject(e); });
92
+ res.on('error', (e) => { try { fs.unlinkSync(dest); } catch (_) {} reject(e); });
93
+ });
94
+ req.on('timeout', () => {
95
+ req.destroy();
96
+ if (attempt < retries) return setTimeout(() => lighthouseDownload(url, dest, retries, attempt + 1).then(resolve).catch(reject), 2000 * attempt);
97
+ reject(new Error(`Lighthouse timeout for ${url}`));
98
+ });
99
+ req.on('error', (e) => {
100
+ if (attempt < retries) return setTimeout(() => lighthouseDownload(url, dest, retries, attempt + 1).then(resolve).catch(reject), 2000 * attempt);
101
+ reject(e);
102
+ });
103
+ });
104
+ }
105
+
66
106
  async function ensureModelsDownloaded() {
67
107
  if (modelDownloadState.downloading) {
68
108
  while (modelDownloadState.downloading) {
@@ -76,8 +116,10 @@ async function ensureModelsDownloaded() {
76
116
  const sttDir = path.join(gmguiModels, 'onnx-community', 'whisper-base');
77
117
  const ttsDir = path.join(gmguiModels, 'tts');
78
118
 
79
- const sttOk = fs.existsSync(sttDir) && fs.readdirSync(sttDir).length > 0;
80
- const ttsOk = fs.existsSync(ttsDir) && fs.readdirSync(ttsDir).length > 0;
119
+ const sttOnnxDir = path.join(sttDir, 'onnx');
120
+ const sttOk = STT_BASE_FILES.every(f => fs.existsSync(path.join(sttDir, f))) &&
121
+ STT_ONNX_FILES.some(f => fs.existsSync(path.join(sttOnnxDir, f)));
122
+ const ttsOk = TTS_FILES.every(f => fs.existsSync(path.join(ttsDir, f)));
81
123
 
82
124
  if (sttOk && ttsOk) {
83
125
  console.log('[MODELS] All model files present');
@@ -88,96 +130,54 @@ async function ensureModelsDownloaded() {
88
130
  modelDownloadState.downloading = true;
89
131
  modelDownloadState.error = null;
90
132
 
91
- const totalFiles = 16;
133
+ const totalFiles = STT_BASE_FILES.length + STT_ONNX_FILES.length + TTS_FILES.length;
92
134
  let completedFiles = 0;
93
135
 
94
- const require = createRequire(import.meta.url);
95
-
96
136
  if (!sttOk) {
97
- broadcastModelProgress({ started: true, done: false, downloading: true, type: 'stt', source: 'ipfs', completedFiles, totalFiles });
98
- let sttDownloaded = false;
99
-
100
- const LIGHTHOUSE_STT_CID = 'bafybeidyw252ecy4vs46bbmezrtw325gl2ymdltosmzqgx4edjsc3fbofy';
101
- const lighthouseSttBase = `https://gateway.lighthouse.storage/ipfs/${LIGHTHOUSE_STT_CID}/stt/onnx-community/whisper-base/`;
102
- const whisperModels = require('webtalk/whisper-models');
103
- console.log('[MODELS] Downloading STT from Lighthouse IPFS:', LIGHTHOUSE_STT_CID);
137
+ broadcastModelProgress({ started: true, done: false, downloading: true, type: 'stt', source: 'lighthouse', completedFiles, totalFiles });
138
+ console.log('[MODELS] Downloading STT from Lighthouse IPFS...');
104
139
  fs.mkdirSync(sttDir, { recursive: true });
105
- const WHISPER_FILES = [
106
- 'config.json', 'preprocessor_config.json', 'tokenizer.json',
107
- 'tokenizer_config.json', 'vocab.json', 'merges.txt',
108
- 'model_quantized.onnx', 'onnx/encoder_model.onnx',
109
- 'onnx/decoder_model_merged_q4.onnx', 'onnx/decoder_model_merged.onnx'
110
- ];
140
+ fs.mkdirSync(sttOnnxDir, { recursive: true });
111
141
  try {
112
- for (const file of WHISPER_FILES) {
142
+ for (const file of STT_BASE_FILES) {
113
143
  const dest = path.join(sttDir, file);
114
- if (!fs.existsSync(dest)) {
115
- fs.mkdirSync(path.dirname(dest), { recursive: true });
116
- await whisperModels.downloadFile(lighthouseSttBase + file, dest, 3);
117
- }
144
+ if (!fs.existsSync(dest)) await lighthouseDownload(LIGHTHOUSE_STT_BASE + file, dest);
145
+ completedFiles++;
146
+ broadcastModelProgress({ started: true, done: false, downloading: true, type: 'stt', source: 'lighthouse', completedFiles, totalFiles });
147
+ }
148
+ for (const file of STT_ONNX_FILES) {
149
+ const dest = path.join(sttOnnxDir, file);
150
+ if (!fs.existsSync(dest)) await lighthouseDownload(LIGHTHOUSE_STT_ONNX_BASE + file, dest);
151
+ completedFiles++;
152
+ broadcastModelProgress({ started: true, done: false, downloading: true, type: 'stt', source: 'lighthouse', completedFiles, totalFiles });
118
153
  }
119
154
  console.log('[MODELS] STT model downloaded from Lighthouse IPFS');
120
- sttDownloaded = true;
121
155
  } catch (err) {
122
- console.error('[MODELS] IPFS STT download failed:', err.message, '- falling back to HuggingFace');
156
+ const msg = `STT download failed from Lighthouse IPFS: ${err.message}. Check your internet connection and try again.`;
157
+ console.error('[MODELS]', msg);
158
+ broadcastModelProgress({ done: true, error: msg, type: 'stt', completedFiles, totalFiles });
159
+ throw new Error(msg);
123
160
  }
124
-
125
- if (!sttDownloaded) {
126
- console.log('[MODELS] Downloading STT model via HuggingFace...');
127
- broadcastModelProgress({ started: true, done: false, downloading: true, type: 'stt', source: 'huggingface', completedFiles, totalFiles });
128
- try {
129
- const modelsDir = path.join(dataDir, 'models');
130
- fs.mkdirSync(modelsDir, { recursive: true });
131
- await whisperModels.ensureModel('onnx-community/whisper-base', {
132
- modelsDir,
133
- whisperBaseUrl: 'https://huggingface.co/',
134
- });
135
- console.log('[MODELS] STT model downloaded from HuggingFace');
136
- } catch (hfErr) {
137
- console.error('[MODELS] HuggingFace STT download failed:', hfErr.message);
138
- broadcastModelProgress({ done: true, error: `STT download failed: ${hfErr.message}`, type: 'stt', completedFiles, totalFiles });
139
- }
140
- }
141
- completedFiles += 10;
142
161
  }
143
162
 
144
163
  if (!ttsOk) {
145
- broadcastModelProgress({ started: true, done: false, downloading: true, type: 'tts', source: 'ipfs', completedFiles, totalFiles });
146
- let ttsDownloaded = false;
147
-
148
- const LIGHTHOUSE_TTS_CID = 'bafybeidyw252ecy4vs46bbmezrtw325gl2ymdltosmzqgx4edjsc3fbofy';
149
- const lighthouseTtsBase = `https://gateway.lighthouse.storage/ipfs/${LIGHTHOUSE_TTS_CID}/tts/`;
150
- const ttsModels = require('webtalk/tts-models');
151
- console.log('[MODELS] Downloading TTS from Lighthouse IPFS:', LIGHTHOUSE_TTS_CID);
164
+ broadcastModelProgress({ started: true, done: false, downloading: true, type: 'tts', source: 'lighthouse', completedFiles, totalFiles });
165
+ console.log('[MODELS] Downloading TTS from Lighthouse IPFS...');
152
166
  fs.mkdirSync(ttsDir, { recursive: true });
153
167
  try {
154
- await ttsModels.ensureTTSModels({
155
- ttsModelsDir: ttsDir,
156
- ttsDir: path.join(dataDir, 'models', 'tts'),
157
- ttsBaseUrl: lighthouseTtsBase,
158
- });
168
+ for (const file of TTS_FILES) {
169
+ const dest = path.join(ttsDir, file);
170
+ if (!fs.existsSync(dest)) await lighthouseDownload(LIGHTHOUSE_TTS_BASE + file, dest);
171
+ completedFiles++;
172
+ broadcastModelProgress({ started: true, done: false, downloading: true, type: 'tts', source: 'lighthouse', completedFiles, totalFiles });
173
+ }
159
174
  console.log('[MODELS] TTS models downloaded from Lighthouse IPFS');
160
- ttsDownloaded = true;
161
175
  } catch (err) {
162
- console.error('[MODELS] IPFS TTS download failed:', err.message, '- falling back to HuggingFace');
163
- }
164
-
165
- if (!ttsDownloaded) {
166
- console.log('[MODELS] Downloading TTS models via HuggingFace...');
167
- broadcastModelProgress({ started: true, done: false, downloading: true, type: 'tts', source: 'huggingface', completedFiles, totalFiles });
168
- try {
169
- await ttsModels.ensureTTSModels({
170
- ttsModelsDir: ttsDir,
171
- ttsDir: path.join(dataDir, 'models', 'tts'),
172
- ttsBaseUrl: 'https://huggingface.co/datasets/AnEntrypoint/sttttsmodels/resolve/main/tts/',
173
- });
174
- console.log('[MODELS] TTS models downloaded from HuggingFace');
175
- } catch (hfErr) {
176
- console.error('[MODELS] HuggingFace TTS download failed:', hfErr.message);
177
- broadcastModelProgress({ done: true, error: `TTS download failed: ${hfErr.message}`, type: 'tts', completedFiles, totalFiles });
178
- }
176
+ const msg = `TTS download failed from Lighthouse IPFS: ${err.message}. Check your internet connection and try again.`;
177
+ console.error('[MODELS]', msg);
178
+ broadcastModelProgress({ done: true, error: msg, type: 'tts', completedFiles, totalFiles });
179
+ throw new Error(msg);
179
180
  }
180
- completedFiles += 6;
181
181
  }
182
182
 
183
183
  modelDownloadState.complete = true;