@uniqueli/openwork 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1989 @@
+ "use strict";
+ const electron = require("electron");
+ const path = require("path");
+ const messages = require("@langchain/core/messages");
+ const langgraph = require("@langchain/langgraph");
+ const deepagents = require("deepagents");
+ const Store = require("electron-store");
+ const fs$1 = require("fs/promises");
+ const fs = require("fs");
+ const os = require("os");
+ const anthropic = require("@langchain/anthropic");
+ const openai = require("@langchain/openai");
+ const googleGenai = require("@langchain/google-genai");
+ const initSqlJs = require("sql.js");
+ const langgraphCheckpoint = require("@langchain/langgraph-checkpoint");
+ const node_child_process = require("node:child_process");
+ const node_crypto = require("node:crypto");
+ const uuid = require("uuid");
+ function _interopNamespaceDefault(e) {
+ const n = Object.create(null, { [Symbol.toStringTag]: { value: "Module" } });
+ if (e) {
+ for (const k in e) {
+ if (k !== "default") {
+ const d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: () => e[k]
+ });
+ }
+ }
+ }
+ n.default = e;
+ return Object.freeze(n);
+ }
+ const path__namespace = /* @__PURE__ */ _interopNamespaceDefault(path);
+ const fs__namespace$1 = /* @__PURE__ */ _interopNamespaceDefault(fs$1);
+ const fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs);
+ const activeWatchers = /* @__PURE__ */ new Map();
+ const debounceTimers = /* @__PURE__ */ new Map();
+ const DEBOUNCE_DELAY = 500;
+ function startWatching(threadId, workspacePath) {
+ stopWatching(threadId);
+ try {
+ const stat = fs__namespace.statSync(workspacePath);
+ if (!stat.isDirectory()) {
+ console.warn(`[WorkspaceWatcher] Path is not a directory: ${workspacePath}`);
+ return;
+ }
+ } catch (e) {
+ console.warn(`[WorkspaceWatcher] Cannot access path: ${workspacePath}`, e);
+ return;
+ }
+ try {
+ const watcher = fs__namespace.watch(workspacePath, { recursive: true }, (eventType, filename) => {
+ if (filename) {
+ const parts = filename.split(path__namespace.sep);
+ if (parts.some((p) => p.startsWith(".") || p === "node_modules")) {
+ return;
+ }
+ }
+ console.log(`[WorkspaceWatcher] ${eventType}: ${filename} in thread ${threadId}`);
+ const existingTimer = debounceTimers.get(threadId);
+ if (existingTimer) {
+ clearTimeout(existingTimer);
+ }
+ const timer = setTimeout(() => {
+ debounceTimers.delete(threadId);
+ notifyRenderer(threadId, workspacePath);
+ }, DEBOUNCE_DELAY);
+ debounceTimers.set(threadId, timer);
+ });
+ watcher.on("error", (error) => {
+ console.error(`[WorkspaceWatcher] Error watching ${workspacePath}:`, error);
+ stopWatching(threadId);
+ });
+ activeWatchers.set(threadId, watcher);
+ console.log(`[WorkspaceWatcher] Started watching ${workspacePath} for thread ${threadId}`);
+ } catch (e) {
+ console.error(`[WorkspaceWatcher] Failed to start watching ${workspacePath}:`, e);
+ }
+ }
+ function stopWatching(threadId) {
+ const watcher = activeWatchers.get(threadId);
+ if (watcher) {
+ watcher.close();
+ activeWatchers.delete(threadId);
+ console.log(`[WorkspaceWatcher] Stopped watching for thread ${threadId}`);
+ }
+ const timer = debounceTimers.get(threadId);
+ if (timer) {
+ clearTimeout(timer);
+ debounceTimers.delete(threadId);
+ }
+ }
+ function notifyRenderer(threadId, workspacePath) {
+ const windows = electron.BrowserWindow.getAllWindows();
+ for (const win of windows) {
+ win.webContents.send("workspace:files-changed", {
+ threadId,
+ workspacePath
+ });
+ }
+ }
+ const OPENWORK_DIR = path.join(os.homedir(), ".openwork");
+ const ENV_FILE = path.join(OPENWORK_DIR, ".env");
+ const ENV_VAR_NAMES = {
+ anthropic: "ANTHROPIC_API_KEY",
+ openai: "OPENAI_API_KEY",
+ google: "GOOGLE_API_KEY",
+ custom: "CUSTOM_API_KEY"
+ };
+ function getOpenworkDir() {
+ if (!fs.existsSync(OPENWORK_DIR)) {
+ fs.mkdirSync(OPENWORK_DIR, { recursive: true });
+ }
+ return OPENWORK_DIR;
+ }
+ function getDbPath() {
+ return path.join(getOpenworkDir(), "openwork.sqlite");
+ }
+ function getThreadCheckpointDir() {
+ const dir = path.join(getOpenworkDir(), "threads");
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir, { recursive: true });
+ }
+ return dir;
+ }
+ function getThreadCheckpointPath(threadId) {
+ return path.join(getThreadCheckpointDir(), `${threadId}.sqlite`);
+ }
+ function deleteThreadCheckpoint(threadId) {
+ const path2 = getThreadCheckpointPath(threadId);
+ if (fs.existsSync(path2)) {
+ fs.unlinkSync(path2);
+ }
+ }
+ function getEnvFilePath() {
+ return ENV_FILE;
+ }
+ function parseEnvFile() {
+ const envPath = getEnvFilePath();
+ if (!fs.existsSync(envPath)) return {};
+ const content = fs.readFileSync(envPath, "utf-8");
+ const result = {};
+ for (const line of content.split("\n")) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith("#")) continue;
+ const eqIndex = trimmed.indexOf("=");
+ if (eqIndex > 0) {
+ const key = trimmed.slice(0, eqIndex).trim();
+ const value = trimmed.slice(eqIndex + 1).trim();
+ result[key] = value;
+ }
+ }
+ return result;
+ }
+ function writeEnvFile(env) {
+ getOpenworkDir();
+ const lines = Object.entries(env).filter(([_, v]) => v).map(([k, v]) => `${k}=${v}`);
+ fs.writeFileSync(getEnvFilePath(), lines.join("\n") + "\n");
+ }
+ function getApiKey(provider) {
+ const envVarName = ENV_VAR_NAMES[provider];
+ if (!envVarName) return void 0;
+ const env = parseEnvFile();
+ if (env[envVarName]) return env[envVarName];
+ return process.env[envVarName];
+ }
+ function setApiKey(provider, apiKey) {
+ const envVarName = ENV_VAR_NAMES[provider];
+ if (!envVarName) return;
+ const env = parseEnvFile();
+ env[envVarName] = apiKey;
+ writeEnvFile(env);
+ process.env[envVarName] = apiKey;
+ }
+ function deleteApiKey(provider) {
+ const envVarName = ENV_VAR_NAMES[provider];
+ if (!envVarName) return;
+ const env = parseEnvFile();
+ delete env[envVarName];
+ writeEnvFile(env);
+ delete process.env[envVarName];
+ }
+ function hasApiKey(provider) {
+ return !!getApiKey(provider);
+ }
+ function getCustomApiConfig() {
+ const env = parseEnvFile();
+ const baseUrl = env.CUSTOM_BASE_URL;
+ const apiKey = env.CUSTOM_API_KEY;
+ const model = env.CUSTOM_MODEL;
+ if (!baseUrl || !apiKey) return void 0;
+ return { baseUrl, apiKey, model };
+ }
+ function setCustomApiConfig(config) {
+ const env = parseEnvFile();
+ env.CUSTOM_BASE_URL = config.baseUrl;
+ env.CUSTOM_API_KEY = config.apiKey;
+ if (config.model) {
+ env.CUSTOM_MODEL = config.model;
+ } else {
+ delete env.CUSTOM_MODEL;
+ }
+ writeEnvFile(env);
+ process.env.CUSTOM_BASE_URL = config.baseUrl;
+ process.env.CUSTOM_API_KEY = config.apiKey;
+ if (config.model) {
+ process.env.CUSTOM_MODEL = config.model;
+ }
+ }
+ function deleteCustomApiConfig() {
+ const env = parseEnvFile();
+ delete env.CUSTOM_BASE_URL;
+ delete env.CUSTOM_API_KEY;
+ delete env.CUSTOM_MODEL;
+ writeEnvFile(env);
+ delete process.env.CUSTOM_BASE_URL;
+ delete process.env.CUSTOM_API_KEY;
+ delete process.env.CUSTOM_MODEL;
+ }
+ function hasCustomApiConfig() {
+ return !!getCustomApiConfig();
+ }
+ const store = new Store({
+ name: "settings",
+ cwd: getOpenworkDir()
+ });
+ const PROVIDERS = [
+ { id: "anthropic", name: "Anthropic" },
+ { id: "openai", name: "OpenAI" },
+ { id: "google", name: "Google" },
+ { id: "custom", name: "Custom API" }
+ ];
+ const AVAILABLE_MODELS = [
+ // Anthropic Claude 4.5 series (latest as of Jan 2026)
+ {
+ id: "claude-opus-4-5-20251101",
+ name: "Claude Opus 4.5",
+ provider: "anthropic",
+ model: "claude-opus-4-5-20251101",
+ description: "Premium model with maximum intelligence",
+ available: true
+ },
+ {
+ id: "claude-sonnet-4-5-20250929",
+ name: "Claude Sonnet 4.5",
+ provider: "anthropic",
+ model: "claude-sonnet-4-5-20250929",
+ description: "Best balance of intelligence, speed, and cost for agents",
+ available: true
+ },
+ {
+ id: "claude-haiku-4-5-20251001",
+ name: "Claude Haiku 4.5",
+ provider: "anthropic",
+ model: "claude-haiku-4-5-20251001",
+ description: "Fastest model with near-frontier intelligence",
+ available: true
+ },
+ // Anthropic Claude legacy models
+ {
+ id: "claude-opus-4-1-20250805",
+ name: "Claude Opus 4.1",
+ provider: "anthropic",
+ model: "claude-opus-4-1-20250805",
+ description: "Previous generation premium model with extended thinking",
+ available: true
+ },
+ {
+ id: "claude-sonnet-4-20250514",
+ name: "Claude Sonnet 4",
+ provider: "anthropic",
+ model: "claude-sonnet-4-20250514",
+ description: "Fast and capable previous generation model",
+ available: true
+ },
+ // OpenAI GPT-5 series (latest as of Jan 2026)
+ {
+ id: "gpt-5.2",
+ name: "GPT-5.2",
+ provider: "openai",
+ model: "gpt-5.2",
+ description: "Latest flagship with enhanced coding and agentic capabilities",
+ available: true
+ },
+ {
+ id: "gpt-5.1",
+ name: "GPT-5.1",
+ provider: "openai",
+ model: "gpt-5.1",
+ description: "Advanced reasoning and robust performance",
+ available: true
+ },
+ // OpenAI o-series reasoning models
+ {
+ id: "o3",
+ name: "o3",
+ provider: "openai",
+ model: "o3",
+ description: "Advanced reasoning for complex problem-solving",
+ available: true
+ },
+ {
+ id: "o3-mini",
+ name: "o3 Mini",
+ provider: "openai",
+ model: "o3-mini",
+ description: "Cost-effective reasoning with faster response times",
+ available: true
+ },
+ {
+ id: "o4-mini",
+ name: "o4 Mini",
+ provider: "openai",
+ model: "o4-mini",
+ description: "Fast, efficient reasoning model succeeding o3",
+ available: true
+ },
+ {
+ id: "o1",
+ name: "o1",
+ provider: "openai",
+ model: "o1",
+ description: "Premium reasoning for research, coding, math and science",
+ available: true
+ },
+ // OpenAI GPT-4 series
+ {
+ id: "gpt-4.1",
+ name: "GPT-4.1",
+ provider: "openai",
+ model: "gpt-4.1",
+ description: "Strong instruction-following with 1M context window",
+ available: true
+ },
+ {
+ id: "gpt-4.1-mini",
+ name: "GPT-4.1 Mini",
+ provider: "openai",
+ model: "gpt-4.1-mini",
+ description: "Faster, smaller version balancing performance and efficiency",
+ available: true
+ },
+ {
+ id: "gpt-4.1-nano",
+ name: "GPT-4.1 Nano",
+ provider: "openai",
+ model: "gpt-4.1-nano",
+ description: "Most cost-efficient for lighter tasks",
+ available: true
+ },
+ {
+ id: "gpt-4o",
+ name: "GPT-4o",
+ provider: "openai",
+ model: "gpt-4o",
+ description: "Versatile model for text generation and comprehension",
+ available: true
+ },
+ {
+ id: "gpt-4o-mini",
+ name: "GPT-4o Mini",
+ provider: "openai",
+ model: "gpt-4o-mini",
+ description: "Cost-efficient variant with faster response times",
+ available: true
+ },
+ // Google Gemini models
+ {
+ id: "gemini-3-pro-preview",
+ name: "Gemini 3 Pro Preview",
+ provider: "google",
+ model: "gemini-3-pro-preview",
+ description: "State-of-the-art reasoning and multimodal understanding",
+ available: true
+ },
+ {
+ id: "gemini-2.5-pro",
+ name: "Gemini 2.5 Pro",
+ provider: "google",
+ model: "gemini-2.5-pro",
+ description: "High-capability model for complex reasoning and coding",
+ available: true
+ },
+ {
+ id: "gemini-2.5-flash",
+ name: "Gemini 2.5 Flash",
+ provider: "google",
+ model: "gemini-2.5-flash",
+ description: "Lightning-fast with balance of intelligence and latency",
+ available: true
+ },
+ {
+ id: "gemini-2.5-flash-lite",
+ name: "Gemini 2.5 Flash Lite",
+ provider: "google",
+ model: "gemini-2.5-flash-lite",
+ description: "Fast, low-cost, high-performance model",
+ available: true
+ },
+ // Custom API
+ {
+ id: "custom",
+ name: "Custom API",
+ provider: "custom",
+ model: "custom",
+ description: "Use your own OpenAI-compatible API endpoint",
+ available: true
+ }
+ ];
+ function registerModelHandlers(ipcMain) {
+ ipcMain.handle("models:list", async () => {
+ return AVAILABLE_MODELS.map((model) => ({
+ ...model,
+ available: model.provider === "custom" ? hasCustomApiConfig() : hasApiKey(model.provider)
+ }));
+ });
+ ipcMain.handle("models:getDefault", async () => {
+ return store.get("defaultModel", "claude-sonnet-4-5-20250929");
+ });
+ ipcMain.handle("models:setDefault", async (_event, modelId) => {
+ store.set("defaultModel", modelId);
+ });
+ ipcMain.handle(
+ "models:setApiKey",
+ async (_event, { provider, apiKey }) => {
+ setApiKey(provider, apiKey);
+ }
+ );
+ ipcMain.handle("models:getApiKey", async (_event, provider) => {
+ return getApiKey(provider) ?? null;
+ });
+ ipcMain.handle("models:deleteApiKey", async (_event, provider) => {
+ deleteApiKey(provider);
+ });
+ ipcMain.handle("models:listProviders", async () => {
+ return PROVIDERS.map((provider) => ({
+ ...provider,
+ hasApiKey: provider.id === "custom" ? hasCustomApiConfig() : hasApiKey(provider.id)
+ }));
+ });
+ ipcMain.handle("models:getCustomApiConfig", async () => {
+ return getCustomApiConfig() ?? null;
+ });
+ ipcMain.handle("models:setCustomApiConfig", async (_event, config) => {
+ setCustomApiConfig(config);
+ });
+ ipcMain.handle("models:deleteCustomApiConfig", async () => {
+ deleteCustomApiConfig();
+ });
+ ipcMain.on("app:version", (event) => {
+ event.returnValue = electron.app.getVersion();
+ });
+ ipcMain.handle("workspace:get", async (_event, threadId) => {
+ if (!threadId) {
+ return store.get("workspacePath", null);
+ }
+ const { getThread: getThread2 } = await Promise.resolve().then(() => index);
+ const thread = getThread2(threadId);
+ if (!thread?.metadata) return null;
+ const metadata = JSON.parse(thread.metadata);
+ return metadata.workspacePath || null;
+ });
+ ipcMain.handle(
+ "workspace:set",
+ async (_event, { threadId, path: newPath }) => {
+ if (!threadId) {
+ if (newPath) {
+ store.set("workspacePath", newPath);
+ } else {
+ store.delete("workspacePath");
+ }
+ return newPath;
+ }
+ const { getThread: getThread2, updateThread: updateThread2 } = await Promise.resolve().then(() => index);
+ const thread = getThread2(threadId);
+ if (!thread) return null;
+ const metadata = thread.metadata ? JSON.parse(thread.metadata) : {};
+ metadata.workspacePath = newPath;
+ updateThread2(threadId, { metadata: JSON.stringify(metadata) });
+ if (newPath) {
+ startWatching(threadId, newPath);
+ } else {
+ stopWatching(threadId);
+ }
+ return newPath;
+ }
+ );
+ ipcMain.handle("workspace:select", async (_event, threadId) => {
+ const result = await electron.dialog.showOpenDialog({
+ properties: ["openDirectory", "createDirectory"],
+ title: "Select Workspace Folder",
+ message: "Choose a folder for the agent to work in"
+ });
+ if (result.canceled || result.filePaths.length === 0) {
+ return null;
+ }
+ const selectedPath = result.filePaths[0];
+ if (threadId) {
+ const { getThread: getThread2, updateThread: updateThread2 } = await Promise.resolve().then(() => index);
+ const thread = getThread2(threadId);
+ if (thread) {
+ const metadata = thread.metadata ? JSON.parse(thread.metadata) : {};
+ metadata.workspacePath = selectedPath;
+ updateThread2(threadId, { metadata: JSON.stringify(metadata) });
+ startWatching(threadId, selectedPath);
+ }
+ } else {
+ store.set("workspacePath", selectedPath);
+ }
+ return selectedPath;
+ });
+ ipcMain.handle("workspace:loadFromDisk", async (_event, { threadId }) => {
+ const { getThread: getThread2 } = await Promise.resolve().then(() => index);
+ const thread = getThread2(threadId);
+ const metadata = thread?.metadata ? JSON.parse(thread.metadata) : {};
+ const workspacePath = metadata.workspacePath;
+ if (!workspacePath) {
+ return { success: false, error: "No workspace folder linked", files: [] };
+ }
+ try {
+ const files = [];
+ async function readDir(dirPath, relativePath = "") {
+ const entries = await fs__namespace$1.readdir(dirPath, { withFileTypes: true });
+ for (const entry of entries) {
+ if (entry.name.startsWith(".") || entry.name === "node_modules") {
+ continue;
+ }
+ const fullPath = path__namespace.join(dirPath, entry.name);
+ const relPath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
+ if (entry.isDirectory()) {
+ files.push({
+ path: "/" + relPath,
+ is_dir: true
+ });
+ await readDir(fullPath, relPath);
+ } else {
+ const stat = await fs__namespace$1.stat(fullPath);
+ files.push({
+ path: "/" + relPath,
+ is_dir: false,
+ size: stat.size,
+ modified_at: stat.mtime.toISOString()
+ });
+ }
+ }
+ }
+ await readDir(workspacePath);
+ startWatching(threadId, workspacePath);
+ return {
+ success: true,
+ files,
+ workspacePath
+ };
+ } catch (e) {
+ return {
+ success: false,
+ error: e instanceof Error ? e.message : "Unknown error",
+ files: []
+ };
+ }
+ });
+ ipcMain.handle(
+ "workspace:readFile",
+ async (_event, { threadId, filePath }) => {
+ const { getThread: getThread2 } = await Promise.resolve().then(() => index);
+ const thread = getThread2(threadId);
+ const metadata = thread?.metadata ? JSON.parse(thread.metadata) : {};
+ const workspacePath = metadata.workspacePath;
+ if (!workspacePath) {
+ return {
+ success: false,
+ error: "No workspace folder linked"
+ };
+ }
+ try {
+ const relativePath = filePath.startsWith("/") ? filePath.slice(1) : filePath;
+ const fullPath = path__namespace.join(workspacePath, relativePath);
+ const resolvedPath = path__namespace.resolve(fullPath);
+ const resolvedWorkspace = path__namespace.resolve(workspacePath);
+ if (!resolvedPath.startsWith(resolvedWorkspace)) {
+ return { success: false, error: "Access denied: path outside workspace" };
+ }
+ const stat = await fs__namespace$1.stat(fullPath);
+ if (stat.isDirectory()) {
+ return { success: false, error: "Cannot read directory as file" };
+ }
+ const content = await fs__namespace$1.readFile(fullPath, "utf-8");
+ return {
+ success: true,
+ content,
+ size: stat.size,
+ modified_at: stat.mtime.toISOString()
+ };
+ } catch (e) {
+ return {
+ success: false,
+ error: e instanceof Error ? e.message : "Unknown error"
+ };
+ }
+ }
+ );
+ ipcMain.handle(
+ "workspace:readBinaryFile",
+ async (_event, { threadId, filePath }) => {
+ const { getThread: getThread2 } = await Promise.resolve().then(() => index);
+ const thread = getThread2(threadId);
+ const metadata = thread?.metadata ? JSON.parse(thread.metadata) : {};
+ const workspacePath = metadata.workspacePath;
+ if (!workspacePath) {
+ return {
+ success: false,
+ error: "No workspace folder linked"
+ };
+ }
+ try {
+ const relativePath = filePath.startsWith("/") ? filePath.slice(1) : filePath;
+ const fullPath = path__namespace.join(workspacePath, relativePath);
+ const resolvedPath = path__namespace.resolve(fullPath);
+ const resolvedWorkspace = path__namespace.resolve(workspacePath);
+ if (!resolvedPath.startsWith(resolvedWorkspace)) {
+ return { success: false, error: "Access denied: path outside workspace" };
+ }
+ const stat = await fs__namespace$1.stat(fullPath);
+ if (stat.isDirectory()) {
+ return { success: false, error: "Cannot read directory as file" };
+ }
+ const buffer = await fs__namespace$1.readFile(fullPath);
+ const base64 = buffer.toString("base64");
+ return {
+ success: true,
+ content: base64,
+ size: stat.size,
+ modified_at: stat.mtime.toISOString()
+ };
+ } catch (e) {
+ return {
+ success: false,
+ error: e instanceof Error ? e.message : "Unknown error"
+ };
+ }
+ }
+ );
+ }
+ function getDefaultModel() {
+ return store.get("defaultModel", "claude-sonnet-4-5-20250929");
+ }
+ class SqlJsSaver extends langgraphCheckpoint.BaseCheckpointSaver {
+ db = null;
+ dbPath;
+ isSetup = false;
+ saveTimer = null;
+ dirty = false;
+ constructor(dbPath, serde) {
+ super(serde);
+ this.dbPath = dbPath;
+ }
+ /**
+ * Initialize the database asynchronously
+ */
+ async initialize() {
+ if (this.db) return;
+ const SQL = await initSqlJs();
+ if (fs.existsSync(this.dbPath)) {
+ const stats = fs.statSync(this.dbPath);
+ const MAX_DB_SIZE = 100 * 1024 * 1024;
+ if (stats.size > MAX_DB_SIZE) {
+ console.warn(
+ `[SqlJsSaver] Database file is too large (${Math.round(stats.size / 1024 / 1024)}MB). Creating fresh database to prevent memory issues.`
+ );
+ const backupPath = this.dbPath + ".bak." + Date.now();
+ try {
+ fs.renameSync(this.dbPath, backupPath);
+ console.log(`[SqlJsSaver] Old database backed up to: ${backupPath}`);
+ } catch (e) {
+ console.warn("[SqlJsSaver] Could not backup old database:", e);
+ try {
+ fs.unlinkSync(this.dbPath);
+ } catch (e2) {
+ console.error("[SqlJsSaver] Could not delete old database:", e2);
+ }
+ }
+ this.db = new SQL.Database();
+ } else {
+ const buffer = fs.readFileSync(this.dbPath);
+ this.db = new SQL.Database(buffer);
+ }
+ } else {
+ const dir = path.dirname(this.dbPath);
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir, { recursive: true });
+ }
+ this.db = new SQL.Database();
+ }
+ this.setup();
+ }
+ setup() {
+ if (this.isSetup || !this.db) return;
+ this.db.run(`
+ CREATE TABLE IF NOT EXISTS checkpoints (
+ thread_id TEXT NOT NULL,
+ checkpoint_ns TEXT NOT NULL DEFAULT '',
+ checkpoint_id TEXT NOT NULL,
+ parent_checkpoint_id TEXT,
+ type TEXT,
+ checkpoint TEXT,
+ metadata TEXT,
+ PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id)
+ )
+ `);
+ this.db.run(`
+ CREATE TABLE IF NOT EXISTS writes (
+ thread_id TEXT NOT NULL,
+ checkpoint_ns TEXT NOT NULL DEFAULT '',
+ checkpoint_id TEXT NOT NULL,
+ task_id TEXT NOT NULL,
+ idx INTEGER NOT NULL,
+ channel TEXT NOT NULL,
+ type TEXT,
+ value TEXT,
+ PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id, task_id, idx)
+ )
+ `);
+ this.isSetup = true;
+ this.saveToDisk();
+ }
+ /**
+ * Save database to disk (debounced)
+ */
+ saveToDisk() {
+ if (!this.db) return;
+ this.dirty = true;
+ if (this.saveTimer) {
+ clearTimeout(this.saveTimer);
+ }
+ this.saveTimer = setTimeout(() => {
+ if (this.db && this.dirty) {
+ const data = this.db.export();
+ fs.writeFileSync(this.dbPath, Buffer.from(data));
+ this.dirty = false;
+ }
+ }, 100);
+ }
+ /**
+ * Force immediate save to disk
+ */
+ async flush() {
+ if (this.saveTimer) {
+ clearTimeout(this.saveTimer);
+ this.saveTimer = null;
+ }
+ if (this.db && this.dirty) {
+ const data = this.db.export();
+ fs.writeFileSync(this.dbPath, Buffer.from(data));
+ this.dirty = false;
+ }
+ }
+ async getTuple(config) {
+ await this.initialize();
+ if (!this.db) throw new Error("Database not initialized");
+ const { thread_id, checkpoint_ns = "", checkpoint_id } = config.configurable ?? {};
+ let sql;
+ let params;
+ if (checkpoint_id) {
+ sql = `
+ SELECT thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata
+ FROM checkpoints
+ WHERE thread_id = ? AND checkpoint_ns = ? AND checkpoint_id = ?
+ `;
+ params = [thread_id, checkpoint_ns, checkpoint_id];
+ } else {
+ sql = `
+ SELECT thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata
+ FROM checkpoints
+ WHERE thread_id = ? AND checkpoint_ns = ?
+ ORDER BY checkpoint_id DESC
+ LIMIT 1
+ `;
+ params = [thread_id, checkpoint_ns];
+ }
+ const stmt = this.db.prepare(sql);
+ stmt.bind(params.filter((p) => p !== void 0));
+ if (!stmt.step()) {
+ stmt.free();
+ return void 0;
+ }
+ const row = stmt.getAsObject();
+ stmt.free();
+ const writesStmt = this.db.prepare(`
+ SELECT task_id, channel, type, value
+ FROM writes
+ WHERE thread_id = ? AND checkpoint_ns = ? AND checkpoint_id = ?
+ `);
+ writesStmt.bind([row.thread_id, row.checkpoint_ns, row.checkpoint_id]);
+ const pendingWrites = [];
+ while (writesStmt.step()) {
+ const write = writesStmt.getAsObject();
+ const value = await this.serde.loadsTyped(write.type ?? "json", write.value ?? "");
+ pendingWrites.push([write.task_id, write.channel, value]);
+ }
+ writesStmt.free();
+ const checkpoint = await this.serde.loadsTyped(
+ row.type ?? "json",
+ row.checkpoint
+ );
+ const finalConfig = checkpoint_id ? config : {
+ configurable: {
+ thread_id: row.thread_id,
+ checkpoint_ns: row.checkpoint_ns,
+ checkpoint_id: row.checkpoint_id
+ }
+ };
+ return {
+ checkpoint,
+ config: finalConfig,
+ metadata: await this.serde.loadsTyped(
+ row.type ?? "json",
+ row.metadata
+ ),
+ parentConfig: row.parent_checkpoint_id ? {
+ configurable: {
+ thread_id: row.thread_id,
+ checkpoint_ns: row.checkpoint_ns,
+ checkpoint_id: row.parent_checkpoint_id
+ }
+ } : void 0,
+ pendingWrites
+ };
+ }
+ async *list(config, options) {
+ await this.initialize();
+ if (!this.db) throw new Error("Database not initialized");
+ const { limit, before } = options ?? {};
+ const thread_id = config.configurable?.thread_id;
+ const checkpoint_ns = config.configurable?.checkpoint_ns ?? "";
+ let sql = `
+ SELECT thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata
+ FROM checkpoints
+ WHERE thread_id = ? AND checkpoint_ns = ?
+ `;
+ const params = [thread_id, checkpoint_ns];
+ if (before?.configurable?.checkpoint_id) {
+ sql += ` AND checkpoint_id < ?`;
+ params.push(before.configurable.checkpoint_id);
+ }
+ sql += ` ORDER BY checkpoint_id DESC`;
+ if (limit) {
+ sql += ` LIMIT ${parseInt(String(limit), 10)}`;
+ }
+ const stmt = this.db.prepare(sql);
+ stmt.bind(params);
+ while (stmt.step()) {
+ const row = stmt.getAsObject();
+ const writesStmt = this.db.prepare(`
+ SELECT task_id, channel, type, value
+ FROM writes
+ WHERE thread_id = ? AND checkpoint_ns = ? AND checkpoint_id = ?
+ `);
+ writesStmt.bind([row.thread_id, row.checkpoint_ns, row.checkpoint_id]);
+ const pendingWrites = [];
+ while (writesStmt.step()) {
+ const write = writesStmt.getAsObject();
+ const value = await this.serde.loadsTyped(write.type ?? "json", write.value ?? "");
+ pendingWrites.push([write.task_id, write.channel, value]);
+ }
+ writesStmt.free();
+ const checkpoint = await this.serde.loadsTyped(
+ row.type ?? "json",
+ row.checkpoint
+ );
+ yield {
+ config: {
+ configurable: {
+ thread_id: row.thread_id,
+ checkpoint_ns: row.checkpoint_ns,
+ checkpoint_id: row.checkpoint_id
+ }
+ },
+ checkpoint,
+ metadata: await this.serde.loadsTyped(
+ row.type ?? "json",
+ row.metadata
+ ),
+ parentConfig: row.parent_checkpoint_id ? {
+ configurable: {
+ thread_id: row.thread_id,
+ checkpoint_ns: row.checkpoint_ns,
+ checkpoint_id: row.parent_checkpoint_id
+ }
+ } : void 0,
+ pendingWrites
+ };
+ }
+ stmt.free();
+ }
+ async put(config, checkpoint, metadata) {
+ await this.initialize();
+ if (!this.db) throw new Error("Database not initialized");
+ if (!config.configurable) {
+ throw new Error("Empty configuration supplied.");
+ }
+ const thread_id = config.configurable?.thread_id;
+ const checkpoint_ns = config.configurable?.checkpoint_ns ?? "";
+ const parent_checkpoint_id = config.configurable?.checkpoint_id;
+ if (!thread_id) {
+ throw new Error('Missing "thread_id" field in passed "config.configurable".');
+ }
+ const preparedCheckpoint = langgraphCheckpoint.copyCheckpoint(checkpoint);
+ const [[type1, serializedCheckpoint], [type2, serializedMetadata]] = await Promise.all([
+ this.serde.dumpsTyped(preparedCheckpoint),
+ this.serde.dumpsTyped(metadata)
+ ]);
+ if (type1 !== type2) {
+ throw new Error("Failed to serialize checkpoint and metadata to the same type.");
+ }
+ this.db.run(
+ `INSERT OR REPLACE INTO checkpoints
+ (thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata)
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
+ [
+ thread_id,
+ checkpoint_ns,
+ checkpoint.id,
+ parent_checkpoint_id ?? null,
+ type1,
+ serializedCheckpoint,
+ serializedMetadata
+ ]
+ );
+ this.saveToDisk();
+ return {
+ configurable: {
+ thread_id,
+ checkpoint_ns,
+ checkpoint_id: checkpoint.id
+ }
+ };
+ }
+ async putWrites(config, writes, taskId) {
+ await this.initialize();
+ if (!this.db) throw new Error("Database not initialized");
+ if (!config.configurable) {
+ throw new Error("Empty configuration supplied.");
+ }
+ if (!config.configurable?.thread_id) {
+ throw new Error("Missing thread_id field in config.configurable.");
+ }
+ if (!config.configurable?.checkpoint_id) {
+ throw new Error("Missing checkpoint_id field in config.configurable.");
+ }
+ for (let idx = 0; idx < writes.length; idx++) {
+ const write = writes[idx];
+ const [type, serializedWrite] = await this.serde.dumpsTyped(write[1]);
+ this.db.run(
+ `INSERT OR REPLACE INTO writes
+ (thread_id, checkpoint_ns, checkpoint_id, task_id, idx, channel, type, value)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
+ [
+ config.configurable.thread_id,
+ config.configurable.checkpoint_ns ?? "",
+ config.configurable.checkpoint_id,
+ taskId,
+ idx,
+ write[0],
+ type,
+ serializedWrite
+ ]
+ );
+ }
+ this.saveToDisk();
+ }
+ async deleteThread(threadId) {
+ await this.initialize();
+ if (!this.db) throw new Error("Database not initialized");
+ this.db.run(`DELETE FROM checkpoints WHERE thread_id = ?`, [threadId]);
+ this.db.run(`DELETE FROM writes WHERE thread_id = ?`, [threadId]);
+ this.saveToDisk();
+ }
+ /**
+ * Close the database and save any pending changes
+ */
+ async close() {
+ await this.flush();
+ if (this.db) {
+ this.db.close();
+ this.db = null;
+ }
+ }
+ }
+ class LocalSandbox extends deepagents.FilesystemBackend {
+ /** Unique identifier for this sandbox instance */
+ id;
+ timeout;
+ maxOutputBytes;
+ env;
+ workingDir;
+ constructor(options = {}) {
+ super({
+ rootDir: options.rootDir,
+ virtualMode: options.virtualMode,
+ maxFileSizeMb: options.maxFileSizeMb
+ });
+ this.id = `local-sandbox-${node_crypto.randomUUID().slice(0, 8)}`;
+ this.timeout = options.timeout ?? 12e4;
+ this.maxOutputBytes = options.maxOutputBytes ?? 1e5;
+ this.env = options.env ?? { ...process.env };
+ this.workingDir = options.rootDir ?? process.cwd();
+ }
+ /**
+ * Execute a shell command in the workspace directory.
+ *
+ * @param command - Shell command string to execute
+ * @returns ExecuteResponse with combined output, exit code, and truncation flag
+ *
+ * @example
+ * ```typescript
+ * const result = await sandbox.execute('echo "Hello World"');
+ * // result.output: "Hello World\n"
+ * // result.exitCode: 0
+ * // result.truncated: false
+ * ```
+ */
+ async execute(command) {
+ if (!command || typeof command !== "string") {
+ return {
+ output: "Error: Shell tool expects a non-empty command string.",
+ exitCode: 1,
+ truncated: false
+ };
+ }
+ return new Promise((resolve) => {
+ const outputParts = [];
+ let totalBytes = 0;
+ let truncated = false;
+ let resolved = false;
+ const isWindows = process.platform === "win32";
+ const shell = isWindows ? "cmd.exe" : "/bin/sh";
+ const shellArgs = isWindows ? ["/c", command] : ["-c", command];
+ const proc = node_child_process.spawn(shell, shellArgs, {
+ cwd: this.workingDir,
+ env: this.env,
+ stdio: ["ignore", "pipe", "pipe"]
+ });
+ const timeoutId = setTimeout(() => {
+ if (!resolved) {
+ resolved = true;
+ proc.kill("SIGTERM");
+ setTimeout(() => proc.kill("SIGKILL"), 1e3);
+ resolve({
+ output: `Error: Command timed out after ${(this.timeout / 1e3).toFixed(1)} seconds.`,
+ exitCode: null,
+ truncated: false
+ });
+ }
+ }, this.timeout);
+ proc.stdout.on("data", (data) => {
+ if (truncated) return;
+ const chunk = data.toString();
+ const newTotal = totalBytes + chunk.length;
+ if (newTotal > this.maxOutputBytes) {
+ const remaining = this.maxOutputBytes - totalBytes;
+ if (remaining > 0) {
+ outputParts.push(chunk.slice(0, remaining));
+ }
+ truncated = true;
+ totalBytes = this.maxOutputBytes;
+ } else {
+ outputParts.push(chunk);
+ totalBytes = newTotal;
+ }
+ });
+ proc.stderr.on("data", (data) => {
+ if (truncated) return;
+ const chunk = data.toString();
+ const prefixedLines = chunk.split("\n").filter((line) => line.length > 0).map((line) => `[stderr] ${line}`).join("\n");
+ if (prefixedLines.length === 0) return;
+ const withNewline = prefixedLines + (chunk.endsWith("\n") ? "\n" : "");
+ const newTotal = totalBytes + withNewline.length;
+ if (newTotal > this.maxOutputBytes) {
+ const remaining = this.maxOutputBytes - totalBytes;
+ if (remaining > 0) {
+ outputParts.push(withNewline.slice(0, remaining));
+ }
+ truncated = true;
+ totalBytes = this.maxOutputBytes;
+ } else {
+ outputParts.push(withNewline);
+ totalBytes = newTotal;
+ }
+ });
+ proc.on("close", (code, signal) => {
+ if (resolved) return;
+ resolved = true;
+ clearTimeout(timeoutId);
+ let output = outputParts.join("");
+ if (truncated) {
+ output += `
+
+ ... Output truncated at ${this.maxOutputBytes} bytes.`;
+ }
+ if (!output.trim()) {
+ output = "<no output>";
+ }
+ resolve({
+ output,
+ exitCode: signal ? null : code,
+ truncated
+ });
+ });
+ proc.on("error", (err) => {
+ if (resolved) return;
+ resolved = true;
+ clearTimeout(timeoutId);
+ resolve({
+ output: `Error: Failed to execute command: ${err.message}`,
+ exitCode: 1,
+ truncated: false
+ });
+ });
+ });
+ }
+ }
+ const BASE_SYSTEM_PROMPT = `You are an AI assistant that helps users with various tasks including coding, research, and analysis.
+
+ # Core Behavior
+
+ Be concise and direct. Answer in fewer than 4 lines unless the user asks for detail.
+ After working on a file, just stop - don't explain what you did unless asked.
+ Avoid unnecessary introductions or conclusions.
+
+ When you run non-trivial bash commands, briefly explain what they do.
+
+ ## Proactiveness
+ Take action when asked, but don't surprise users with unrequested actions.
+ If asked how to approach something, answer first before taking action.
+
+ ## Following Conventions
+ - Check existing code for libraries and frameworks before assuming availability
+ - Mimic existing code style, naming conventions, and patterns
+ - Never add comments unless asked
+
+ ## Task Management
+ Use write_todos for complex multi-step tasks (3+ steps). Mark tasks in_progress before starting, completed immediately after finishing.
+ For simple 1-2 step tasks, just do them directly without todos.
+
+ ## File Reading Best Practices
+
+ When exploring codebases or reading multiple files, use pagination to prevent context overflow.
+
+ **Pattern for codebase exploration:**
+ 1. First scan: \`read_file(path, limit=100)\` - See file structure and key sections
+ 2. Targeted read: \`read_file(path, offset=100, limit=200)\` - Read specific sections if needed
+ 3. Full read: Only use \`read_file(path)\` without limit when necessary for editing
+
+ **When to paginate:**
+ - Reading any file >500 lines
+ - Exploring unfamiliar codebases (always start with limit=100)
+ - Reading multiple files in sequence
+
+ **When full read is OK:**
+ - Small files (<500 lines)
+ - Files you need to edit immediately after reading
+
+ ## Working with Subagents (task tool)
+ When delegating to subagents:
+ - **Use filesystem for large I/O**: If input/output is large (>500 words), communicate via files
+ - **Parallelize independent work**: Spawn parallel subagents for independent tasks
+ - **Clear specifications**: Tell subagent exactly what format/structure you need
+ - **Main agent synthesizes**: Subagents gather/execute, main agent integrates results
+
+ ## Tools
+
+ ### File Tools
+ - read_file: Read file contents
+ - edit_file: Replace exact strings in files (must read first, provide unique old_string)
+ - write_file: Create or overwrite files
+ - ls: List directory contents
+ - glob: Find files by pattern (e.g., "**/*.py")
+ - grep: Search file contents
+
+ All file paths should use fully qualified absolute system paths (e.g., /Users/name/project/src/file.ts).
+
+ ### Shell Tool
+ - execute: Run shell commands in the workspace directory
+
+ The execute tool runs commands directly on the user's machine. Use it for:
+ - Running scripts, tests, and builds (npm test, python script.py, make)
+ - Git operations (git status, git diff, git commit)
+ - Installing dependencies (npm install, pip install)
+ - System commands (which, env, pwd)
+
+ **Important:**
+ - All execute commands require user approval before running
+ - Commands run in the workspace root directory
+ - Avoid using shell for file reading (use read_file instead)
+ - Avoid using shell for file searching (use grep/glob instead)
+ - When running non-trivial commands, briefly explain what they do
+
+ ## Code References
+ When referencing code, use format: \`file_path:line_number\`
+
+ ## Documentation
+ - Do NOT create excessive markdown summary/documentation files after completing work
+ - Focus on the work itself, not documenting what you did
+ - Only create documentation when explicitly requested
+
+ ## Human-in-the-Loop Tool Approval
+
+ Some tool calls require user approval before execution. When a tool call is rejected by the user:
+ 1. Accept their decision immediately - do NOT retry the same command
+ 2. Explain that you understand they rejected the action
+ 3. Suggest an alternative approach or ask for clarification
+ 4. Never attempt the exact same rejected command again
+
+ Respect the user's decisions and work with them collaboratively.
+
+ ## Todo List Management
+
+ When using the write_todos tool:
+ 1. Keep the todo list MINIMAL - aim for 3-6 items maximum
+ 2. Only create todos for complex, multi-step tasks that truly need tracking
+ 3. Break down work into clear, actionable items without over-fragmenting
+ 4. For simple tasks (1-2 steps), just do them directly without creating todos
+ 5. When first creating a todo list for a task, ALWAYS ask the user if the plan looks good before starting work
+ - Create the todos, let them render, then ask: "Does this plan look good?" or similar
+ - Wait for the user's response before marking the first todo as in_progress
+ - If they want changes, adjust the plan accordingly
+ 6. Update todo status promptly as you complete each item
+
+ The todo list is a planning tool - use it judiciously to avoid overwhelming the user with excessive task tracking.
+ `;
+ function getSystemPrompt(workspacePath) {
+ const workingDirSection = `
+ ### File System and Paths
+
+ **IMPORTANT - Path Handling:**
+ - All file paths use fully qualified absolute system paths
+ - The workspace root is: \`${workspacePath}\`
+ - Example: \`${workspacePath}/src/index.ts\`, \`${workspacePath}/README.md\`
+ - To list the workspace root, use \`ls("${workspacePath}")\`
+ - Always use full absolute paths for all file operations
+ `;
+ return workingDirSection + BASE_SYSTEM_PROMPT;
+ }
+ const checkpointers = /* @__PURE__ */ new Map();
+ async function getCheckpointer(threadId) {
+ let checkpointer = checkpointers.get(threadId);
+ if (!checkpointer) {
+ const dbPath = getThreadCheckpointPath(threadId);
+ checkpointer = new SqlJsSaver(dbPath);
+ await checkpointer.initialize();
+ checkpointers.set(threadId, checkpointer);
+ }
+ return checkpointer;
+ }
+ async function closeCheckpointer(threadId) {
+ const checkpointer = checkpointers.get(threadId);
+ if (checkpointer) {
+ await checkpointer.close();
+ checkpointers.delete(threadId);
+ }
+ }
+ function getModelInstance(modelId) {
+ const model = modelId || getDefaultModel();
+ console.log("[Runtime] Using model:", model);
+ if (model === "custom" || model.startsWith("custom-")) {
+ const customConfig = getCustomApiConfig();
+ console.log("[Runtime] Custom API config present:", !!customConfig);
+ if (!customConfig) {
+ throw new Error("Custom API configuration not set");
+ }
+ console.log("[Runtime] Custom API config:", {
+ baseUrl: customConfig.baseUrl,
+ model: customConfig.model,
+ apiKeyLength: customConfig.apiKey?.length,
+ apiKeyPrefix: customConfig.apiKey?.substring(0, 10),
+ apiKeySuffix: customConfig.apiKey?.substring(customConfig.apiKey.length - 10),
+ apiKeyHasNewline: customConfig.apiKey?.includes("\n"),
+ apiKeyHasSpace: customConfig.apiKey?.includes(" ")
+ });
+ const chatModel = new openai.ChatOpenAI({
+ model: customConfig.model || model,
+ apiKey: customConfig.apiKey,
+ // Try using apiKey instead of openAIApiKey
+ configuration: {
+ baseURL: customConfig.baseUrl
+ },
+ timeout: 6e4,
+ // 60 seconds timeout
+ maxRetries: 2
+ });
+ console.log("[Runtime] ChatOpenAI instance created");
+ return chatModel;
+ }
+ if (model.startsWith("claude")) {
+ const apiKey = getApiKey("anthropic");
+ console.log("[Runtime] Anthropic API key present:", !!apiKey);
+ if (!apiKey) {
+ throw new Error("Anthropic API key not configured");
+ }
+ return new anthropic.ChatAnthropic({
+ model,
+ anthropicApiKey: apiKey
+ });
+ } else if (model.startsWith("gpt") || model.startsWith("o1") || model.startsWith("o3") || model.startsWith("o4")) {
+ const apiKey = getApiKey("openai");
+ console.log("[Runtime] OpenAI API key present:", !!apiKey);
+ if (!apiKey) {
+ throw new Error("OpenAI API key not configured");
+ }
+ return new openai.ChatOpenAI({
+ model,
+ openAIApiKey: apiKey
+ });
+ } else if (model.startsWith("gemini")) {
+ const apiKey = getApiKey("google");
+ console.log("[Runtime] Google API key present:", !!apiKey);
+ if (!apiKey) {
+ throw new Error("Google API key not configured");
+ }
+ return new googleGenai.ChatGoogleGenerativeAI({
+ model,
+ apiKey
+ });
+ }
+ return model;
+ }
+ async function createAgentRuntime(options) {
+ const { threadId, modelId, workspacePath } = options;
+ if (!threadId) {
+ throw new Error("Thread ID is required for checkpointing.");
+ }
+ if (!workspacePath) {
+ throw new Error(
+ "Workspace path is required. Please select a workspace folder before running the agent."
+ );
+ }
+ console.log("[Runtime] Creating agent runtime...");
+ console.log("[Runtime] Thread ID:", threadId);
+ console.log("[Runtime] Workspace path:", workspacePath);
+ const model = getModelInstance(modelId);
+ console.log("[Runtime] Model instance created:", typeof model);
+ const checkpointer = await getCheckpointer(threadId);
+ console.log("[Runtime] Checkpointer ready for thread:", threadId);
+ const backend = new LocalSandbox({
+ rootDir: workspacePath,
+ virtualMode: false,
+ // Use absolute system paths for consistency with shell commands
+ timeout: 12e4,
+ // 2 minutes
+ maxOutputBytes: 1e5
+ // ~100KB
+ });
+ const systemPrompt = getSystemPrompt(workspacePath);
+ const filesystemSystemPrompt = `You have access to a filesystem. All file paths use fully qualified absolute system paths.
+
+ - ls: list files in a directory (e.g., ls("${workspacePath}"))
+ - read_file: read a file from the filesystem
+ - write_file: write to a file in the filesystem
+ - edit_file: edit a file in the filesystem
+ - glob: find files matching a pattern (e.g., "**/*.py")
+ - grep: search for text within files
+
+ The workspace root is: ${workspacePath}`;
+ const agent = deepagents.createDeepAgent({
+ model,
+ checkpointer,
+ backend,
+ systemPrompt,
+ // Custom filesystem prompt for absolute paths (requires deepagents update)
+ filesystemSystemPrompt,
+ // Require human approval for all shell commands
+ interruptOn: { execute: true }
+ });
+ console.log("[Runtime] Deep agent created with LocalSandbox at:", workspacePath);
+ return agent;
+ }
+ let db = null;
+ let saveTimer = null;
+ let dirty = false;
+ function saveToDisk() {
+ if (!db) return;
+ dirty = true;
+ if (saveTimer) {
+ clearTimeout(saveTimer);
+ }
+ saveTimer = setTimeout(() => {
+ if (db && dirty) {
+ const data = db.export();
+ fs.writeFileSync(getDbPath(), Buffer.from(data));
+ dirty = false;
+ }
+ }, 100);
+ }
+ function getDb() {
+ if (!db) {
+ throw new Error("Database not initialized. Call initializeDatabase() first.");
+ }
+ return db;
+ }
+ async function initializeDatabase() {
+ const dbPath = getDbPath();
+ console.log("Initializing database at:", dbPath);
+ const SQL = await initSqlJs();
+ if (fs.existsSync(dbPath)) {
+ const buffer = fs.readFileSync(dbPath);
+ db = new SQL.Database(buffer);
+ } else {
+ const dir = path.dirname(dbPath);
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir, { recursive: true });
+ }
+ db = new SQL.Database();
+ }
+ db.run(`
+ CREATE TABLE IF NOT EXISTS threads (
+ thread_id TEXT PRIMARY KEY,
+ created_at INTEGER NOT NULL,
+ updated_at INTEGER NOT NULL,
+ metadata TEXT,
+ status TEXT DEFAULT 'idle',
+ thread_values TEXT,
+ title TEXT
+ )
+ `);
+ db.run(`
+ CREATE TABLE IF NOT EXISTS runs (
+ run_id TEXT PRIMARY KEY,
+ thread_id TEXT REFERENCES threads(thread_id) ON DELETE CASCADE,
+ assistant_id TEXT,
+ created_at INTEGER NOT NULL,
+ updated_at INTEGER NOT NULL,
+ status TEXT,
+ metadata TEXT,
+ kwargs TEXT
+ )
+ `);
+ db.run(`
+ CREATE TABLE IF NOT EXISTS assistants (
+ assistant_id TEXT PRIMARY KEY,
+ graph_id TEXT NOT NULL,
+ name TEXT,
+ model TEXT DEFAULT 'claude-sonnet-4-5-20250929',
+ config TEXT,
+ created_at INTEGER NOT NULL,
+ updated_at INTEGER NOT NULL
+ )
+ `);
+ db.run(`CREATE INDEX IF NOT EXISTS idx_threads_updated_at ON threads(updated_at)`);
+ db.run(`CREATE INDEX IF NOT EXISTS idx_runs_thread_id ON runs(thread_id)`);
+ db.run(`CREATE INDEX IF NOT EXISTS idx_runs_status ON runs(status)`);
+ saveToDisk();
+ console.log("Database initialized successfully");
+ return db;
+ }
+ function getAllThreads() {
+ const database = getDb();
+ const stmt = database.prepare("SELECT * FROM threads ORDER BY updated_at DESC");
+ const threads = [];
+ while (stmt.step()) {
+ threads.push(stmt.getAsObject());
+ }
+ stmt.free();
+ return threads;
+ }
+ function getThread(threadId) {
+ const database = getDb();
+ const stmt = database.prepare("SELECT * FROM threads WHERE thread_id = ?");
+ stmt.bind([threadId]);
+ if (!stmt.step()) {
+ stmt.free();
+ return null;
+ }
+ const thread = stmt.getAsObject();
+ stmt.free();
+ return thread;
+ }
+ function createThread(threadId, metadata) {
+ const database = getDb();
+ const now = Date.now();
+ database.run(
+ `INSERT INTO threads (thread_id, created_at, updated_at, metadata, status)
+ VALUES (?, ?, ?, ?, ?)`,
+ [threadId, now, now, metadata ? JSON.stringify(metadata) : null, "idle"]
+ );
+ saveToDisk();
+ return {
+ thread_id: threadId,
+ created_at: now,
+ updated_at: now,
+ metadata: metadata ? JSON.stringify(metadata) : null,
+ status: "idle",
+ thread_values: null,
+ title: null
+ };
+ }
+ function updateThread(threadId, updates) {
+ const database = getDb();
+ const existing = getThread(threadId);
+ if (!existing) return null;
+ const now = Date.now();
+ const setClauses = ["updated_at = ?"];
+ const values = [now];
+ if (updates.metadata !== void 0) {
+ setClauses.push("metadata = ?");
+ values.push(
+ typeof updates.metadata === "string" ? updates.metadata : JSON.stringify(updates.metadata)
+ );
+ }
+ if (updates.status !== void 0) {
+ setClauses.push("status = ?");
+ values.push(updates.status);
+ }
+ if (updates.thread_values !== void 0) {
+ setClauses.push("thread_values = ?");
+ values.push(updates.thread_values);
+ }
+ if (updates.title !== void 0) {
+ setClauses.push("title = ?");
+ values.push(updates.title);
+ }
+ values.push(threadId);
+ database.run(`UPDATE threads SET ${setClauses.join(", ")} WHERE thread_id = ?`, values);
+ saveToDisk();
+ return getThread(threadId);
+ }
+ function deleteThread(threadId) {
+ const database = getDb();
+ database.run("DELETE FROM threads WHERE thread_id = ?", [threadId]);
+ saveToDisk();
+ }
+ const index = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
+ __proto__: null,
+ createThread,
+ deleteThread,
+ getAllThreads,
+ getDb,
+ getThread,
+ initializeDatabase,
+ updateThread
+ }, Symbol.toStringTag, { value: "Module" }));
1543
+ const activeRuns = /* @__PURE__ */ new Map();
1544
+ function registerAgentHandlers(ipcMain) {
1545
+ console.log("[Agent] Registering agent handlers...");
1546
+ ipcMain.on(
1547
+ "agent:invoke",
1548
+ async (event, { threadId, message }) => {
1549
+ const channel = `agent:stream:${threadId}`;
1550
+ const window = electron.BrowserWindow.fromWebContents(event.sender);
1551
+ console.log("[Agent] Received invoke request:", {
1552
+ threadId,
1553
+ message: message.substring(0, 50)
1554
+ });
1555
+ if (!window) {
1556
+ console.error("[Agent] No window found");
1557
+ return;
1558
+ }
1559
+ const existingController = activeRuns.get(threadId);
1560
+ if (existingController) {
1561
+ console.log("[Agent] Aborting existing stream for thread:", threadId);
1562
+ existingController.abort();
1563
+ activeRuns.delete(threadId);
1564
+ }
1565
+ const abortController = new AbortController();
1566
+ activeRuns.set(threadId, abortController);
1567
+ const onWindowClosed = () => {
1568
+ console.log("[Agent] Window closed, aborting stream for thread:", threadId);
1569
+ abortController.abort();
1570
+ };
1571
+ window.once("closed", onWindowClosed);
1572
+ try {
1573
+ const thread = getThread(threadId);
1574
+ const metadata = thread?.metadata ? JSON.parse(thread.metadata) : {};
1575
+ const workspacePath = metadata.workspacePath;
1576
+ const currentModel = metadata.currentModel;
1577
+ if (!workspacePath) {
1578
+ window.webContents.send(channel, {
1579
+ type: "error",
1580
+ error: "WORKSPACE_REQUIRED",
1581
+ message: "Please select a workspace folder before sending messages."
1582
+ });
1583
+ return;
1584
+ }
1585
+ const agent = await createAgentRuntime({
1586
+ threadId,
1587
+ workspacePath,
1588
+ modelId: currentModel
1589
+ });
1590
+ const humanMessage = new messages.HumanMessage(message);
1591
+ const stream = await agent.stream(
1592
+ { messages: [humanMessage] },
1593
+ {
1594
+ configurable: { thread_id: threadId },
1595
+ signal: abortController.signal,
1596
+ streamMode: ["messages", "values"],
1597
+ recursionLimit: 1e3
1598
+ }
1599
+ );
1600
+ for await (const chunk of stream) {
1601
+ if (abortController.signal.aborted) break;
1602
+ const [mode, data] = chunk;
1603
+ window.webContents.send(channel, {
1604
+ type: "stream",
1605
+ mode,
1606
+ data: JSON.parse(JSON.stringify(data))
1607
+ });
1608
+ }
1609
+ if (!abortController.signal.aborted) {
1610
+ window.webContents.send(channel, { type: "done" });
1611
+ }
1612
+ } catch (error) {
1613
+ const isAbortError = error instanceof Error && (error.name === "AbortError" || error.message.includes("aborted") || error.message.includes("Controller is already closed"));
1614
+ if (!isAbortError) {
1615
+ console.error("[Agent] Error:", error);
1616
+ window.webContents.send(channel, {
1617
+ type: "error",
1618
+ error: error instanceof Error ? error.message : "Unknown error"
1619
+ });
1620
+ }
1621
+ } finally {
1622
+ window.removeListener("closed", onWindowClosed);
1623
+ activeRuns.delete(threadId);
1624
+ }
1625
+ }
1626
+ );
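+ // "agent:resume": resumes an interrupted run by streaming a LangGraph
+ // Command({ resume: { decisions: [{ type }] } }) with the same config.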
1627
+ ipcMain.on(
1628
+ "agent:resume",
1629
+ async (event, {
1630
+ threadId,
1631
+ command
1632
+ }) => {
1633
+ const channel = `agent:stream:${threadId}`;
1634
+ const window = electron.BrowserWindow.fromWebContents(event.sender);
1635
+ console.log("[Agent] Received resume request:", { threadId, command });
1636
+ if (!window) {
1637
+ console.error("[Agent] No window found for resume");
1638
+ return;
1639
+ }
1640
+ const thread = getThread(threadId);
1641
+ const metadata = thread?.metadata ? JSON.parse(thread.metadata) : {};
1642
+ const workspacePath = metadata.workspacePath;
1643
+ const currentModel = metadata.currentModel;
1644
+ if (!workspacePath) {
1645
+ window.webContents.send(channel, {
1646
+ type: "error",
1647
+ error: "Workspace path is required"
1648
+ });
1649
+ return;
1650
+ }
1651
+ const existingController = activeRuns.get(threadId);
1652
+ if (existingController) {
1653
+ existingController.abort();
1654
+ activeRuns.delete(threadId);
1655
+ }
1656
+ const abortController = new AbortController();
1657
+ activeRuns.set(threadId, abortController);
1658
+ try {
1659
+ const agent = await createAgentRuntime({
1660
+ threadId,
1661
+ workspacePath,
1662
+ modelId: currentModel
1663
+ });
1664
+ const config = {
1665
+ configurable: { thread_id: threadId },
1666
+ signal: abortController.signal,
1667
+ streamMode: ["messages", "values"],
1668
+ recursionLimit: 1e3
1669
+ };
1670
+ const decisionType = command?.resume?.decision || "approve";
1671
+ const resumeValue = { decisions: [{ type: decisionType }] };
1672
+ const stream = await agent.stream(new langgraph.Command({ resume: resumeValue }), config);
1673
+ for await (const chunk of stream) {
1674
+ if (abortController.signal.aborted) break;
1675
+ const [mode, data] = chunk;
1676
+ window.webContents.send(channel, {
1677
+ type: "stream",
1678
+ mode,
1679
+ data: JSON.parse(JSON.stringify(data))
1680
+ });
1681
+ }
1682
+ if (!abortController.signal.aborted) {
1683
+ window.webContents.send(channel, { type: "done" });
1684
+ }
1685
+ } catch (error) {
1686
+ const isAbortError = error instanceof Error && (error.name === "AbortError" || error.message.includes("aborted") || error.message.includes("Controller is already closed"));
1687
+ if (!isAbortError) {
1688
+ console.error("[Agent] Resume error:", error);
1689
+ window.webContents.send(channel, {
1690
+ type: "error",
1691
+ error: error instanceof Error ? error.message : "Unknown error"
1692
+ });
1693
+ }
1694
+ } finally {
1695
+ activeRuns.delete(threadId);
1696
+ }
1697
+ }
1698
+ );
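+ // "agent:interrupt": applies an approve/reject decision to a pending
+ // interrupt. "approve" resumes from the checkpoint via stream(null, config);
+ // "reject" just reports "done" without resuming.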
1699
+ ipcMain.on(
1700
+ "agent:interrupt",
1701
+ async (event, { threadId, decision }) => {
1702
+ const channel = `agent:stream:${threadId}`;
1703
+ const window = electron.BrowserWindow.fromWebContents(event.sender);
1704
+ if (!window) {
1705
+ console.error("[Agent] No window found for interrupt response");
1706
+ return;
1707
+ }
1708
+ const thread = getThread(threadId);
1709
+ const metadata = thread?.metadata ? JSON.parse(thread.metadata) : {};
1710
+ const workspacePath = metadata.workspacePath;
1711
+ const currentModel = metadata.currentModel;
1712
+ if (!workspacePath) {
1713
+ window.webContents.send(channel, {
1714
+ type: "error",
1715
+ error: "Workspace path is required"
1716
+ });
1717
+ return;
1718
+ }
1719
+ const existingController = activeRuns.get(threadId);
1720
+ if (existingController) {
1721
+ existingController.abort();
1722
+ activeRuns.delete(threadId);
1723
+ }
1724
+ const abortController = new AbortController();
1725
+ activeRuns.set(threadId, abortController);
1726
+ try {
1727
+ const agent = await createAgentRuntime({
1728
+ threadId,
1729
+ workspacePath,
1730
+ modelId: currentModel
1731
+ });
1732
+ const config = {
1733
+ configurable: { thread_id: threadId },
1734
+ signal: abortController.signal,
1735
+ streamMode: ["messages", "values"],
1736
+ recursionLimit: 1e3
1737
+ };
1738
+ if (decision.type === "approve") {
1739
+ const stream = await agent.stream(null, config);
1740
+ for await (const chunk of stream) {
1741
+ if (abortController.signal.aborted) break;
1742
+ const [mode, data] = chunk;
1743
+ window.webContents.send(channel, {
1744
+ type: "stream",
1745
+ mode,
1746
+ data: JSON.parse(JSON.stringify(data))
1747
+ });
1748
+ }
1749
+ if (!abortController.signal.aborted) {
1750
+ window.webContents.send(channel, { type: "done" });
1751
+ }
1752
+ } else if (decision.type === "reject") {
1753
+ window.webContents.send(channel, { type: "done" });
1754
+ }
1755
+ } catch (error) {
1756
+ const isAbortError = error instanceof Error && (error.name === "AbortError" || error.message.includes("aborted") || error.message.includes("Controller is already closed"));
1757
+ if (!isAbortError) {
1758
+ console.error("[Agent] Interrupt error:", error);
1759
+ window.webContents.send(channel, {
1760
+ type: "error",
1761
+ error: error instanceof Error ? error.message : "Unknown error"
1762
+ });
1763
+ }
1764
+ } finally {
1765
+ activeRuns.delete(threadId);
1766
+ }
1767
+ }
1768
+ );
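+ // "agent:cancel": aborts the active run for a thread, if one exists.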
1769
+ ipcMain.handle("agent:cancel", async (_event, { threadId }) => {
1770
+ const controller = activeRuns.get(threadId);
1771
+ if (controller) {
1772
+ controller.abort();
1773
+ activeRuns.delete(threadId);
1774
+ }
1775
+ });
1776
+ }
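+ // Derives a short (~50 char) thread title from the first message: the whole
+ // message if short enough, else its first sentence, else a word-boundary
+ // truncation with a trailing ellipsis.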
1777
+ function generateTitle(message) {
1778
+ const cleaned = message.trim().replace(/\s+/g, " ");
1779
+ if (cleaned.length <= 50) {
1780
+ return cleaned;
1781
+ }
1782
+ const sentenceMatch = cleaned.match(/^[^.!?]+[.!?]/);
1783
+ if (sentenceMatch && sentenceMatch[0].length <= 60) {
1784
+ return sentenceMatch[0].trim();
1785
+ }
1786
+ const words = cleaned.split(/\s+/);
1787
+ let title = "";
1788
+ for (const word of words) {
1789
+ if ((title + " " + word).length > 47) {
1790
+ break;
1791
+ }
1792
+ title = title ? title + " " + word : word;
1793
+ }
1794
+ if (words.join(" ").length > title.length) {
1795
+ title += "...";
1796
+ }
1797
+ return title;
1798
+ }
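+ // Registers thread CRUD IPC handlers ("threads:list", "threads:get",
+ // "threads:create", "threads:update", "threads:delete", "threads:history",
+ // "threads:generateTitle"), deserializing rows from the metadata store.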
1799
+ function registerThreadHandlers(ipcMain) {
1800
+ ipcMain.handle("threads:list", async () => {
1801
+ const threads = getAllThreads();
1802
+ return threads.map((row) => ({
1803
+ thread_id: row.thread_id,
1804
+ created_at: new Date(row.created_at),
1805
+ updated_at: new Date(row.updated_at),
1806
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
1807
+ status: row.status,
1808
+ thread_values: row.thread_values ? JSON.parse(row.thread_values) : void 0,
1809
+ title: row.title
1810
+ }));
1811
+ });
1812
+ ipcMain.handle("threads:get", async (_event, threadId) => {
1813
+ const row = getThread(threadId);
1814
+ if (!row) return null;
1815
+ return {
1816
+ thread_id: row.thread_id,
1817
+ created_at: new Date(row.created_at),
1818
+ updated_at: new Date(row.updated_at),
1819
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
1820
+ status: row.status,
1821
+ thread_values: row.thread_values ? JSON.parse(row.thread_values) : void 0,
1822
+ title: row.title
1823
+ };
1824
+ });
1825
+ ipcMain.handle("threads:create", async (_event, metadata) => {
1826
+ const threadId = uuid.v4();
1827
+ const title = metadata?.title || `Thread ${(/* @__PURE__ */ new Date()).toLocaleDateString()}`;
1828
+ const thread = createThread(threadId, { ...metadata, title });
1829
+ return {
1830
+ thread_id: thread.thread_id,
1831
+ created_at: new Date(thread.created_at),
1832
+ updated_at: new Date(thread.updated_at),
1833
+ metadata: thread.metadata ? JSON.parse(thread.metadata) : void 0,
1834
+ status: thread.status,
1835
+ thread_values: thread.thread_values ? JSON.parse(thread.thread_values) : void 0,
1836
+ title
1837
+ };
1838
+ });
1839
+ ipcMain.handle(
1840
+ "threads:update",
1841
+ async (_event, { threadId, updates }) => {
1842
+ const updateData = {};
1843
+ if (updates.title !== void 0) updateData.title = updates.title;
1844
+ if (updates.status !== void 0) updateData.status = updates.status;
1845
+ if (updates.metadata !== void 0)
1846
+ updateData.metadata = JSON.stringify(updates.metadata);
1847
+ if (updates.thread_values !== void 0) updateData.thread_values = JSON.stringify(updates.thread_values);
1848
+ const row = updateThread(threadId, updateData);
1849
+ if (!row) throw new Error("Thread not found");
1850
+ return {
1851
+ thread_id: row.thread_id,
1852
+ created_at: new Date(row.created_at),
1853
+ updated_at: new Date(row.updated_at),
1854
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
1855
+ status: row.status,
1856
+ thread_values: row.thread_values ? JSON.parse(row.thread_values) : void 0,
1857
+ title: row.title
1858
+ };
1859
+ }
1860
+ );
1861
+ ipcMain.handle("threads:delete", async (_event, threadId) => {
1862
+ console.log("[Threads] Deleting thread:", threadId);
1863
+ deleteThread(threadId);
1864
+ console.log("[Threads] Deleted from metadata store");
1865
+ try {
1866
+ await closeCheckpointer(threadId);
1867
+ console.log("[Threads] Closed checkpointer");
1868
+ } catch (e) {
1869
+ console.warn("[Threads] Failed to close checkpointer:", e);
1870
+ }
1871
+ try {
1872
+ deleteThreadCheckpoint(threadId);
1873
+ console.log("[Threads] Deleted checkpoint file");
1874
+ } catch (e) {
1875
+ console.warn("[Threads] Failed to delete checkpoint file:", e);
1876
+ }
1877
+ });
1878
+ ipcMain.handle("threads:history", async (_event, threadId) => {
1879
+ try {
1880
+ const checkpointer = await getCheckpointer(threadId);
1881
+ const history = [];
1882
+ const config = { configurable: { thread_id: threadId } };
1883
+ for await (const checkpoint of checkpointer.list(config, { limit: 50 })) {
1884
+ history.push(checkpoint);
1885
+ }
1886
+ return history;
1887
+ } catch (e) {
1888
+ console.warn("Failed to get thread history:", e);
1889
+ return [];
1890
+ }
1891
+ });
1892
+ ipcMain.handle("threads:generateTitle", async (_event, message) => {
1893
+ return generateTitle(message);
1894
+ });
1895
+ }
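+ // Suppress benign abort noise ("Controller is already closed",
+ // ERR_INVALID_STATE, aborted stream handlers) produced when runs are cancelled
+ // mid-stream; the process-level hooks below apply the same filter.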
1896
+ const originalConsoleError = console.error;
1897
+ console.error = (...args) => {
1898
+ const message = args.map((a) => String(a)).join(" ");
1899
+ if (message.includes("Controller is already closed") || message.includes("ERR_INVALID_STATE") || message.includes("StreamMessagesHandler") && message.includes("aborted")) {
1900
+ return;
1901
+ }
1902
+ originalConsoleError.apply(console, args);
1903
+ };
1904
+ process.on("uncaughtException", (error) => {
1905
+ if (error.message?.includes("Controller is already closed") || error.message?.includes("aborted")) {
1906
+ return;
1907
+ }
1908
+ originalConsoleError("Uncaught exception:", error);
1909
+ });
1910
+ process.on("unhandledRejection", (reason) => {
1911
+ const message = reason instanceof Error ? reason.message : String(reason);
1912
+ if (message?.includes("Controller is already closed") || message?.includes("aborted")) {
1913
+ return;
1914
+ }
1915
+ originalConsoleError("Unhandled rejection:", reason);
1916
+ });
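+ // Main window: 1440x900, dark background, hiddenInset title bar (macOS),
+ // preload script wired in; window.open targets are sent to the system browser.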
1917
+ let mainWindow = null;
1918
+ const isDev = !electron.app.isPackaged;
1919
+ function createWindow() {
1920
+ mainWindow = new electron.BrowserWindow({
1921
+ width: 1440,
1922
+ height: 900,
1923
+ minWidth: 1200,
1924
+ minHeight: 700,
1925
+ show: false,
1926
+ backgroundColor: "#0D0D0F",
1927
+ titleBarStyle: "hiddenInset",
1928
+ trafficLightPosition: { x: 16, y: 16 },
1929
+ webPreferences: {
1930
+ preload: path.join(__dirname, "../preload/index.js"),
1931
+ sandbox: false
1932
+ }
1933
+ });
1934
+ mainWindow.on("ready-to-show", () => {
1935
+ mainWindow?.show();
1936
+ });
1937
+ mainWindow.webContents.setWindowOpenHandler((details) => {
1938
+ electron.shell.openExternal(details.url);
1939
+ return { action: "deny" };
1940
+ });
1941
+ if (isDev && process.env["ELECTRON_RENDERER_URL"]) {
1942
+ mainWindow.loadURL(process.env["ELECTRON_RENDERER_URL"]);
1943
+ } else {
1944
+ mainWindow.loadFile(path.join(__dirname, "../renderer/index.html"));
1945
+ }
1946
+ mainWindow.on("closed", () => {
1947
+ mainWindow = null;
1948
+ });
1949
+ }
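+ // App bootstrap: platform app id / dock icon, F12 devtools toggle in dev,
+ // database initialization, IPC handler registration, then the first window.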
1950
+ electron.app.whenReady().then(async () => {
1951
+ if (process.platform === "win32") {
1952
+ electron.app.setAppUserModelId(isDev ? process.execPath : "com.langchain.openwork");
1953
+ }
1954
+ if (process.platform === "darwin" && electron.app.dock) {
1955
+ const iconPath = path.join(__dirname, "../../resources/icon.png");
1956
+ try {
1957
+ const icon = electron.nativeImage.createFromPath(iconPath);
1958
+ if (!icon.isEmpty()) {
1959
+ electron.app.dock.setIcon(icon);
1960
+ }
1961
+ } catch {
1962
+ }
1963
+ }
1964
+ if (isDev) {
1965
+ electron.app.on("browser-window-created", (_, window) => {
1966
+ window.webContents.on("before-input-event", (event, input) => {
1967
+ if (input.key === "F12") {
1968
+ window.webContents.toggleDevTools();
1969
+ event.preventDefault();
1970
+ }
1971
+ });
1972
+ });
1973
+ }
1974
+ await initializeDatabase();
1975
+ registerAgentHandlers(electron.ipcMain);
1976
+ registerThreadHandlers(electron.ipcMain);
1977
+ registerModelHandlers(electron.ipcMain);
1978
+ createWindow();
1979
+ electron.app.on("activate", () => {
1980
+ if (electron.BrowserWindow.getAllWindows().length === 0) {
1981
+ createWindow();
1982
+ }
1983
+ });
1984
+ });
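+ // Quit when all windows close, except on macOS where the app stays active.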
1985
+ electron.app.on("window-all-closed", () => {
1986
+ if (process.platform !== "darwin") {
1987
+ electron.app.quit();
1988
+ }
1989
+ });