sparkecoder 0.1.4 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,17 +1,32 @@
  var __defProp = Object.defineProperty;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __esm = (fn, res) => function __init() {
- return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
- };
  var __export = (target, all) => {
  for (var name in all)
  __defProp(target, name, { get: all[name], enumerable: true });
  };

+ // src/agent/index.ts
+ import {
+ streamText,
+ generateText as generateText2,
+ tool as tool6,
+ stepCountIs
+ } from "ai";
+ import { gateway as gateway2 } from "@ai-sdk/gateway";
+ import { z as z7 } from "zod";
+ import { nanoid as nanoid3 } from "nanoid";
+
+ // src/db/index.ts
+ import Database from "better-sqlite3";
+ import { drizzle } from "drizzle-orm/better-sqlite3";
+ import { eq, desc, and, sql } from "drizzle-orm";
+ import { nanoid } from "nanoid";
+
  // src/db/schema.ts
  var schema_exports = {};
  __export(schema_exports, {
  activeStreams: () => activeStreams,
+ checkpoints: () => checkpoints,
+ fileBackups: () => fileBackups,
  loadedSkills: () => loadedSkills,
  messages: () => messages,
  sessions: () => sessions,
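
Note on the hunk above: 0.1.5 drops esbuild's __esm lazy-evaluation wrapper, so the db and schema modules now run at import time instead of on the first init_db()/init_schema() call. A minimal sketch of what the removed helper did (illustrative only, not part of the package):

var __getOwnPropNames = Object.getOwnPropertyNames;
var __esm = (fn, res) => function __init() {
  // First call invokes the single module-body function stored on fn,
  // caches its return value in res, and clears fn so later calls
  // return the cached result without re-running the body.
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};

var value;
var init_example = __esm({
  "src/example.ts"() {
    console.log("module body runs once, on first init");
    value = 42;
  }
});

init_example(); // runs the body
init_example(); // cached, no second log
console.log(value); // 42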
@@ -20,107 +35,108 @@ __export(schema_exports, {
  toolExecutions: () => toolExecutions
  });
  import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
- var sessions, messages, toolExecutions, todoItems, loadedSkills, terminals, activeStreams;
- var init_schema = __esm({
- "src/db/schema.ts"() {
- "use strict";
- sessions = sqliteTable("sessions", {
- id: text("id").primaryKey(),
- name: text("name"),
- workingDirectory: text("working_directory").notNull(),
- model: text("model").notNull(),
- status: text("status", { enum: ["active", "waiting", "completed", "error"] }).notNull().default("active"),
- config: text("config", { mode: "json" }).$type(),
- createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
- updatedAt: integer("updated_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
- });
- messages = sqliteTable("messages", {
- id: text("id").primaryKey(),
- sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
- // Store the entire ModelMessage as JSON (role + content)
- modelMessage: text("model_message", { mode: "json" }).$type().notNull(),
- // Sequence number within session to maintain exact ordering
- sequence: integer("sequence").notNull().default(0),
- createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
- });
- toolExecutions = sqliteTable("tool_executions", {
- id: text("id").primaryKey(),
- sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
- messageId: text("message_id").references(() => messages.id, { onDelete: "cascade" }),
- toolName: text("tool_name").notNull(),
- toolCallId: text("tool_call_id").notNull(),
- input: text("input", { mode: "json" }),
- output: text("output", { mode: "json" }),
- status: text("status", { enum: ["pending", "approved", "rejected", "completed", "error"] }).notNull().default("pending"),
- requiresApproval: integer("requires_approval", { mode: "boolean" }).notNull().default(false),
- error: text("error"),
- startedAt: integer("started_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
- completedAt: integer("completed_at", { mode: "timestamp" })
- });
- todoItems = sqliteTable("todo_items", {
- id: text("id").primaryKey(),
- sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
- content: text("content").notNull(),
- status: text("status", { enum: ["pending", "in_progress", "completed", "cancelled"] }).notNull().default("pending"),
- order: integer("order").notNull().default(0),
- createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
- updatedAt: integer("updated_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
- });
- loadedSkills = sqliteTable("loaded_skills", {
- id: text("id").primaryKey(),
- sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
- skillName: text("skill_name").notNull(),
- loadedAt: integer("loaded_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
- });
- terminals = sqliteTable("terminals", {
- id: text("id").primaryKey(),
- sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
- name: text("name"),
- // Optional friendly name (e.g., "dev-server")
- command: text("command").notNull(),
- // The command that was run
- cwd: text("cwd").notNull(),
- // Working directory
- pid: integer("pid"),
- // Process ID (null if not running)
- status: text("status", { enum: ["running", "stopped", "error"] }).notNull().default("running"),
- exitCode: integer("exit_code"),
- // Exit code if stopped
- error: text("error"),
- // Error message if status is 'error'
- createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
- stoppedAt: integer("stopped_at", { mode: "timestamp" })
- });
- activeStreams = sqliteTable("active_streams", {
- id: text("id").primaryKey(),
- sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
- streamId: text("stream_id").notNull().unique(),
- // Unique stream identifier
- status: text("status", { enum: ["active", "finished", "error"] }).notNull().default("active"),
- createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
- finishedAt: integer("finished_at", { mode: "timestamp" })
- });
- }
+ var sessions = sqliteTable("sessions", {
+ id: text("id").primaryKey(),
+ name: text("name"),
+ workingDirectory: text("working_directory").notNull(),
+ model: text("model").notNull(),
+ status: text("status", { enum: ["active", "waiting", "completed", "error"] }).notNull().default("active"),
+ config: text("config", { mode: "json" }).$type(),
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
+ updatedAt: integer("updated_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
+ });
+ var messages = sqliteTable("messages", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ // Store the entire ModelMessage as JSON (role + content)
+ modelMessage: text("model_message", { mode: "json" }).$type().notNull(),
+ // Sequence number within session to maintain exact ordering
+ sequence: integer("sequence").notNull().default(0),
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
+ });
+ var toolExecutions = sqliteTable("tool_executions", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ messageId: text("message_id").references(() => messages.id, { onDelete: "cascade" }),
+ toolName: text("tool_name").notNull(),
+ toolCallId: text("tool_call_id").notNull(),
+ input: text("input", { mode: "json" }),
+ output: text("output", { mode: "json" }),
+ status: text("status", { enum: ["pending", "approved", "rejected", "completed", "error"] }).notNull().default("pending"),
+ requiresApproval: integer("requires_approval", { mode: "boolean" }).notNull().default(false),
+ error: text("error"),
+ startedAt: integer("started_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
+ completedAt: integer("completed_at", { mode: "timestamp" })
+ });
+ var todoItems = sqliteTable("todo_items", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ content: text("content").notNull(),
+ status: text("status", { enum: ["pending", "in_progress", "completed", "cancelled"] }).notNull().default("pending"),
+ order: integer("order").notNull().default(0),
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
+ updatedAt: integer("updated_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
+ });
+ var loadedSkills = sqliteTable("loaded_skills", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ skillName: text("skill_name").notNull(),
+ loadedAt: integer("loaded_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
+ });
+ var terminals = sqliteTable("terminals", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ name: text("name"),
+ // Optional friendly name (e.g., "dev-server")
+ command: text("command").notNull(),
+ // The command that was run
+ cwd: text("cwd").notNull(),
+ // Working directory
+ pid: integer("pid"),
+ // Process ID (null if not running)
+ status: text("status", { enum: ["running", "stopped", "error"] }).notNull().default("running"),
+ exitCode: integer("exit_code"),
+ // Exit code if stopped
+ error: text("error"),
+ // Error message if status is 'error'
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
+ stoppedAt: integer("stopped_at", { mode: "timestamp" })
+ });
+ var activeStreams = sqliteTable("active_streams", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ streamId: text("stream_id").notNull().unique(),
+ // Unique stream identifier
+ status: text("status", { enum: ["active", "finished", "error"] }).notNull().default("active"),
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date()),
+ finishedAt: integer("finished_at", { mode: "timestamp" })
+ });
+ var checkpoints = sqliteTable("checkpoints", {
+ id: text("id").primaryKey(),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ // The message sequence number this checkpoint was created BEFORE
+ // (i.e., the state before this user message was processed)
+ messageSequence: integer("message_sequence").notNull(),
+ // Optional git commit hash if in a git repo
+ gitHead: text("git_head"),
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
+ });
+ var fileBackups = sqliteTable("file_backups", {
+ id: text("id").primaryKey(),
+ checkpointId: text("checkpoint_id").notNull().references(() => checkpoints.id, { onDelete: "cascade" }),
+ sessionId: text("session_id").notNull().references(() => sessions.id, { onDelete: "cascade" }),
+ // Relative path from working directory
+ filePath: text("file_path").notNull(),
+ // Original content (null means file didn't exist before)
+ originalContent: text("original_content"),
+ // Whether the file existed before this checkpoint
+ existed: integer("existed", { mode: "boolean" }).notNull().default(true),
+ createdAt: integer("created_at", { mode: "timestamp" }).notNull().$defaultFn(() => /* @__PURE__ */ new Date())
  });

  // src/db/index.ts
- var db_exports = {};
- __export(db_exports, {
- activeStreamQueries: () => activeStreamQueries,
- closeDatabase: () => closeDatabase,
- getDb: () => getDb,
- initDatabase: () => initDatabase,
- messageQueries: () => messageQueries,
- sessionQueries: () => sessionQueries,
- skillQueries: () => skillQueries,
- terminalQueries: () => terminalQueries,
- todoQueries: () => todoQueries,
- toolExecutionQueries: () => toolExecutionQueries
- });
- import Database from "better-sqlite3";
- import { drizzle } from "drizzle-orm/better-sqlite3";
- import { eq, desc, and, sql } from "drizzle-orm";
- import { nanoid } from "nanoid";
+ var db = null;
+ var sqlite = null;
  function initDatabase(dbPath) {
  sqlite = new Database(dbPath);
  sqlite.pragma("journal_mode = WAL");
@@ -201,12 +217,35 @@ function initDatabase(dbPath) {
  finished_at INTEGER
  );

+ -- Checkpoints table - created before each user message
+ CREATE TABLE IF NOT EXISTS checkpoints (
+ id TEXT PRIMARY KEY,
+ session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
+ message_sequence INTEGER NOT NULL,
+ git_head TEXT,
+ created_at INTEGER NOT NULL
+ );
+
+ -- File backups table - stores original file content
+ CREATE TABLE IF NOT EXISTS file_backups (
+ id TEXT PRIMARY KEY,
+ checkpoint_id TEXT NOT NULL REFERENCES checkpoints(id) ON DELETE CASCADE,
+ session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
+ file_path TEXT NOT NULL,
+ original_content TEXT,
+ existed INTEGER NOT NULL DEFAULT 1,
+ created_at INTEGER NOT NULL
+ );
+
  CREATE INDEX IF NOT EXISTS idx_messages_session ON messages(session_id);
  CREATE INDEX IF NOT EXISTS idx_tool_executions_session ON tool_executions(session_id);
  CREATE INDEX IF NOT EXISTS idx_todo_items_session ON todo_items(session_id);
  CREATE INDEX IF NOT EXISTS idx_loaded_skills_session ON loaded_skills(session_id);
  CREATE INDEX IF NOT EXISTS idx_terminals_session ON terminals(session_id);
  CREATE INDEX IF NOT EXISTS idx_active_streams_session ON active_streams(session_id);
+ CREATE INDEX IF NOT EXISTS idx_checkpoints_session ON checkpoints(session_id);
+ CREATE INDEX IF NOT EXISTS idx_file_backups_checkpoint ON file_backups(checkpoint_id);
+ CREATE INDEX IF NOT EXISTS idx_file_backups_session ON file_backups(session_id);
  `);
  return db;
  }
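
Note: because the DDL uses CREATE TABLE IF NOT EXISTS and CREATE INDEX IF NOT EXISTS, a database created by 0.1.4 picks up the two new tables on the next initDatabase() call with no separate migration step. A quick way to confirm (sketch using better-sqlite3; the database filename is made up):

import Database from "better-sqlite3";

const sqlite = new Database("agent.db");
const names = sqlite
  .prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name IN ('checkpoints', 'file_backups')")
  .all()
  .map((r) => r.name);
console.log(names); // expect ["checkpoints", "file_backups"] once initDatabase() has run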
@@ -223,313 +262,383 @@ function closeDatabase() {
  db = null;
  }
  }
- var db, sqlite, sessionQueries, messageQueries, toolExecutionQueries, todoQueries, skillQueries, terminalQueries, activeStreamQueries;
- var init_db = __esm({
- "src/db/index.ts"() {
- "use strict";
- init_schema();
- db = null;
- sqlite = null;
- sessionQueries = {
- create(data) {
- const id = nanoid();
- const now = /* @__PURE__ */ new Date();
- const result = getDb().insert(sessions).values({
- id,
- ...data,
- createdAt: now,
- updatedAt: now
- }).returning().get();
- return result;
- },
- getById(id) {
- return getDb().select().from(sessions).where(eq(sessions.id, id)).get();
- },
- list(limit = 50, offset = 0) {
- return getDb().select().from(sessions).orderBy(desc(sessions.createdAt)).limit(limit).offset(offset).all();
- },
- updateStatus(id, status) {
- return getDb().update(sessions).set({ status, updatedAt: /* @__PURE__ */ new Date() }).where(eq(sessions.id, id)).returning().get();
- },
- updateModel(id, model) {
- return getDb().update(sessions).set({ model, updatedAt: /* @__PURE__ */ new Date() }).where(eq(sessions.id, id)).returning().get();
- },
- update(id, updates) {
- return getDb().update(sessions).set({ ...updates, updatedAt: /* @__PURE__ */ new Date() }).where(eq(sessions.id, id)).returning().get();
- },
- delete(id) {
- const result = getDb().delete(sessions).where(eq(sessions.id, id)).run();
- return result.changes > 0;
- }
- };
- messageQueries = {
- /**
- * Get the next sequence number for a session
- */
- getNextSequence(sessionId) {
- const result = getDb().select({ maxSeq: sql`COALESCE(MAX(sequence), -1)` }).from(messages).where(eq(messages.sessionId, sessionId)).get();
- return (result?.maxSeq ?? -1) + 1;
- },
- /**
- * Create a single message from a ModelMessage
- */
- create(sessionId, modelMessage) {
- const id = nanoid();
- const sequence = this.getNextSequence(sessionId);
- const result = getDb().insert(messages).values({
- id,
- sessionId,
- modelMessage,
- sequence,
- createdAt: /* @__PURE__ */ new Date()
- }).returning().get();
- return result;
- },
- /**
- * Add multiple ModelMessages at once (from response.messages)
- * Maintains insertion order via sequence numbers
- */
- addMany(sessionId, modelMessages) {
- const results = [];
- let sequence = this.getNextSequence(sessionId);
- for (const msg of modelMessages) {
- const id = nanoid();
- const result = getDb().insert(messages).values({
- id,
- sessionId,
- modelMessage: msg,
- sequence,
- createdAt: /* @__PURE__ */ new Date()
- }).returning().get();
- results.push(result);
- sequence++;
- }
- return results;
- },
- /**
- * Get all messages for a session as ModelMessage[]
- * Ordered by sequence to maintain exact insertion order
- */
- getBySession(sessionId) {
- return getDb().select().from(messages).where(eq(messages.sessionId, sessionId)).orderBy(messages.sequence).all();
- },
- /**
- * Get ModelMessages directly (for passing to AI SDK)
- */
- getModelMessages(sessionId) {
- const messages2 = this.getBySession(sessionId);
- return messages2.map((m) => m.modelMessage);
- },
- getRecentBySession(sessionId, limit = 50) {
- return getDb().select().from(messages).where(eq(messages.sessionId, sessionId)).orderBy(desc(messages.sequence)).limit(limit).all().reverse();
- },
- countBySession(sessionId) {
- const result = getDb().select({ count: sql`count(*)` }).from(messages).where(eq(messages.sessionId, sessionId)).get();
- return result?.count ?? 0;
- },
- deleteBySession(sessionId) {
- const result = getDb().delete(messages).where(eq(messages.sessionId, sessionId)).run();
- return result.changes;
- }
- };
- toolExecutionQueries = {
- create(data) {
- const id = nanoid();
- const result = getDb().insert(toolExecutions).values({
- id,
- ...data,
- startedAt: /* @__PURE__ */ new Date()
- }).returning().get();
- return result;
- },
- getById(id) {
- return getDb().select().from(toolExecutions).where(eq(toolExecutions.id, id)).get();
- },
- getByToolCallId(toolCallId) {
- return getDb().select().from(toolExecutions).where(eq(toolExecutions.toolCallId, toolCallId)).get();
- },
- getPendingApprovals(sessionId) {
- return getDb().select().from(toolExecutions).where(
- and(
- eq(toolExecutions.sessionId, sessionId),
- eq(toolExecutions.status, "pending"),
- eq(toolExecutions.requiresApproval, true)
- )
- ).all();
- },
- approve(id) {
- return getDb().update(toolExecutions).set({ status: "approved" }).where(eq(toolExecutions.id, id)).returning().get();
- },
- reject(id) {
- return getDb().update(toolExecutions).set({ status: "rejected" }).where(eq(toolExecutions.id, id)).returning().get();
- },
- complete(id, output, error) {
- return getDb().update(toolExecutions).set({
- status: error ? "error" : "completed",
- output,
- error,
- completedAt: /* @__PURE__ */ new Date()
- }).where(eq(toolExecutions.id, id)).returning().get();
- },
- getBySession(sessionId) {
- return getDb().select().from(toolExecutions).where(eq(toolExecutions.sessionId, sessionId)).orderBy(toolExecutions.startedAt).all();
- }
- };
- todoQueries = {
- create(data) {
- const id = nanoid();
- const now = /* @__PURE__ */ new Date();
- const result = getDb().insert(todoItems).values({
- id,
- ...data,
- createdAt: now,
- updatedAt: now
- }).returning().get();
- return result;
- },
- createMany(sessionId, items) {
- const now = /* @__PURE__ */ new Date();
- const values = items.map((item, index) => ({
- id: nanoid(),
- sessionId,
- content: item.content,
- order: item.order ?? index,
- createdAt: now,
- updatedAt: now
- }));
- return getDb().insert(todoItems).values(values).returning().all();
- },
- getBySession(sessionId) {
- return getDb().select().from(todoItems).where(eq(todoItems.sessionId, sessionId)).orderBy(todoItems.order).all();
- },
- updateStatus(id, status) {
- return getDb().update(todoItems).set({ status, updatedAt: /* @__PURE__ */ new Date() }).where(eq(todoItems.id, id)).returning().get();
- },
- delete(id) {
- const result = getDb().delete(todoItems).where(eq(todoItems.id, id)).run();
- return result.changes > 0;
- },
- clearSession(sessionId) {
- const result = getDb().delete(todoItems).where(eq(todoItems.sessionId, sessionId)).run();
- return result.changes;
- }
- };
- skillQueries = {
- load(sessionId, skillName) {
- const id = nanoid();
- const result = getDb().insert(loadedSkills).values({
- id,
- sessionId,
- skillName,
- loadedAt: /* @__PURE__ */ new Date()
- }).returning().get();
- return result;
- },
- getBySession(sessionId) {
- return getDb().select().from(loadedSkills).where(eq(loadedSkills.sessionId, sessionId)).orderBy(loadedSkills.loadedAt).all();
- },
- isLoaded(sessionId, skillName) {
- const result = getDb().select().from(loadedSkills).where(
- and(
- eq(loadedSkills.sessionId, sessionId),
- eq(loadedSkills.skillName, skillName)
- )
- ).get();
- return !!result;
- }
- };
- terminalQueries = {
- create(data) {
- const id = nanoid();
- const result = getDb().insert(terminals).values({
- id,
- ...data,
- createdAt: /* @__PURE__ */ new Date()
- }).returning().get();
- return result;
- },
- getById(id) {
- return getDb().select().from(terminals).where(eq(terminals.id, id)).get();
- },
- getBySession(sessionId) {
- return getDb().select().from(terminals).where(eq(terminals.sessionId, sessionId)).orderBy(desc(terminals.createdAt)).all();
- },
- getRunning(sessionId) {
- return getDb().select().from(terminals).where(
- and(
- eq(terminals.sessionId, sessionId),
- eq(terminals.status, "running")
- )
- ).all();
- },
- updateStatus(id, status, exitCode, error) {
- return getDb().update(terminals).set({
- status,
- exitCode,
- error,
- stoppedAt: status !== "running" ? /* @__PURE__ */ new Date() : void 0
- }).where(eq(terminals.id, id)).returning().get();
- },
- updatePid(id, pid) {
- return getDb().update(terminals).set({ pid }).where(eq(terminals.id, id)).returning().get();
- },
- delete(id) {
- const result = getDb().delete(terminals).where(eq(terminals.id, id)).run();
- return result.changes > 0;
- },
- deleteBySession(sessionId) {
- const result = getDb().delete(terminals).where(eq(terminals.sessionId, sessionId)).run();
- return result.changes;
- }
- };
- activeStreamQueries = {
- create(sessionId, streamId) {
- const id = nanoid();
- const result = getDb().insert(activeStreams).values({
- id,
- sessionId,
- streamId,
- status: "active",
- createdAt: /* @__PURE__ */ new Date()
- }).returning().get();
- return result;
- },
- getBySessionId(sessionId) {
- return getDb().select().from(activeStreams).where(
- and(
- eq(activeStreams.sessionId, sessionId),
- eq(activeStreams.status, "active")
- )
- ).get();
- },
- getByStreamId(streamId) {
- return getDb().select().from(activeStreams).where(eq(activeStreams.streamId, streamId)).get();
- },
- finish(streamId) {
- return getDb().update(activeStreams).set({ status: "finished", finishedAt: /* @__PURE__ */ new Date() }).where(eq(activeStreams.streamId, streamId)).returning().get();
- },
- markError(streamId) {
- return getDb().update(activeStreams).set({ status: "error", finishedAt: /* @__PURE__ */ new Date() }).where(eq(activeStreams.streamId, streamId)).returning().get();
- },
- deleteBySession(sessionId) {
- const result = getDb().delete(activeStreams).where(eq(activeStreams.sessionId, sessionId)).run();
- return result.changes;
- }
- };
+ var sessionQueries = {
+ create(data) {
+ const id = nanoid();
+ const now = /* @__PURE__ */ new Date();
+ const result = getDb().insert(sessions).values({
+ id,
+ ...data,
+ createdAt: now,
+ updatedAt: now
+ }).returning().get();
+ return result;
+ },
+ getById(id) {
+ return getDb().select().from(sessions).where(eq(sessions.id, id)).get();
+ },
+ list(limit = 50, offset = 0) {
+ return getDb().select().from(sessions).orderBy(desc(sessions.createdAt)).limit(limit).offset(offset).all();
+ },
+ updateStatus(id, status) {
+ return getDb().update(sessions).set({ status, updatedAt: /* @__PURE__ */ new Date() }).where(eq(sessions.id, id)).returning().get();
+ },
+ updateModel(id, model) {
+ return getDb().update(sessions).set({ model, updatedAt: /* @__PURE__ */ new Date() }).where(eq(sessions.id, id)).returning().get();
+ },
+ update(id, updates) {
+ return getDb().update(sessions).set({ ...updates, updatedAt: /* @__PURE__ */ new Date() }).where(eq(sessions.id, id)).returning().get();
+ },
+ delete(id) {
+ const result = getDb().delete(sessions).where(eq(sessions.id, id)).run();
+ return result.changes > 0;
  }
- });
-
- // src/agent/index.ts
- init_db();
- import {
- streamText,
- generateText as generateText2,
- tool as tool6,
- stepCountIs
- } from "ai";
- import { gateway as gateway2 } from "@ai-sdk/gateway";
- import { z as z7 } from "zod";
- import { nanoid as nanoid3 } from "nanoid";
+ };
+ var messageQueries = {
+ /**
+ * Get the next sequence number for a session
+ */
+ getNextSequence(sessionId) {
+ const result = getDb().select({ maxSeq: sql`COALESCE(MAX(sequence), -1)` }).from(messages).where(eq(messages.sessionId, sessionId)).get();
+ return (result?.maxSeq ?? -1) + 1;
+ },
+ /**
+ * Create a single message from a ModelMessage
+ */
+ create(sessionId, modelMessage) {
+ const id = nanoid();
+ const sequence = this.getNextSequence(sessionId);
+ const result = getDb().insert(messages).values({
+ id,
+ sessionId,
+ modelMessage,
+ sequence,
+ createdAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ return result;
+ },
+ /**
+ * Add multiple ModelMessages at once (from response.messages)
+ * Maintains insertion order via sequence numbers
+ */
+ addMany(sessionId, modelMessages) {
+ const results = [];
+ let sequence = this.getNextSequence(sessionId);
+ for (const msg of modelMessages) {
+ const id = nanoid();
+ const result = getDb().insert(messages).values({
+ id,
+ sessionId,
+ modelMessage: msg,
+ sequence,
+ createdAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ results.push(result);
+ sequence++;
+ }
+ return results;
+ },
+ /**
+ * Get all messages for a session as ModelMessage[]
+ * Ordered by sequence to maintain exact insertion order
+ */
+ getBySession(sessionId) {
+ return getDb().select().from(messages).where(eq(messages.sessionId, sessionId)).orderBy(messages.sequence).all();
+ },
+ /**
+ * Get ModelMessages directly (for passing to AI SDK)
+ */
+ getModelMessages(sessionId) {
+ const messages2 = this.getBySession(sessionId);
+ return messages2.map((m) => m.modelMessage);
+ },
+ getRecentBySession(sessionId, limit = 50) {
+ return getDb().select().from(messages).where(eq(messages.sessionId, sessionId)).orderBy(desc(messages.sequence)).limit(limit).all().reverse();
+ },
+ countBySession(sessionId) {
+ const result = getDb().select({ count: sql`count(*)` }).from(messages).where(eq(messages.sessionId, sessionId)).get();
+ return result?.count ?? 0;
+ },
+ deleteBySession(sessionId) {
+ const result = getDb().delete(messages).where(eq(messages.sessionId, sessionId)).run();
+ return result.changes;
+ },
+ /**
+ * Delete all messages with sequence >= the given sequence number
+ * (Used when reverting to a checkpoint)
+ */
+ deleteFromSequence(sessionId, fromSequence) {
+ const result = getDb().delete(messages).where(
+ and(
+ eq(messages.sessionId, sessionId),
+ sql`sequence >= ${fromSequence}`
+ )
+ ).run();
+ return result.changes;
+ }
+ };
+ var toolExecutionQueries = {
+ create(data) {
+ const id = nanoid();
+ const result = getDb().insert(toolExecutions).values({
+ id,
+ ...data,
+ startedAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ return result;
+ },
+ getById(id) {
+ return getDb().select().from(toolExecutions).where(eq(toolExecutions.id, id)).get();
+ },
+ getByToolCallId(toolCallId) {
+ return getDb().select().from(toolExecutions).where(eq(toolExecutions.toolCallId, toolCallId)).get();
+ },
+ getPendingApprovals(sessionId) {
+ return getDb().select().from(toolExecutions).where(
+ and(
+ eq(toolExecutions.sessionId, sessionId),
+ eq(toolExecutions.status, "pending"),
+ eq(toolExecutions.requiresApproval, true)
+ )
+ ).all();
+ },
+ approve(id) {
+ return getDb().update(toolExecutions).set({ status: "approved" }).where(eq(toolExecutions.id, id)).returning().get();
+ },
+ reject(id) {
+ return getDb().update(toolExecutions).set({ status: "rejected" }).where(eq(toolExecutions.id, id)).returning().get();
+ },
+ complete(id, output, error) {
+ return getDb().update(toolExecutions).set({
+ status: error ? "error" : "completed",
+ output,
+ error,
+ completedAt: /* @__PURE__ */ new Date()
+ }).where(eq(toolExecutions.id, id)).returning().get();
+ },
+ getBySession(sessionId) {
+ return getDb().select().from(toolExecutions).where(eq(toolExecutions.sessionId, sessionId)).orderBy(toolExecutions.startedAt).all();
+ },
+ /**
+ * Delete all tool executions after a given timestamp
+ * (Used when reverting to a checkpoint)
+ */
+ deleteAfterTime(sessionId, afterTime) {
+ const result = getDb().delete(toolExecutions).where(
+ and(
+ eq(toolExecutions.sessionId, sessionId),
+ sql`started_at > ${afterTime.getTime()}`
+ )
+ ).run();
+ return result.changes;
+ }
+ };
+ var todoQueries = {
+ create(data) {
+ const id = nanoid();
+ const now = /* @__PURE__ */ new Date();
+ const result = getDb().insert(todoItems).values({
+ id,
+ ...data,
+ createdAt: now,
+ updatedAt: now
+ }).returning().get();
+ return result;
+ },
+ createMany(sessionId, items) {
+ const now = /* @__PURE__ */ new Date();
+ const values = items.map((item, index) => ({
+ id: nanoid(),
+ sessionId,
+ content: item.content,
+ order: item.order ?? index,
+ createdAt: now,
+ updatedAt: now
+ }));
+ return getDb().insert(todoItems).values(values).returning().all();
+ },
+ getBySession(sessionId) {
+ return getDb().select().from(todoItems).where(eq(todoItems.sessionId, sessionId)).orderBy(todoItems.order).all();
+ },
+ updateStatus(id, status) {
+ return getDb().update(todoItems).set({ status, updatedAt: /* @__PURE__ */ new Date() }).where(eq(todoItems.id, id)).returning().get();
+ },
+ delete(id) {
+ const result = getDb().delete(todoItems).where(eq(todoItems.id, id)).run();
+ return result.changes > 0;
+ },
+ clearSession(sessionId) {
+ const result = getDb().delete(todoItems).where(eq(todoItems.sessionId, sessionId)).run();
+ return result.changes;
+ }
+ };
+ var skillQueries = {
+ load(sessionId, skillName) {
+ const id = nanoid();
+ const result = getDb().insert(loadedSkills).values({
+ id,
+ sessionId,
+ skillName,
+ loadedAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ return result;
+ },
+ getBySession(sessionId) {
+ return getDb().select().from(loadedSkills).where(eq(loadedSkills.sessionId, sessionId)).orderBy(loadedSkills.loadedAt).all();
+ },
+ isLoaded(sessionId, skillName) {
+ const result = getDb().select().from(loadedSkills).where(
+ and(
+ eq(loadedSkills.sessionId, sessionId),
+ eq(loadedSkills.skillName, skillName)
+ )
+ ).get();
+ return !!result;
+ }
+ };
+ var activeStreamQueries = {
+ create(sessionId, streamId) {
+ const id = nanoid();
+ const result = getDb().insert(activeStreams).values({
+ id,
+ sessionId,
+ streamId,
+ status: "active",
+ createdAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ return result;
+ },
+ getBySessionId(sessionId) {
+ return getDb().select().from(activeStreams).where(
+ and(
+ eq(activeStreams.sessionId, sessionId),
+ eq(activeStreams.status, "active")
+ )
+ ).get();
+ },
+ getByStreamId(streamId) {
+ return getDb().select().from(activeStreams).where(eq(activeStreams.streamId, streamId)).get();
+ },
+ finish(streamId) {
+ return getDb().update(activeStreams).set({ status: "finished", finishedAt: /* @__PURE__ */ new Date() }).where(eq(activeStreams.streamId, streamId)).returning().get();
+ },
+ markError(streamId) {
+ return getDb().update(activeStreams).set({ status: "error", finishedAt: /* @__PURE__ */ new Date() }).where(eq(activeStreams.streamId, streamId)).returning().get();
+ },
+ deleteBySession(sessionId) {
+ const result = getDb().delete(activeStreams).where(eq(activeStreams.sessionId, sessionId)).run();
+ return result.changes;
+ }
+ };
+ var checkpointQueries = {
+ create(data) {
+ const id = nanoid();
+ const result = getDb().insert(checkpoints).values({
+ id,
+ sessionId: data.sessionId,
+ messageSequence: data.messageSequence,
+ gitHead: data.gitHead,
+ createdAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ return result;
+ },
+ getById(id) {
+ return getDb().select().from(checkpoints).where(eq(checkpoints.id, id)).get();
+ },
+ getBySession(sessionId) {
+ return getDb().select().from(checkpoints).where(eq(checkpoints.sessionId, sessionId)).orderBy(checkpoints.messageSequence).all();
+ },
+ getByMessageSequence(sessionId, messageSequence) {
+ return getDb().select().from(checkpoints).where(
+ and(
+ eq(checkpoints.sessionId, sessionId),
+ eq(checkpoints.messageSequence, messageSequence)
+ )
+ ).get();
+ },
+ getLatest(sessionId) {
+ return getDb().select().from(checkpoints).where(eq(checkpoints.sessionId, sessionId)).orderBy(desc(checkpoints.messageSequence)).limit(1).get();
+ },
+ /**
+ * Delete all checkpoints after a given sequence number
+ * (Used when reverting to a checkpoint)
+ */
+ deleteAfterSequence(sessionId, messageSequence) {
+ const result = getDb().delete(checkpoints).where(
+ and(
+ eq(checkpoints.sessionId, sessionId),
+ sql`message_sequence > ${messageSequence}`
+ )
+ ).run();
+ return result.changes;
+ },
+ deleteBySession(sessionId) {
+ const result = getDb().delete(checkpoints).where(eq(checkpoints.sessionId, sessionId)).run();
+ return result.changes;
+ }
+ };
+ var fileBackupQueries = {
+ create(data) {
+ const id = nanoid();
+ const result = getDb().insert(fileBackups).values({
+ id,
+ checkpointId: data.checkpointId,
+ sessionId: data.sessionId,
+ filePath: data.filePath,
+ originalContent: data.originalContent,
+ existed: data.existed,
+ createdAt: /* @__PURE__ */ new Date()
+ }).returning().get();
+ return result;
+ },
+ getByCheckpoint(checkpointId) {
+ return getDb().select().from(fileBackups).where(eq(fileBackups.checkpointId, checkpointId)).all();
+ },
+ getBySession(sessionId) {
+ return getDb().select().from(fileBackups).where(eq(fileBackups.sessionId, sessionId)).orderBy(fileBackups.createdAt).all();
+ },
+ /**
+ * Get all file backups from a given checkpoint sequence onwards (inclusive)
+ * (Used when reverting - need to restore these files)
+ *
+ * When reverting to checkpoint X, we need backups from checkpoint X and all later ones
+ * because checkpoint X's backups represent the state BEFORE processing message X.
+ */
+ getFromSequence(sessionId, messageSequence) {
+ const checkpointsFrom = getDb().select().from(checkpoints).where(
+ and(
+ eq(checkpoints.sessionId, sessionId),
+ sql`message_sequence >= ${messageSequence}`
+ )
+ ).all();
+ if (checkpointsFrom.length === 0) {
+ return [];
+ }
+ const checkpointIds = checkpointsFrom.map((c) => c.id);
+ const allBackups = [];
+ for (const cpId of checkpointIds) {
+ const backups = getDb().select().from(fileBackups).where(eq(fileBackups.checkpointId, cpId)).all();
+ allBackups.push(...backups);
+ }
+ return allBackups;
+ },
+ /**
+ * Check if a file already has a backup in the current checkpoint
+ */
+ hasBackup(checkpointId, filePath) {
+ const result = getDb().select().from(fileBackups).where(
+ and(
+ eq(fileBackups.checkpointId, checkpointId),
+ eq(fileBackups.filePath, filePath)
+ )
+ ).get();
+ return !!result;
+ },
+ deleteBySession(sessionId) {
+ const result = getDb().delete(fileBackups).where(eq(fileBackups.sessionId, sessionId)).run();
+ return result.changes;
+ }
+ };

  // src/config/index.ts
  import { existsSync, readFileSync, mkdirSync, writeFileSync } from "fs";
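
Note: besides converting the query objects from lazy __esm state into plain top-level declarations, the hunk above adds checkpointQueries and fileBackupQueries, plus the revert-oriented helpers messageQueries.deleteFromSequence and toolExecutionQueries.deleteAfterTime. A sketch of how they compose (identifiers from the hunk; the session id and file content are invented, and the calls are synchronous because better-sqlite3 is):

// Snapshot point: "state before the next user message".
const seq = messageQueries.getNextSequence("s1");
const cp = checkpointQueries.create({ sessionId: "s1", messageSequence: seq, gitHead: undefined });

// First write to a file under this checkpoint records its prior content.
fileBackupQueries.create({
  checkpointId: cp.id,
  sessionId: "s1",
  filePath: "src/app.ts",
  originalContent: "export const x = 1;\n",
  existed: true
});

// Reverting drops everything at or after the checkpoint's sequence.
messageQueries.deleteFromSequence("s1", cp.messageSequence);
checkpointQueries.deleteAfterSequence("s1", cp.messageSequence);

One point worth double-checking in review: deleteAfterTime compares the raw started_at column to afterTime.getTime() (milliseconds), while drizzle's { mode: "timestamp" } integers are conventionally stored as unix seconds, so the two sides of that comparison may be in different units.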
@@ -883,7 +992,6 @@ async function isTmuxAvailable() {
  try {
  const { stdout } = await execAsync("tmux -V");
  tmuxAvailableCache = true;
- console.log(`[tmux] Available: ${stdout.trim()}`);
  return true;
  } catch (error) {
  tmuxAvailableCache = false;
@@ -1503,9 +1611,198 @@ Use this to understand existing code, check file contents, or gather context.`,
  // src/tools/write-file.ts
  import { tool as tool3 } from "ai";
  import { z as z4 } from "zod";
- import { readFile as readFile3, writeFile as writeFile2, mkdir as mkdir2 } from "fs/promises";
- import { resolve as resolve3, relative as relative2, isAbsolute as isAbsolute2, dirname as dirname2 } from "path";
+ import { readFile as readFile4, writeFile as writeFile3, mkdir as mkdir3 } from "fs/promises";
+ import { resolve as resolve4, relative as relative3, isAbsolute as isAbsolute2, dirname as dirname3 } from "path";
+ import { existsSync as existsSync5 } from "fs";
+
+ // src/checkpoints/index.ts
+ import { readFile as readFile3, writeFile as writeFile2, unlink, mkdir as mkdir2 } from "fs/promises";
  import { existsSync as existsSync4 } from "fs";
+ import { resolve as resolve3, relative as relative2, dirname as dirname2 } from "path";
+ import { exec as exec3 } from "child_process";
+ import { promisify as promisify3 } from "util";
+ var execAsync3 = promisify3(exec3);
+ async function getGitHead(workingDirectory) {
+ try {
+ const { stdout } = await execAsync3("git rev-parse HEAD", {
+ cwd: workingDirectory,
+ timeout: 5e3
+ });
+ return stdout.trim();
+ } catch {
+ return void 0;
+ }
+ }
+ var activeManagers = /* @__PURE__ */ new Map();
+ function getCheckpointManager(sessionId, workingDirectory) {
+ let manager = activeManagers.get(sessionId);
+ if (!manager) {
+ manager = {
+ sessionId,
+ workingDirectory,
+ currentCheckpointId: null
+ };
+ activeManagers.set(sessionId, manager);
+ }
+ return manager;
+ }
+ async function createCheckpoint(sessionId, workingDirectory, messageSequence) {
+ const gitHead = await getGitHead(workingDirectory);
+ const checkpoint = checkpointQueries.create({
+ sessionId,
+ messageSequence,
+ gitHead
+ });
+ const manager = getCheckpointManager(sessionId, workingDirectory);
+ manager.currentCheckpointId = checkpoint.id;
+ return checkpoint;
+ }
+ async function backupFile(sessionId, workingDirectory, filePath) {
+ const manager = getCheckpointManager(sessionId, workingDirectory);
+ if (!manager.currentCheckpointId) {
+ console.warn("[checkpoint] No active checkpoint, skipping file backup");
+ return null;
+ }
+ const absolutePath = resolve3(workingDirectory, filePath);
+ const relativePath = relative2(workingDirectory, absolutePath);
+ if (fileBackupQueries.hasBackup(manager.currentCheckpointId, relativePath)) {
+ return null;
+ }
+ let originalContent = null;
+ let existed = false;
+ if (existsSync4(absolutePath)) {
+ try {
+ originalContent = await readFile3(absolutePath, "utf-8");
+ existed = true;
+ } catch (error) {
+ console.warn(`[checkpoint] Failed to read file for backup: ${error.message}`);
+ }
+ }
+ const backup = fileBackupQueries.create({
+ checkpointId: manager.currentCheckpointId,
+ sessionId,
+ filePath: relativePath,
+ originalContent,
+ existed
+ });
+ return backup;
+ }
+ async function revertToCheckpoint(sessionId, checkpointId) {
+ const session = sessionQueries.getById(sessionId);
+ if (!session) {
+ return {
+ success: false,
+ filesRestored: 0,
+ filesDeleted: 0,
+ messagesDeleted: 0,
+ checkpointsDeleted: 0,
+ error: "Session not found"
+ };
+ }
+ const checkpoint = checkpointQueries.getById(checkpointId);
+ if (!checkpoint || checkpoint.sessionId !== sessionId) {
+ return {
+ success: false,
+ filesRestored: 0,
+ filesDeleted: 0,
+ messagesDeleted: 0,
+ checkpointsDeleted: 0,
+ error: "Checkpoint not found"
+ };
+ }
+ const workingDirectory = session.workingDirectory;
+ const backupsToRevert = fileBackupQueries.getFromSequence(sessionId, checkpoint.messageSequence);
+ const fileToEarliestBackup = /* @__PURE__ */ new Map();
+ for (const backup of backupsToRevert) {
+ if (!fileToEarliestBackup.has(backup.filePath)) {
+ fileToEarliestBackup.set(backup.filePath, backup);
+ }
+ }
+ let filesRestored = 0;
+ let filesDeleted = 0;
+ for (const [filePath, backup] of fileToEarliestBackup) {
+ const absolutePath = resolve3(workingDirectory, filePath);
+ try {
+ if (backup.existed && backup.originalContent !== null) {
+ const dir = dirname2(absolutePath);
+ if (!existsSync4(dir)) {
+ await mkdir2(dir, { recursive: true });
+ }
+ await writeFile2(absolutePath, backup.originalContent, "utf-8");
+ filesRestored++;
+ } else if (!backup.existed) {
+ if (existsSync4(absolutePath)) {
+ await unlink(absolutePath);
+ filesDeleted++;
+ }
+ }
+ } catch (error) {
+ console.error(`Failed to restore ${filePath}: ${error.message}`);
+ }
+ }
+ const messagesDeleted = messageQueries.deleteFromSequence(sessionId, checkpoint.messageSequence);
+ toolExecutionQueries.deleteAfterTime(sessionId, checkpoint.createdAt);
+ const checkpointsDeleted = checkpointQueries.deleteAfterSequence(sessionId, checkpoint.messageSequence);
+ const manager = getCheckpointManager(sessionId, workingDirectory);
+ manager.currentCheckpointId = checkpoint.id;
+ return {
+ success: true,
+ filesRestored,
+ filesDeleted,
+ messagesDeleted,
+ checkpointsDeleted
+ };
+ }
+ function getCheckpoints(sessionId) {
+ return checkpointQueries.getBySession(sessionId);
+ }
+ async function getSessionDiff(sessionId) {
+ const session = sessionQueries.getById(sessionId);
+ if (!session) {
+ return { files: [] };
+ }
+ const workingDirectory = session.workingDirectory;
+ const allBackups = fileBackupQueries.getBySession(sessionId);
+ const fileToOriginalBackup = /* @__PURE__ */ new Map();
+ for (const backup of allBackups) {
+ if (!fileToOriginalBackup.has(backup.filePath)) {
+ fileToOriginalBackup.set(backup.filePath, backup);
+ }
+ }
+ const files = [];
+ for (const [filePath, originalBackup] of fileToOriginalBackup) {
+ const absolutePath = resolve3(workingDirectory, filePath);
+ let currentContent = null;
+ let currentExists = false;
+ if (existsSync4(absolutePath)) {
+ try {
+ currentContent = await readFile3(absolutePath, "utf-8");
+ currentExists = true;
+ } catch {
+ }
+ }
+ let status;
+ if (!originalBackup.existed && currentExists) {
+ status = "created";
+ } else if (originalBackup.existed && !currentExists) {
+ status = "deleted";
+ } else {
+ status = "modified";
+ }
+ files.push({
+ path: filePath,
+ status,
+ originalContent: originalBackup.originalContent,
+ currentContent
+ });
+ }
+ return { files };
+ }
+ function clearCheckpointManager(sessionId) {
+ activeManagers.delete(sessionId);
+ }
+
+ // src/tools/write-file.ts
  var writeFileInputSchema = z4.object({
  path: z4.string().describe("The path to the file. Can be relative to working directory or absolute."),
  mode: z4.enum(["full", "str_replace"]).describe('Write mode: "full" for complete file write, "str_replace" for targeted string replacement'),
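
Note: the new src/checkpoints/index.ts module above is the heart of the feature. createCheckpoint() records the git HEAD (if any) and becomes the session's current checkpoint; backupFile() snapshots a file's prior content at most once per checkpoint (hasBackup() deduplicates) and no-ops with a warning if no checkpoint is active; revertToCheckpoint() restores the earliest backup per file, deletes files that did not previously exist, and truncates later messages, tool executions, and checkpoints. A sketch of the intended lifecycle (function names from the hunk; the id and paths are made up):

const sessionId = "s1";
const seq = messageQueries.getNextSequence(sessionId);
const cp = await createCheckpoint(sessionId, "/work/project", seq);

// write_file calls this before touching disk (see the next hunks).
await backupFile(sessionId, "/work/project", "src/index.ts");

// Undo everything since that user message.
const result = await revertToCheckpoint(sessionId, cp.id);
console.log(result);
// { success: true, filesRestored, filesDeleted, messagesDeleted, checkpointsDeleted }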
@@ -1534,8 +1831,8 @@ Working directory: ${options.workingDirectory}`,
  inputSchema: writeFileInputSchema,
  execute: async ({ path, mode, content, old_string, new_string }) => {
  try {
- const absolutePath = isAbsolute2(path) ? path : resolve3(options.workingDirectory, path);
- const relativePath = relative2(options.workingDirectory, absolutePath);
+ const absolutePath = isAbsolute2(path) ? path : resolve4(options.workingDirectory, path);
+ const relativePath = relative3(options.workingDirectory, absolutePath);
  if (relativePath.startsWith("..") && !isAbsolute2(path)) {
  return {
  success: false,
@@ -1549,16 +1846,17 @@ Working directory: ${options.workingDirectory}`,
  error: 'Content is required for "full" mode'
  };
  }
- const dir = dirname2(absolutePath);
- if (!existsSync4(dir)) {
- await mkdir2(dir, { recursive: true });
+ await backupFile(options.sessionId, options.workingDirectory, absolutePath);
+ const dir = dirname3(absolutePath);
+ if (!existsSync5(dir)) {
+ await mkdir3(dir, { recursive: true });
  }
- const existed = existsSync4(absolutePath);
- await writeFile2(absolutePath, content, "utf-8");
+ const existed = existsSync5(absolutePath);
+ await writeFile3(absolutePath, content, "utf-8");
  return {
  success: true,
  path: absolutePath,
- relativePath: relative2(options.workingDirectory, absolutePath),
+ relativePath: relative3(options.workingDirectory, absolutePath),
  mode: "full",
  action: existed ? "replaced" : "created",
  bytesWritten: Buffer.byteLength(content, "utf-8"),
@@ -1571,13 +1869,14 @@ Working directory: ${options.workingDirectory}`,
  error: 'Both old_string and new_string are required for "str_replace" mode'
  };
  }
- if (!existsSync4(absolutePath)) {
+ if (!existsSync5(absolutePath)) {
  return {
  success: false,
  error: `File not found: ${path}. Use "full" mode to create new files.`
  };
  }
- const currentContent = await readFile3(absolutePath, "utf-8");
+ await backupFile(options.sessionId, options.workingDirectory, absolutePath);
+ const currentContent = await readFile4(absolutePath, "utf-8");
  if (!currentContent.includes(old_string)) {
  const lines = currentContent.split("\n");
  const preview = lines.slice(0, 20).join("\n");
@@ -1598,13 +1897,13 @@ Working directory: ${options.workingDirectory}`,
  };
  }
  const newContent = currentContent.replace(old_string, new_string);
- await writeFile2(absolutePath, newContent, "utf-8");
+ await writeFile3(absolutePath, newContent, "utf-8");
  const oldLines = old_string.split("\n").length;
  const newLines = new_string.split("\n").length;
  return {
  success: true,
  path: absolutePath,
- relativePath: relative2(options.workingDirectory, absolutePath),
+ relativePath: relative3(options.workingDirectory, absolutePath),
  mode: "str_replace",
  linesRemoved: oldLines,
  linesAdded: newLines,
@@ -1626,7 +1925,6 @@ Working directory: ${options.workingDirectory}`,
  }

  // src/tools/todo.ts
- init_db();
  import { tool as tool4 } from "ai";
  import { z as z5 } from "zod";
  var todoInputSchema = z5.object({
@@ -1756,9 +2054,9 @@ import { tool as tool5 } from "ai";
  import { z as z6 } from "zod";

  // src/skills/index.ts
- import { readFile as readFile4, readdir } from "fs/promises";
- import { resolve as resolve4, basename, extname } from "path";
- import { existsSync as existsSync5 } from "fs";
+ import { readFile as readFile5, readdir } from "fs/promises";
+ import { resolve as resolve5, basename, extname } from "path";
+ import { existsSync as existsSync6 } from "fs";
  function parseSkillFrontmatter(content) {
  const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
  if (!frontmatterMatch) {
@@ -1789,15 +2087,15 @@ function getSkillNameFromPath(filePath) {
  return basename(filePath, extname(filePath)).replace(/[-_]/g, " ").replace(/\b\w/g, (c) => c.toUpperCase());
  }
  async function loadSkillsFromDirectory(directory) {
- if (!existsSync5(directory)) {
+ if (!existsSync6(directory)) {
  return [];
  }
  const skills = [];
  const files = await readdir(directory);
  for (const file of files) {
  if (!file.endsWith(".md")) continue;
- const filePath = resolve4(directory, file);
- const content = await readFile4(filePath, "utf-8");
+ const filePath = resolve5(directory, file);
+ const content = await readFile5(filePath, "utf-8");
  const parsed = parseSkillFrontmatter(content);
  if (parsed) {
  skills.push({
@@ -1839,7 +2137,7 @@ async function loadSkillContent(skillName, directories) {
  if (!skill) {
  return null;
  }
- const content = await readFile4(skill.filePath, "utf-8");
+ const content = await readFile5(skill.filePath, "utf-8");
  const parsed = parseSkillFrontmatter(content);
  return {
  ...skill,
@@ -1858,7 +2156,6 @@ function formatSkillsForContext(skills) {
  }

  // src/tools/load-skill.ts
- init_db();
  var loadSkillInputSchema = z6.object({
  action: z6.enum(["list", "load"]).describe('Action to perform: "list" to see available skills, "load" to load a skill'),
  skillName: z6.string().optional().describe('For "load" action: The name of the skill to load')
@@ -1951,7 +2248,8 @@ function createTools(options) {
  workingDirectory: options.workingDirectory
  }),
  write_file: createWriteFileTool({
- workingDirectory: options.workingDirectory
+ workingDirectory: options.workingDirectory,
+ sessionId: options.sessionId
  }),
  todo: createTodoTool({
  sessionId: options.sessionId
@@ -1964,13 +2262,11 @@ function createTools(options) {
  }

  // src/agent/context.ts
- init_db();
  import { generateText } from "ai";
  import { gateway } from "@ai-sdk/gateway";

  // src/agent/prompts.ts
  import os from "os";
- init_db();
  function getSearchInstructions() {
  const platform3 = process.platform;
  const common = `- **Prefer \`read_file\` over shell commands** for reading files - don't use \`cat\`, \`head\`, or \`tail\` when \`read_file\` is available
@@ -2011,6 +2307,9 @@ You have access to powerful tools for:
  - **todo**: Manage your task list to track progress on complex operations
  - **load_skill**: Load specialized knowledge documents for specific tasks

+
+ IMPORTANT: If you have zero context of where you are working, always explore it first to understand the structure before doing things for the user.
+
  Use the TODO tool to manage your task list to track progress on complex operations. Always ask the user what they want to do specifically before doing it, and make a plan.
  Step 1 of the plan should be researching files and understanding the components/structure of what you're working on (if you don't already have context), then after u have done that, plan out the rest of the tasks u need to do.
  You can clear the todo and restart it, and do multiple things inside of one session.
@@ -2447,8 +2746,8 @@ var Agent = class _Agent {
  this.pendingApprovals.set(toolCallId, execution);
  options.onApprovalRequired?.(execution);
  sessionQueries.updateStatus(this.session.id, "waiting");
- const approved = await new Promise((resolve6) => {
- approvalResolvers.set(toolCallId, { resolve: resolve6, sessionId: this.session.id });
+ const approved = await new Promise((resolve7) => {
+ approvalResolvers.set(toolCallId, { resolve: resolve7, sessionId: this.session.id });
  });
  const resolverData = approvalResolvers.get(toolCallId);
  approvalResolvers.delete(toolCallId);
@@ -2542,18 +2841,18 @@ var Agent = class _Agent {
  };

  // src/server/index.ts
+ import "dotenv/config";
  import { Hono as Hono5 } from "hono";
  import { serve } from "@hono/node-server";
  import { cors } from "hono/cors";
  import { logger } from "hono/logger";
- import { existsSync as existsSync6, mkdirSync as mkdirSync2 } from "fs";
- import { resolve as resolve5, dirname as dirname3, join as join3 } from "path";
+ import { existsSync as existsSync7, mkdirSync as mkdirSync2 } from "fs";
+ import { resolve as resolve6, dirname as dirname4, join as join3 } from "path";
  import { spawn } from "child_process";
  import { createServer as createNetServer } from "net";
  import { fileURLToPath } from "url";

  // src/server/routes/sessions.ts
- init_db();
  import { Hono } from "hono";
  import { zValidator } from "@hono/zod-validator";
  import { z as z8 } from "zod";
@@ -2764,6 +3063,7 @@ sessions2.delete("/:id", async (c) => {
  }
  } catch (e) {
  }
+ clearCheckpointManager(id);
  const deleted = sessionQueries.delete(id);
  if (!deleted) {
  return c.json({ error: "Session not found" }, 404);
@@ -2815,9 +3115,100 @@ sessions2.get("/:id/todos", async (c) => {
  } : null
  });
  });
+ sessions2.get("/:id/checkpoints", async (c) => {
+ const id = c.req.param("id");
+ const session = sessionQueries.getById(id);
+ if (!session) {
+ return c.json({ error: "Session not found" }, 404);
+ }
+ const checkpoints2 = getCheckpoints(id);
+ return c.json({
+ sessionId: id,
+ checkpoints: checkpoints2.map((cp) => ({
+ id: cp.id,
+ messageSequence: cp.messageSequence,
+ gitHead: cp.gitHead,
+ createdAt: cp.createdAt.toISOString()
+ })),
+ count: checkpoints2.length
+ });
+ });
+ sessions2.post("/:id/revert/:checkpointId", async (c) => {
+ const sessionId = c.req.param("id");
+ const checkpointId = c.req.param("checkpointId");
+ const session = sessionQueries.getById(sessionId);
+ if (!session) {
+ return c.json({ error: "Session not found" }, 404);
+ }
+ const activeStream = activeStreamQueries.getBySessionId(sessionId);
+ if (activeStream) {
+ return c.json({
+ error: "Cannot revert while a stream is active. Stop the stream first.",
+ streamId: activeStream.streamId
+ }, 409);
+ }
+ const result = await revertToCheckpoint(sessionId, checkpointId);
+ if (!result.success) {
+ return c.json({ error: result.error }, 400);
+ }
+ return c.json({
+ success: true,
+ sessionId,
+ checkpointId,
+ filesRestored: result.filesRestored,
+ filesDeleted: result.filesDeleted,
+ messagesDeleted: result.messagesDeleted,
+ checkpointsDeleted: result.checkpointsDeleted
+ });
+ });
+ sessions2.get("/:id/diff", async (c) => {
+ const id = c.req.param("id");
+ const session = sessionQueries.getById(id);
+ if (!session) {
+ return c.json({ error: "Session not found" }, 404);
+ }
+ const diff = await getSessionDiff(id);
+ return c.json({
+ sessionId: id,
+ files: diff.files.map((f) => ({
+ path: f.path,
+ status: f.status,
+ hasOriginal: f.originalContent !== null,
+ hasCurrent: f.currentContent !== null
+ // Optionally include content (can be large)
+ // originalContent: f.originalContent,
+ // currentContent: f.currentContent,
+ })),
+ summary: {
+ created: diff.files.filter((f) => f.status === "created").length,
+ modified: diff.files.filter((f) => f.status === "modified").length,
+ deleted: diff.files.filter((f) => f.status === "deleted").length,
+ total: diff.files.length
+ }
+ });
+ });
+ sessions2.get("/:id/diff/:filePath", async (c) => {
+ const sessionId = c.req.param("id");
+ const filePath = decodeURIComponent(c.req.param("filePath"));
+ const session = sessionQueries.getById(sessionId);
+ if (!session) {
+ return c.json({ error: "Session not found" }, 404);
+ }
+ const diff = await getSessionDiff(sessionId);
+ const fileDiff = diff.files.find((f) => f.path === filePath);
+ if (!fileDiff) {
+ return c.json({ error: "File not found in diff" }, 404);
+ }
+ return c.json({
+ sessionId,
+ path: fileDiff.path,
+ status: fileDiff.status,
+ originalContent: fileDiff.originalContent,
+ currentContent: fileDiff.currentContent
+ });
+ });

  // src/server/routes/agents.ts
- init_db();
  import { Hono as Hono2 } from "hono";
  import { zValidator as zValidator2 } from "@hono/zod-validator";
  import { z as z9 } from "zod";
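
Together these four routes expose the new checkpoint system over HTTP: list a session's checkpoints, revert to one (refused with 409 while a stream is active), and inspect the session's file diff in summary or per-file form. A minimal client sketch follows, assuming the router is mounted at /sessions and the server listens on a local port (both mount point and port are assumptions; ids and the file path are placeholders):

```ts
// Sketch only: exercising the new checkpoint/diff routes.
const base = "http://localhost:3000/sessions"; // assumed mount point and port
const sessionId = "sess_abc123";               // placeholder session id

// 1. List checkpoints recorded for the session.
const { checkpoints } = await (
  await fetch(`${base}/${sessionId}/checkpoints`)
).json();

// 2. Revert to the latest checkpoint; the route answers 409 if a stream is active.
if (checkpoints.length > 0) {
  const latest = checkpoints[checkpoints.length - 1];
  const res = await fetch(`${base}/${sessionId}/revert/${latest.id}`, {
    method: "POST"
  });
  console.log(await res.json()); // { success, filesRestored, filesDeleted, ... }
}

// 3. Summarize what the session changed on disk.
const diff = await (await fetch(`${base}/${sessionId}/diff`)).json();
console.log(diff.summary); // { created, modified, deleted, total }

// 4. Fetch full before/after content for one file; the path must be
// URL-encoded because the route decodes it with decodeURIComponent.
const file = await (
  await fetch(`${base}/${sessionId}/diff/${encodeURIComponent("src/app.ts")}`)
).json();
console.log(file.status, file.originalContent, file.currentContent);
```
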
@@ -3091,8 +3482,9 @@ agents.post(
  if (!session) {
  return c.json({ error: "Session not found" }, 404);
  }
- const { messageQueries: messageQueries2 } = await Promise.resolve().then(() => (init_db(), db_exports));
- messageQueries2.create(id, { role: "user", content: prompt });
+ const nextSequence = messageQueries.getNextSequence(id);
+ await createCheckpoint(id, session.workingDirectory, nextSequence);
+ messageQueries.create(id, { role: "user", content: prompt });
  const streamId = `stream_${id}_${nanoid4(10)}`;
  activeStreamQueries.create(id, streamId);
  const stream = await streamContext.resumableStream(
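
Note the ordering in this hunk: the checkpoint is captured before the user message is persisted, so its messageSequence marks the boundary just ahead of the prompt it belongs to — presumably what lets revertToCheckpoint delete that message and everything after it (the revert response reports messagesDeleted). The same pattern using the exported helpers, as a sketch only; the id and path are placeholders:

```ts
// Sketch only: checkpoint-before-message ordering, mirroring the hunk above.
// createCheckpoint and messageQueries are real exports of this package.
import { createCheckpoint, messageQueries } from "sparkecoder";

const sessionId = "sess_abc123";                 // placeholder
const workingDirectory = "/tmp/agent-workspace"; // placeholder

// Snapshot the workspace first, keyed to the sequence the prompt will get...
const nextSequence = messageQueries.getNextSequence(sessionId);
await createCheckpoint(sessionId, workingDirectory, nextSequence);

// ...then persist the user message at that sequence.
messageQueries.create(sessionId, { role: "user", content: "refactor the parser" });
```
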
@@ -3291,6 +3683,7 @@ agents.post(
  });
  const session = agent.getSession();
  const streamId = `stream_${session.id}_${nanoid4(10)}`;
+ await createCheckpoint(session.id, session.workingDirectory, 0);
  activeStreamQueries.create(session.id, streamId);
  const createQuickStreamProducer = () => {
  const { readable, writable } = new TransformStream();
@@ -3471,10 +3864,14 @@ import { z as z10 } from "zod";
  var health = new Hono3();
  health.get("/", async (c) => {
  const config = getConfig();
+ const apiKeyStatus = getApiKeyStatus();
+ const gatewayKey = apiKeyStatus.find((s) => s.provider === "ai-gateway");
+ const hasApiKey = gatewayKey?.configured ?? false;
  return c.json({
  status: "ok",
  version: "0.1.0",
  uptime: process.uptime(),
+ apiKeyConfigured: hasApiKey,
  config: {
  workingDirectory: config.resolvedWorkingDirectory,
  defaultModel: config.defaultModel,
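
The health payload now reports whether an ai-gateway key is configured, so clients can surface a missing-credential state without attempting a model call. A quick check, assuming the health router is mounted at /health on a local port (mount point and port are assumptions):

```ts
// Sketch only: reading the new apiKeyConfigured flag from the health route.
const res = await fetch("http://localhost:3000/health"); // assumed URL
const { status, apiKeyConfigured } = await res.json();
console.log(status, apiKeyConfigured); // "ok", true once the gateway key is set
```
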
@@ -3551,7 +3948,6 @@ health.delete("/api-keys/:provider", async (c) => {
  import { Hono as Hono4 } from "hono";
  import { zValidator as zValidator4 } from "@hono/zod-validator";
  import { z as z11 } from "zod";
- init_db();
  var terminals2 = new Hono4();
  var spawnSchema = z11.object({
  command: z11.string(),
@@ -3849,14 +4245,11 @@ data: ${JSON.stringify({ status: "stopped" })}
  );
  });

- // src/server/index.ts
- init_db();
-
  // src/utils/dependencies.ts
- import { exec as exec3 } from "child_process";
- import { promisify as promisify3 } from "util";
+ import { exec as exec4 } from "child_process";
+ import { promisify as promisify4 } from "util";
  import { platform as platform2 } from "os";
- var execAsync3 = promisify3(exec3);
+ var execAsync4 = promisify4(exec4);
  function getInstallInstructions() {
  const os2 = platform2();
  if (os2 === "darwin") {
@@ -3889,7 +4282,7 @@ Install tmux:
  }
  async function checkTmux() {
  try {
- const { stdout } = await execAsync3("tmux -V", { timeout: 5e3 });
+ const { stdout } = await execAsync4("tmux -V", { timeout: 5e3 });
  const version = stdout.trim();
  return {
  available: true,
@@ -3936,13 +4329,13 @@ var DEFAULT_WEB_PORT = 6969;
  var WEB_PORT_SEQUENCE = [6969, 6970, 6971, 6972, 6973, 6974, 6975, 6976, 6977, 6978];
  function getWebDirectory() {
  try {
- const currentDir = dirname3(fileURLToPath(import.meta.url));
- const webDir = resolve5(currentDir, "..", "web");
- if (existsSync6(webDir) && existsSync6(join3(webDir, "package.json"))) {
+ const currentDir = dirname4(fileURLToPath(import.meta.url));
+ const webDir = resolve6(currentDir, "..", "web");
+ if (existsSync7(webDir) && existsSync7(join3(webDir, "package.json"))) {
  return webDir;
  }
- const altWebDir = resolve5(currentDir, "..", "..", "web");
- if (existsSync6(altWebDir) && existsSync6(join3(altWebDir, "package.json"))) {
+ const altWebDir = resolve6(currentDir, "..", "..", "web");
+ if (existsSync7(altWebDir) && existsSync7(join3(altWebDir, "package.json"))) {
  return altWebDir;
  }
  return null;
@@ -3965,18 +4358,18 @@ async function isSparkcoderWebRunning(port) {
  }
  }
  function isPortInUse(port) {
- return new Promise((resolve6) => {
+ return new Promise((resolve7) => {
  const server = createNetServer();
  server.once("error", (err) => {
  if (err.code === "EADDRINUSE") {
- resolve6(true);
+ resolve7(true);
  } else {
- resolve6(false);
+ resolve7(false);
  }
  });
  server.once("listening", () => {
  server.close();
- resolve6(false);
+ resolve7(false);
  });
  server.listen(port, "0.0.0.0");
  });
@@ -4010,7 +4403,7 @@ async function startWebUI(apiPort, webPort = DEFAULT_WEB_PORT, quiet = false) {
  if (!quiet) console.log(` \u2713 Web UI already running at http://localhost:${actualPort}`);
  return { process: null, port: actualPort };
  }
- const useNpm = existsSync6(join3(webDir, "package-lock.json"));
+ const useNpm = existsSync7(join3(webDir, "package-lock.json"));
  const command = useNpm ? "npm" : "npx";
  const args = useNpm ? ["run", "dev", "--", "-p", String(actualPort)] : ["next", "dev", "-p", String(actualPort)];
  const child = spawn(command, args, {
@@ -4113,7 +4506,7 @@ async function startServer(options = {}) {
  if (options.workingDirectory) {
  config.resolvedWorkingDirectory = options.workingDirectory;
  }
- if (!existsSync6(config.resolvedWorkingDirectory)) {
+ if (!existsSync7(config.resolvedWorkingDirectory)) {
  mkdirSync2(config.resolvedWorkingDirectory, { recursive: true });
  if (!options.quiet) console.log(`\u{1F4C1} Created agent workspace: ${config.resolvedWorkingDirectory}`);
  }
@@ -4611,23 +5004,31 @@ function generateOpenAPISpec() {
  }

  // src/index.ts
- init_db();
  var VERSION = "0.1.0";
  export {
  Agent,
  VERSION,
+ backupFile,
+ checkpointQueries,
+ clearCheckpointManager,
  closeDatabase,
  createApp,
  createBashTool,
+ createCheckpoint,
  createLoadSkillTool,
  createReadFileTool,
  createTodoTool,
  createTools,
  createWriteFileTool,
+ fileBackupQueries,
+ getCheckpointManager,
+ getCheckpoints,
  getDb,
+ getSessionDiff,
  initDatabase,
  loadConfig,
  messageQueries,
+ revertToCheckpoint,
  sessionQueries,
  skillQueries,
  startServer,