@lakitu/sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +166 -0
- package/convex/_generated/api.d.ts +45 -0
- package/convex/_generated/api.js +23 -0
- package/convex/_generated/dataModel.d.ts +58 -0
- package/convex/_generated/server.d.ts +143 -0
- package/convex/_generated/server.js +93 -0
- package/convex/cloud/CLAUDE.md +238 -0
- package/convex/cloud/_generated/api.ts +84 -0
- package/convex/cloud/_generated/component.ts +861 -0
- package/convex/cloud/_generated/dataModel.ts +60 -0
- package/convex/cloud/_generated/server.ts +156 -0
- package/convex/cloud/convex.config.ts +16 -0
- package/convex/cloud/index.ts +29 -0
- package/convex/cloud/intentSchema/generate.ts +447 -0
- package/convex/cloud/intentSchema/index.ts +16 -0
- package/convex/cloud/intentSchema/types.ts +418 -0
- package/convex/cloud/ksaPolicy.ts +554 -0
- package/convex/cloud/mail.ts +92 -0
- package/convex/cloud/schema.ts +322 -0
- package/convex/cloud/utils/kanbanContext.ts +229 -0
- package/convex/cloud/workflows/agentBoard.ts +451 -0
- package/convex/cloud/workflows/agentPrompt.ts +272 -0
- package/convex/cloud/workflows/agentThread.ts +374 -0
- package/convex/cloud/workflows/compileSandbox.ts +146 -0
- package/convex/cloud/workflows/crudBoard.ts +217 -0
- package/convex/cloud/workflows/crudKSAs.ts +262 -0
- package/convex/cloud/workflows/crudLorobeads.ts +371 -0
- package/convex/cloud/workflows/crudSkills.ts +205 -0
- package/convex/cloud/workflows/crudThreads.ts +708 -0
- package/convex/cloud/workflows/lifecycleSandbox.ts +1396 -0
- package/convex/cloud/workflows/sandboxConvex.ts +1046 -0
- package/convex/sandbox/README.md +90 -0
- package/convex/sandbox/_generated/api.d.ts +2934 -0
- package/convex/sandbox/_generated/api.js +23 -0
- package/convex/sandbox/_generated/dataModel.d.ts +60 -0
- package/convex/sandbox/_generated/server.d.ts +143 -0
- package/convex/sandbox/_generated/server.js +93 -0
- package/convex/sandbox/actions/bash.ts +130 -0
- package/convex/sandbox/actions/browser.ts +282 -0
- package/convex/sandbox/actions/file.ts +336 -0
- package/convex/sandbox/actions/lsp.ts +325 -0
- package/convex/sandbox/actions/pdf.ts +119 -0
- package/convex/sandbox/agent/codeExecLoop.ts +535 -0
- package/convex/sandbox/agent/decisions.ts +284 -0
- package/convex/sandbox/agent/index.ts +515 -0
- package/convex/sandbox/agent/subagents.ts +651 -0
- package/convex/sandbox/brandResearch/index.ts +417 -0
- package/convex/sandbox/context/index.ts +7 -0
- package/convex/sandbox/context/session.ts +402 -0
- package/convex/sandbox/convex.config.ts +17 -0
- package/convex/sandbox/index.ts +51 -0
- package/convex/sandbox/nodeActions/codeExec.ts +130 -0
- package/convex/sandbox/planning/beads.ts +187 -0
- package/convex/sandbox/planning/index.ts +8 -0
- package/convex/sandbox/planning/sync.ts +194 -0
- package/convex/sandbox/prompts/codeExec.ts +852 -0
- package/convex/sandbox/prompts/modes.ts +231 -0
- package/convex/sandbox/prompts/system.ts +142 -0
- package/convex/sandbox/schema.ts +510 -0
- package/convex/sandbox/state/artifacts.ts +99 -0
- package/convex/sandbox/state/checkpoints.ts +341 -0
- package/convex/sandbox/state/files.ts +383 -0
- package/convex/sandbox/state/index.ts +10 -0
- package/convex/sandbox/state/verification.actions.ts +268 -0
- package/convex/sandbox/state/verification.ts +101 -0
- package/convex/sandbox/tsconfig.json +25 -0
- package/convex/sandbox/utils/codeExecHelpers.ts +52 -0
- package/dist/cli/commands/build.d.ts +19 -0
- package/dist/cli/commands/build.d.ts.map +1 -0
- package/dist/cli/commands/build.js +223 -0
- package/dist/cli/commands/init.d.ts +16 -0
- package/dist/cli/commands/init.d.ts.map +1 -0
- package/dist/cli/commands/init.js +148 -0
- package/dist/cli/commands/publish.d.ts +12 -0
- package/dist/cli/commands/publish.d.ts.map +1 -0
- package/dist/cli/commands/publish.js +33 -0
- package/dist/cli/index.d.ts +14 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +40 -0
- package/dist/sdk/builders.d.ts +104 -0
- package/dist/sdk/builders.d.ts.map +1 -0
- package/dist/sdk/builders.js +214 -0
- package/dist/sdk/index.d.ts +29 -0
- package/dist/sdk/index.d.ts.map +1 -0
- package/dist/sdk/index.js +38 -0
- package/dist/sdk/types.d.ts +107 -0
- package/dist/sdk/types.d.ts.map +1 -0
- package/dist/sdk/types.js +6 -0
- package/ksa/README.md +263 -0
- package/ksa/_generated/REFERENCE.md +2954 -0
- package/ksa/_generated/registry.ts +257 -0
- package/ksa/_shared/configReader.ts +302 -0
- package/ksa/_shared/configSchemas.ts +649 -0
- package/ksa/_shared/gateway.ts +175 -0
- package/ksa/_shared/ksaBehaviors.ts +411 -0
- package/ksa/_shared/ksaProxy.ts +248 -0
- package/ksa/_shared/localDb.ts +302 -0
- package/ksa/index.ts +134 -0
- package/package.json +93 -0
- package/runtime/browser/agent-browser.ts +330 -0
- package/runtime/entrypoint.ts +194 -0
- package/runtime/lsp/manager.ts +366 -0
- package/runtime/pdf/pdf-generator.ts +50 -0
- package/runtime/pdf/renderer.ts +357 -0
- package/runtime/pdf/schema.ts +97 -0
- package/runtime/services/file-watcher.ts +191 -0
- package/template/build.ts +307 -0
- package/template/e2b/Dockerfile +69 -0
- package/template/e2b/e2b.toml +13 -0
- package/template/e2b/prebuild.sh +68 -0
- package/template/e2b/start.sh +14 -0
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Checkpoints - State Persistence for Chained Agent Runs
|
|
3
|
+
*
|
|
4
|
+
* Save and restore agent state for long-running tasks that
|
|
5
|
+
* exceed sandbox timeout limits.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { mutation, query, internalMutation, internalQuery } from "../_generated/server";
|
|
9
|
+
import { v } from "convex/values";
|
|
10
|
+
|
|
11
|
+
// ============================================
// Types
// ============================================

// Snapshot of one tracked file at checkpoint time. Only the hash, size
// and timestamp are kept — not the file contents themselves.
const fileSnapshotValidator = v.object({
  path: v.string(),
  contentHash: v.string(),
  size: v.number(),
  lastModified: v.number(),
});

// Snapshot of one task-tracking bead. `status`/`type` are stored as
// plain strings here (presumably constrained by the beads schema
// elsewhere — confirm against schema.ts).
const beadSnapshotValidator = v.object({
  id: v.string(),
  title: v.string(),
  status: v.string(),
  type: v.string(),
  priority: v.number(),
});

// One conversation turn; `timestamp` is optional, so compressed
// histories need not preserve per-message timing.
const messageValidator = v.object({
  role: v.string(),
  content: v.string(),
  timestamp: v.optional(v.number()),
});
|
|
35
|
+
|
|
36
|
+
// ============================================
|
|
37
|
+
// Mutations
|
|
38
|
+
// ============================================
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Create a checkpoint
|
|
42
|
+
*/
|
|
43
|
+
export const create = mutation({
|
|
44
|
+
args: {
|
|
45
|
+
sessionId: v.string(),
|
|
46
|
+
threadId: v.string(),
|
|
47
|
+
iteration: v.number(),
|
|
48
|
+
// Compressed conversation history
|
|
49
|
+
messageHistory: v.array(messageValidator),
|
|
50
|
+
// Files touched so far
|
|
51
|
+
fileState: v.array(fileSnapshotValidator),
|
|
52
|
+
// Task tracking state
|
|
53
|
+
beadsState: v.array(beadSnapshotValidator),
|
|
54
|
+
// Artifacts saved (references)
|
|
55
|
+
artifactsProduced: v.array(v.string()),
|
|
56
|
+
// What to do next
|
|
57
|
+
nextTask: v.string(),
|
|
58
|
+
// Why we're checkpointing
|
|
59
|
+
reason: v.union(
|
|
60
|
+
v.literal("timeout"),
|
|
61
|
+
v.literal("token_limit"),
|
|
62
|
+
v.literal("manual"),
|
|
63
|
+
v.literal("error_recovery")
|
|
64
|
+
),
|
|
65
|
+
// Additional context
|
|
66
|
+
metadata: v.optional(v.any()),
|
|
67
|
+
},
|
|
68
|
+
handler: async (ctx, args) => {
|
|
69
|
+
// Mark previous checkpoints as superseded
|
|
70
|
+
const previous = await ctx.db
|
|
71
|
+
.query("checkpoints")
|
|
72
|
+
.withIndex("by_session", (q) => q.eq("sessionId", args.sessionId))
|
|
73
|
+
.filter((q) => q.eq(q.field("status"), "active"))
|
|
74
|
+
.collect();
|
|
75
|
+
|
|
76
|
+
for (const checkpoint of previous) {
|
|
77
|
+
await ctx.db.patch(checkpoint._id, { status: "superseded" });
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Create new checkpoint
|
|
81
|
+
return await ctx.db.insert("checkpoints", {
|
|
82
|
+
sessionId: args.sessionId,
|
|
83
|
+
threadId: args.threadId,
|
|
84
|
+
iteration: args.iteration,
|
|
85
|
+
messageHistory: args.messageHistory,
|
|
86
|
+
fileState: args.fileState,
|
|
87
|
+
beadsState: args.beadsState,
|
|
88
|
+
artifactsProduced: args.artifactsProduced,
|
|
89
|
+
nextTask: args.nextTask,
|
|
90
|
+
reason: args.reason,
|
|
91
|
+
status: "active",
|
|
92
|
+
metadata: args.metadata,
|
|
93
|
+
createdAt: Date.now(),
|
|
94
|
+
});
|
|
95
|
+
},
|
|
96
|
+
});
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Mark checkpoint as restored (used)
|
|
100
|
+
*/
|
|
101
|
+
export const markRestored = internalMutation({
|
|
102
|
+
args: {
|
|
103
|
+
id: v.id("checkpoints"),
|
|
104
|
+
newThreadId: v.optional(v.string()),
|
|
105
|
+
},
|
|
106
|
+
handler: async (ctx, args) => {
|
|
107
|
+
await ctx.db.patch(args.id, {
|
|
108
|
+
status: "restored",
|
|
109
|
+
restoredAt: Date.now(),
|
|
110
|
+
restoredToThread: args.newThreadId,
|
|
111
|
+
});
|
|
112
|
+
},
|
|
113
|
+
});
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Mark checkpoint as completed (task finished)
|
|
117
|
+
*/
|
|
118
|
+
export const markCompleted = mutation({
|
|
119
|
+
args: {
|
|
120
|
+
sessionId: v.string(),
|
|
121
|
+
finalResult: v.optional(v.any()),
|
|
122
|
+
},
|
|
123
|
+
handler: async (ctx, args) => {
|
|
124
|
+
const checkpoints = await ctx.db
|
|
125
|
+
.query("checkpoints")
|
|
126
|
+
.withIndex("by_session", (q) => q.eq("sessionId", args.sessionId))
|
|
127
|
+
.collect();
|
|
128
|
+
|
|
129
|
+
for (const checkpoint of checkpoints) {
|
|
130
|
+
await ctx.db.patch(checkpoint._id, {
|
|
131
|
+
status: "completed",
|
|
132
|
+
completedAt: Date.now(),
|
|
133
|
+
finalResult: args.finalResult,
|
|
134
|
+
});
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
return { updated: checkpoints.length };
|
|
138
|
+
},
|
|
139
|
+
});
|
|
140
|
+
|
|
141
|
+
/**
|
|
142
|
+
* Mark checkpoint as failed
|
|
143
|
+
*/
|
|
144
|
+
export const markFailed = mutation({
|
|
145
|
+
args: {
|
|
146
|
+
id: v.id("checkpoints"),
|
|
147
|
+
error: v.string(),
|
|
148
|
+
},
|
|
149
|
+
handler: async (ctx, args) => {
|
|
150
|
+
await ctx.db.patch(args.id, {
|
|
151
|
+
status: "failed",
|
|
152
|
+
failedAt: Date.now(),
|
|
153
|
+
error: args.error,
|
|
154
|
+
});
|
|
155
|
+
},
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
// ============================================
|
|
159
|
+
// Queries
|
|
160
|
+
// ============================================
|
|
161
|
+
|
|
162
|
+
/**
|
|
163
|
+
* Get the latest active checkpoint for a session
|
|
164
|
+
*/
|
|
165
|
+
export const getLatest = query({
|
|
166
|
+
args: { sessionId: v.string() },
|
|
167
|
+
handler: async (ctx, args) => {
|
|
168
|
+
return await ctx.db
|
|
169
|
+
.query("checkpoints")
|
|
170
|
+
.withIndex("by_session", (q) => q.eq("sessionId", args.sessionId))
|
|
171
|
+
.filter((q) => q.eq(q.field("status"), "active"))
|
|
172
|
+
.order("desc")
|
|
173
|
+
.first();
|
|
174
|
+
},
|
|
175
|
+
});
|
|
176
|
+
|
|
177
|
+
/**
|
|
178
|
+
* Get checkpoint by ID
|
|
179
|
+
*/
|
|
180
|
+
export const get = query({
|
|
181
|
+
args: { id: v.id("checkpoints") },
|
|
182
|
+
handler: async (ctx, args) => {
|
|
183
|
+
return await ctx.db.get(args.id);
|
|
184
|
+
},
|
|
185
|
+
});
|
|
186
|
+
|
|
187
|
+
/**
|
|
188
|
+
* Internal: Get checkpoint by ID (for internal actions)
|
|
189
|
+
*/
|
|
190
|
+
export const internalGet = internalQuery({
|
|
191
|
+
args: { id: v.id("checkpoints") },
|
|
192
|
+
handler: async (ctx, args) => {
|
|
193
|
+
return await ctx.db.get(args.id);
|
|
194
|
+
},
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
/**
|
|
198
|
+
* Get all checkpoints for a session (history)
|
|
199
|
+
*/
|
|
200
|
+
export const getHistory = query({
|
|
201
|
+
args: { sessionId: v.string() },
|
|
202
|
+
handler: async (ctx, args) => {
|
|
203
|
+
return await ctx.db
|
|
204
|
+
.query("checkpoints")
|
|
205
|
+
.withIndex("by_session", (q) => q.eq("sessionId", args.sessionId))
|
|
206
|
+
.order("desc")
|
|
207
|
+
.collect();
|
|
208
|
+
},
|
|
209
|
+
});
|
|
210
|
+
|
|
211
|
+
/**
|
|
212
|
+
* Get checkpoint stats
|
|
213
|
+
*/
|
|
214
|
+
export const getStats = query({
|
|
215
|
+
args: {},
|
|
216
|
+
handler: async (ctx) => {
|
|
217
|
+
const all = await ctx.db.query("checkpoints").collect();
|
|
218
|
+
|
|
219
|
+
const byStatus = {
|
|
220
|
+
active: 0,
|
|
221
|
+
restored: 0,
|
|
222
|
+
completed: 0,
|
|
223
|
+
failed: 0,
|
|
224
|
+
superseded: 0,
|
|
225
|
+
};
|
|
226
|
+
|
|
227
|
+
const byReason = {
|
|
228
|
+
timeout: 0,
|
|
229
|
+
token_limit: 0,
|
|
230
|
+
manual: 0,
|
|
231
|
+
error_recovery: 0,
|
|
232
|
+
};
|
|
233
|
+
|
|
234
|
+
for (const cp of all) {
|
|
235
|
+
byStatus[cp.status as keyof typeof byStatus]++;
|
|
236
|
+
byReason[cp.reason as keyof typeof byReason]++;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
return {
|
|
240
|
+
total: all.length,
|
|
241
|
+
byStatus,
|
|
242
|
+
byReason,
|
|
243
|
+
averageIteration:
|
|
244
|
+
all.length > 0
|
|
245
|
+
? all.reduce((sum, cp) => sum + cp.iteration, 0) / all.length
|
|
246
|
+
: 0,
|
|
247
|
+
};
|
|
248
|
+
},
|
|
249
|
+
});
|
|
250
|
+
|
|
251
|
+
// ============================================
|
|
252
|
+
// Internal Mutations
|
|
253
|
+
// ============================================
|
|
254
|
+
|
|
255
|
+
/**
|
|
256
|
+
* Create checkpoint from current state
|
|
257
|
+
* Called internally during timeout handling
|
|
258
|
+
*/
|
|
259
|
+
export const createFromCurrentState = internalMutation({
|
|
260
|
+
args: {
|
|
261
|
+
threadId: v.string(),
|
|
262
|
+
nextTask: v.string(),
|
|
263
|
+
iteration: v.number(),
|
|
264
|
+
},
|
|
265
|
+
handler: async (ctx, args) => {
|
|
266
|
+
// Get current file state
|
|
267
|
+
const fileStates = await ctx.db.query("fileState").collect();
|
|
268
|
+
const fileState = fileStates.map((f) => ({
|
|
269
|
+
path: f.path,
|
|
270
|
+
contentHash: f.contentHash || "",
|
|
271
|
+
size: f.size || 0,
|
|
272
|
+
lastModified: f.lastAccessAt,
|
|
273
|
+
}));
|
|
274
|
+
|
|
275
|
+
// Get current beads state
|
|
276
|
+
const beads = await ctx.db.query("beads").collect();
|
|
277
|
+
const beadsState = beads.map((b) => ({
|
|
278
|
+
id: b._id,
|
|
279
|
+
title: b.title,
|
|
280
|
+
status: b.status,
|
|
281
|
+
type: b.type,
|
|
282
|
+
priority: b.priority,
|
|
283
|
+
}));
|
|
284
|
+
|
|
285
|
+
// Get artifacts produced
|
|
286
|
+
const artifacts = await ctx.db.query("artifacts").collect();
|
|
287
|
+
const artifactsProduced = artifacts.map((a) => a.name);
|
|
288
|
+
|
|
289
|
+
// Create session ID based on thread
|
|
290
|
+
const sessionId = `session_${args.threadId}`;
|
|
291
|
+
|
|
292
|
+
// Create checkpoint
|
|
293
|
+
return await ctx.db.insert("checkpoints", {
|
|
294
|
+
sessionId,
|
|
295
|
+
threadId: args.threadId,
|
|
296
|
+
iteration: args.iteration,
|
|
297
|
+
messageHistory: [], // Will be populated by agent SDK
|
|
298
|
+
fileState,
|
|
299
|
+
beadsState,
|
|
300
|
+
artifactsProduced,
|
|
301
|
+
nextTask: args.nextTask,
|
|
302
|
+
reason: "timeout",
|
|
303
|
+
status: "active",
|
|
304
|
+
createdAt: Date.now(),
|
|
305
|
+
});
|
|
306
|
+
},
|
|
307
|
+
});
|
|
308
|
+
|
|
309
|
+
// ============================================
|
|
310
|
+
// Cleanup
|
|
311
|
+
// ============================================
|
|
312
|
+
|
|
313
|
+
/**
|
|
314
|
+
* Clean up old checkpoints
|
|
315
|
+
*/
|
|
316
|
+
export const cleanup = mutation({
|
|
317
|
+
args: {
|
|
318
|
+
olderThanMs: v.optional(v.number()),
|
|
319
|
+
keepCompleted: v.optional(v.boolean()),
|
|
320
|
+
},
|
|
321
|
+
handler: async (ctx, args) => {
|
|
322
|
+
const cutoff = Date.now() - (args.olderThanMs ?? 7 * 24 * 3600000); // 7 days default
|
|
323
|
+
|
|
324
|
+
const old = await ctx.db
|
|
325
|
+
.query("checkpoints")
|
|
326
|
+
.filter((q) => q.lt(q.field("createdAt"), cutoff))
|
|
327
|
+
.collect();
|
|
328
|
+
|
|
329
|
+
let deleted = 0;
|
|
330
|
+
for (const checkpoint of old) {
|
|
331
|
+
// Skip completed if requested
|
|
332
|
+
if (args.keepCompleted && checkpoint.status === "completed") {
|
|
333
|
+
continue;
|
|
334
|
+
}
|
|
335
|
+
await ctx.db.delete(checkpoint._id);
|
|
336
|
+
deleted++;
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
return { deleted };
|
|
340
|
+
},
|
|
341
|
+
});
|
|
@@ -0,0 +1,383 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* State - File State Tracking
|
|
3
|
+
*
|
|
4
|
+
* Explicit state management with diff-driven architecture.
|
|
5
|
+
* Tracks file changes, generates diffs, and supports rollback.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { mutation, query, internalMutation, internalQuery } from "../_generated/server";
|
|
9
|
+
import { v } from "convex/values";
|
|
10
|
+
|
|
11
|
+
// ============================================
|
|
12
|
+
// Mutations
|
|
13
|
+
// ============================================
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Track a file access (read or write)
|
|
17
|
+
*/
|
|
18
|
+
export const trackFileAccess = mutation({
|
|
19
|
+
args: {
|
|
20
|
+
path: v.string(),
|
|
21
|
+
operation: v.union(v.literal("read"), v.literal("write"), v.literal("edit")),
|
|
22
|
+
contentHash: v.optional(v.string()),
|
|
23
|
+
size: v.optional(v.number()),
|
|
24
|
+
threadId: v.optional(v.string()),
|
|
25
|
+
},
|
|
26
|
+
handler: async (ctx, args) => {
|
|
27
|
+
// Get existing file state
|
|
28
|
+
const existing = await ctx.db
|
|
29
|
+
.query("fileState")
|
|
30
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
31
|
+
.first();
|
|
32
|
+
|
|
33
|
+
if (existing) {
|
|
34
|
+
// Update existing
|
|
35
|
+
await ctx.db.patch(existing._id, {
|
|
36
|
+
lastOperation: args.operation,
|
|
37
|
+
lastAccessAt: Date.now(),
|
|
38
|
+
contentHash: args.contentHash ?? existing.contentHash,
|
|
39
|
+
size: args.size ?? existing.size,
|
|
40
|
+
accessCount: existing.accessCount + 1,
|
|
41
|
+
});
|
|
42
|
+
return existing._id;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// Create new file state entry
|
|
46
|
+
return await ctx.db.insert("fileState", {
|
|
47
|
+
path: args.path,
|
|
48
|
+
lastOperation: args.operation,
|
|
49
|
+
lastAccessAt: Date.now(),
|
|
50
|
+
contentHash: args.contentHash,
|
|
51
|
+
size: args.size,
|
|
52
|
+
threadId: args.threadId,
|
|
53
|
+
accessCount: 1,
|
|
54
|
+
createdAt: Date.now(),
|
|
55
|
+
});
|
|
56
|
+
},
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Record a file edit with diff
|
|
61
|
+
*/
|
|
62
|
+
export const recordEdit = mutation({
|
|
63
|
+
args: {
|
|
64
|
+
path: v.string(),
|
|
65
|
+
oldContent: v.string(),
|
|
66
|
+
newContent: v.string(),
|
|
67
|
+
diff: v.string(),
|
|
68
|
+
verified: v.boolean(),
|
|
69
|
+
threadId: v.optional(v.string()),
|
|
70
|
+
},
|
|
71
|
+
handler: async (ctx, args) => {
|
|
72
|
+
// Get file state
|
|
73
|
+
const fileState = await ctx.db
|
|
74
|
+
.query("fileState")
|
|
75
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
76
|
+
.first();
|
|
77
|
+
|
|
78
|
+
// Insert edit history
|
|
79
|
+
const editId = await ctx.db.insert("editHistory", {
|
|
80
|
+
path: args.path,
|
|
81
|
+
fileStateId: fileState?._id,
|
|
82
|
+
oldContentHash: simpleHash(args.oldContent),
|
|
83
|
+
newContentHash: simpleHash(args.newContent),
|
|
84
|
+
diff: args.diff,
|
|
85
|
+
verified: args.verified,
|
|
86
|
+
threadId: args.threadId,
|
|
87
|
+
createdAt: Date.now(),
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
// Update file state
|
|
91
|
+
if (fileState) {
|
|
92
|
+
await ctx.db.patch(fileState._id, {
|
|
93
|
+
lastOperation: "edit",
|
|
94
|
+
lastAccessAt: Date.now(),
|
|
95
|
+
contentHash: simpleHash(args.newContent),
|
|
96
|
+
size: args.newContent.length,
|
|
97
|
+
lastEditId: editId,
|
|
98
|
+
});
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
return editId;
|
|
102
|
+
},
|
|
103
|
+
});
|
|
104
|
+
|
|
105
|
+
/**
|
|
106
|
+
* Mark a file as rolled back
|
|
107
|
+
*/
|
|
108
|
+
export const rollback = mutation({
|
|
109
|
+
args: {
|
|
110
|
+
editId: v.id("editHistory"),
|
|
111
|
+
reason: v.string(),
|
|
112
|
+
},
|
|
113
|
+
handler: async (ctx, args) => {
|
|
114
|
+
const edit = await ctx.db.get(args.editId);
|
|
115
|
+
if (!edit) {
|
|
116
|
+
throw new Error("Edit not found");
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
await ctx.db.patch(args.editId, {
|
|
120
|
+
rolledBack: true,
|
|
121
|
+
rollbackReason: args.reason,
|
|
122
|
+
rolledBackAt: Date.now(),
|
|
123
|
+
});
|
|
124
|
+
|
|
125
|
+
return { success: true };
|
|
126
|
+
},
|
|
127
|
+
});
|
|
128
|
+
|
|
129
|
+
// ============================================
|
|
130
|
+
// Queries
|
|
131
|
+
// ============================================
|
|
132
|
+
|
|
133
|
+
/**
|
|
134
|
+
* Get file state by path
|
|
135
|
+
*/
|
|
136
|
+
export const getByPath = query({
|
|
137
|
+
args: { path: v.string() },
|
|
138
|
+
handler: async (ctx, args) => {
|
|
139
|
+
return await ctx.db
|
|
140
|
+
.query("fileState")
|
|
141
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
142
|
+
.first();
|
|
143
|
+
},
|
|
144
|
+
});
|
|
145
|
+
|
|
146
|
+
/**
|
|
147
|
+
* Get all files accessed in a thread
|
|
148
|
+
*/
|
|
149
|
+
export const getByThread = query({
|
|
150
|
+
args: { threadId: v.string() },
|
|
151
|
+
handler: async (ctx, args) => {
|
|
152
|
+
return await ctx.db
|
|
153
|
+
.query("fileState")
|
|
154
|
+
.withIndex("by_thread", (q) => q.eq("threadId", args.threadId))
|
|
155
|
+
.collect();
|
|
156
|
+
},
|
|
157
|
+
});
|
|
158
|
+
|
|
159
|
+
/**
|
|
160
|
+
* Get edit history for a file
|
|
161
|
+
*/
|
|
162
|
+
export const getEditHistory = query({
|
|
163
|
+
args: {
|
|
164
|
+
path: v.string(),
|
|
165
|
+
limit: v.optional(v.number()),
|
|
166
|
+
},
|
|
167
|
+
handler: async (ctx, args) => {
|
|
168
|
+
return await ctx.db
|
|
169
|
+
.query("editHistory")
|
|
170
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
171
|
+
.order("desc")
|
|
172
|
+
.take(args.limit ?? 20);
|
|
173
|
+
},
|
|
174
|
+
});
|
|
175
|
+
|
|
176
|
+
/**
|
|
177
|
+
* Get recent edits across all files
|
|
178
|
+
*/
|
|
179
|
+
export const getRecentEdits = query({
|
|
180
|
+
args: { limit: v.optional(v.number()) },
|
|
181
|
+
handler: async (ctx, args) => {
|
|
182
|
+
return await ctx.db
|
|
183
|
+
.query("editHistory")
|
|
184
|
+
.order("desc")
|
|
185
|
+
.take(args.limit ?? 50);
|
|
186
|
+
},
|
|
187
|
+
});
|
|
188
|
+
|
|
189
|
+
/**
|
|
190
|
+
* Check if a file has changed since a given hash
|
|
191
|
+
*/
|
|
192
|
+
export const hasChanged = query({
|
|
193
|
+
args: {
|
|
194
|
+
path: v.string(),
|
|
195
|
+
expectedHash: v.string(),
|
|
196
|
+
},
|
|
197
|
+
handler: async (ctx, args) => {
|
|
198
|
+
const fileState = await ctx.db
|
|
199
|
+
.query("fileState")
|
|
200
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
201
|
+
.first();
|
|
202
|
+
|
|
203
|
+
if (!fileState) {
|
|
204
|
+
return { changed: true, reason: "File not tracked" };
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
if (fileState.contentHash !== args.expectedHash) {
|
|
208
|
+
return {
|
|
209
|
+
changed: true,
|
|
210
|
+
reason: "Content hash mismatch",
|
|
211
|
+
currentHash: fileState.contentHash,
|
|
212
|
+
};
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
return { changed: false };
|
|
216
|
+
},
|
|
217
|
+
});
|
|
218
|
+
|
|
219
|
+
// ============================================
|
|
220
|
+
// Internal Mutations
|
|
221
|
+
// ============================================
|
|
222
|
+
|
|
223
|
+
/**
|
|
224
|
+
* Restore file state from a checkpoint
|
|
225
|
+
*/
|
|
226
|
+
export const restoreFromCheckpoint = internalMutation({
|
|
227
|
+
args: {
|
|
228
|
+
checkpointId: v.id("checkpoints"),
|
|
229
|
+
},
|
|
230
|
+
handler: async (ctx, args) => {
|
|
231
|
+
const checkpoint = await ctx.db.get(args.checkpointId);
|
|
232
|
+
if (!checkpoint) {
|
|
233
|
+
throw new Error(`Checkpoint ${args.checkpointId} not found`);
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
// Clear existing file state
|
|
237
|
+
const existingStates = await ctx.db.query("fileState").collect();
|
|
238
|
+
for (const state of existingStates) {
|
|
239
|
+
await ctx.db.delete(state._id);
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
// Restore file states from checkpoint
|
|
243
|
+
for (const file of checkpoint.fileState) {
|
|
244
|
+
await ctx.db.insert("fileState", {
|
|
245
|
+
path: file.path,
|
|
246
|
+
lastOperation: "read",
|
|
247
|
+
lastAccessAt: file.lastModified,
|
|
248
|
+
contentHash: file.contentHash,
|
|
249
|
+
size: file.size,
|
|
250
|
+
accessCount: 1,
|
|
251
|
+
createdAt: Date.now(),
|
|
252
|
+
});
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// Restore beads state
|
|
256
|
+
// Note: This creates new beads, doesn't update existing ones
|
|
257
|
+
// In a real implementation, you'd want to sync with existing beads
|
|
258
|
+
for (const bead of checkpoint.beadsState) {
|
|
259
|
+
const existing = await ctx.db
|
|
260
|
+
.query("beads")
|
|
261
|
+
.filter((q) => q.eq(q.field("title"), bead.title))
|
|
262
|
+
.first();
|
|
263
|
+
|
|
264
|
+
if (!existing) {
|
|
265
|
+
await ctx.db.insert("beads", {
|
|
266
|
+
title: bead.title,
|
|
267
|
+
type: bead.type as any,
|
|
268
|
+
status: bead.status as any,
|
|
269
|
+
priority: bead.priority,
|
|
270
|
+
createdAt: Date.now(),
|
|
271
|
+
updatedAt: Date.now(),
|
|
272
|
+
});
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
return { restored: true, checkpointId: args.checkpointId };
|
|
277
|
+
},
|
|
278
|
+
});
|
|
279
|
+
|
|
280
|
+
// ============================================
|
|
281
|
+
// Internal Mutation Wrappers (for tools)
|
|
282
|
+
// ============================================
|
|
283
|
+
|
|
284
|
+
/**
|
|
285
|
+
* Internal version of trackFileAccess for tool use
|
|
286
|
+
*/
|
|
287
|
+
export const internalTrackFileAccess = internalMutation({
|
|
288
|
+
args: {
|
|
289
|
+
path: v.string(),
|
|
290
|
+
operation: v.union(v.literal("read"), v.literal("write"), v.literal("edit")),
|
|
291
|
+
contentHash: v.optional(v.string()),
|
|
292
|
+
size: v.optional(v.number()),
|
|
293
|
+
threadId: v.optional(v.string()),
|
|
294
|
+
},
|
|
295
|
+
handler: async (ctx, args) => {
|
|
296
|
+
const existing = await ctx.db
|
|
297
|
+
.query("fileState")
|
|
298
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
299
|
+
.first();
|
|
300
|
+
|
|
301
|
+
if (existing) {
|
|
302
|
+
await ctx.db.patch(existing._id, {
|
|
303
|
+
lastOperation: args.operation,
|
|
304
|
+
lastAccessAt: Date.now(),
|
|
305
|
+
contentHash: args.contentHash ?? existing.contentHash,
|
|
306
|
+
size: args.size ?? existing.size,
|
|
307
|
+
accessCount: existing.accessCount + 1,
|
|
308
|
+
});
|
|
309
|
+
return existing._id;
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
return await ctx.db.insert("fileState", {
|
|
313
|
+
path: args.path,
|
|
314
|
+
lastOperation: args.operation,
|
|
315
|
+
lastAccessAt: Date.now(),
|
|
316
|
+
contentHash: args.contentHash,
|
|
317
|
+
size: args.size,
|
|
318
|
+
threadId: args.threadId,
|
|
319
|
+
accessCount: 1,
|
|
320
|
+
createdAt: Date.now(),
|
|
321
|
+
});
|
|
322
|
+
},
|
|
323
|
+
});
|
|
324
|
+
|
|
325
|
+
/**
|
|
326
|
+
* Internal version of recordEdit for tool use
|
|
327
|
+
*/
|
|
328
|
+
export const internalRecordEdit = internalMutation({
|
|
329
|
+
args: {
|
|
330
|
+
path: v.string(),
|
|
331
|
+
oldContent: v.string(),
|
|
332
|
+
newContent: v.string(),
|
|
333
|
+
diff: v.string(),
|
|
334
|
+
verified: v.boolean(),
|
|
335
|
+
threadId: v.optional(v.string()),
|
|
336
|
+
},
|
|
337
|
+
handler: async (ctx, args) => {
|
|
338
|
+
const fileState = await ctx.db
|
|
339
|
+
.query("fileState")
|
|
340
|
+
.withIndex("by_path", (q) => q.eq("path", args.path))
|
|
341
|
+
.first();
|
|
342
|
+
|
|
343
|
+
const editId = await ctx.db.insert("editHistory", {
|
|
344
|
+
path: args.path,
|
|
345
|
+
fileStateId: fileState?._id,
|
|
346
|
+
oldContentHash: simpleHash(args.oldContent),
|
|
347
|
+
newContentHash: simpleHash(args.newContent),
|
|
348
|
+
diff: args.diff,
|
|
349
|
+
verified: args.verified,
|
|
350
|
+
threadId: args.threadId,
|
|
351
|
+
createdAt: Date.now(),
|
|
352
|
+
});
|
|
353
|
+
|
|
354
|
+
if (fileState) {
|
|
355
|
+
await ctx.db.patch(fileState._id, {
|
|
356
|
+
lastOperation: "edit",
|
|
357
|
+
lastAccessAt: Date.now(),
|
|
358
|
+
contentHash: simpleHash(args.newContent),
|
|
359
|
+
size: args.newContent.length,
|
|
360
|
+
lastEditId: editId,
|
|
361
|
+
});
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
return editId;
|
|
365
|
+
},
|
|
366
|
+
});
|
|
367
|
+
|
|
368
|
+
// ============================================
|
|
369
|
+
// Helpers
|
|
370
|
+
// ============================================
|
|
371
|
+
|
|
372
|
+
/**
|
|
373
|
+
* Simple hash function for content comparison
|
|
374
|
+
*/
|
|
375
|
+
function simpleHash(str: string): string {
|
|
376
|
+
let hash = 0;
|
|
377
|
+
for (let i = 0; i < str.length; i++) {
|
|
378
|
+
const char = str.charCodeAt(i);
|
|
379
|
+
hash = (hash << 5) - hash + char;
|
|
380
|
+
hash = hash & hash; // Convert to 32bit integer
|
|
381
|
+
}
|
|
382
|
+
return hash.toString(16);
|
|
383
|
+
}
|