@poncho-ai/harness 0.35.0 → 0.36.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/.turbo/turbo-build.log +12 -11
  2. package/CHANGELOG.md +25 -0
  3. package/dist/index.d.ts +485 -29
  4. package/dist/index.js +2839 -2114
  5. package/dist/isolate-TCWTUVG4.js +1532 -0
  6. package/package.json +23 -4
  7. package/scripts/migrate-to-engine.mjs +556 -0
  8. package/src/config.ts +106 -1
  9. package/src/harness.ts +226 -91
  10. package/src/index.ts +5 -0
  11. package/src/isolate/bindings.ts +206 -0
  12. package/src/isolate/bundler.ts +179 -0
  13. package/src/isolate/index.ts +10 -0
  14. package/src/isolate/polyfills.ts +796 -0
  15. package/src/isolate/run-code-tool.ts +220 -0
  16. package/src/isolate/runtime.ts +286 -0
  17. package/src/isolate/type-stubs.ts +196 -0
  18. package/src/memory.ts +129 -198
  19. package/src/reminder-store.ts +3 -237
  20. package/src/secrets-store.ts +2 -91
  21. package/src/state.ts +11 -1302
  22. package/src/storage/engine.ts +106 -0
  23. package/src/storage/index.ts +59 -0
  24. package/src/storage/memory-engine.ts +588 -0
  25. package/src/storage/postgres-engine.ts +139 -0
  26. package/src/storage/schema.ts +145 -0
  27. package/src/storage/sql-dialect.ts +963 -0
  28. package/src/storage/sqlite-engine.ts +99 -0
  29. package/src/storage/store-adapters.ts +100 -0
  30. package/src/todo-tools.ts +1 -136
  31. package/src/upload-store.ts +1 -0
  32. package/src/vfs/bash-manager.ts +120 -0
  33. package/src/vfs/bash-tool.ts +59 -0
  34. package/src/vfs/create-bash-fs.ts +32 -0
  35. package/src/vfs/edit-file-tool.ts +72 -0
  36. package/src/vfs/index.ts +5 -0
  37. package/src/vfs/poncho-fs-adapter.ts +267 -0
  38. package/src/vfs/protected-fs.ts +177 -0
  39. package/src/vfs/read-file-tool.ts +103 -0
  40. package/src/vfs/write-file-tool.ts +49 -0
  41. package/test/harness.test.ts +30 -36
  42. package/test/isolate-vfs.test.ts +453 -0
  43. package/test/isolate.test.ts +252 -0
  44. package/test/state.test.ts +4 -27
  45. package/test/storage-engine.test.ts +250 -0
  46. package/test/vfs.test.ts +242 -0
  47. package/.turbo/turbo-lint.log +0 -6
  48. package/.turbo/turbo-test.log +0 -11931
  49. package/src/kv-store.ts +0 -216
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@poncho-ai/harness",
3
- "version": "0.35.0",
3
+ "version": "0.36.1",
4
4
  "description": "Agent execution runtime - conversation loop, tool dispatch, streaming",
5
5
  "repository": {
6
6
  "type": "git",
@@ -22,22 +22,41 @@
22
22
  "dependencies": {
23
23
  "@ai-sdk/anthropic": "^3.0.44",
24
24
  "@ai-sdk/openai": "^3.0.29",
25
- "@aws-sdk/client-dynamodb": "^3.988.0",
26
25
  "@opentelemetry/api": "1.9.0",
27
26
  "@opentelemetry/exporter-trace-otlp-http": "^0.213.0",
28
27
  "@opentelemetry/sdk-trace-node": "^2.6.0",
29
28
  "ai": "^6.0.86",
29
+ "better-sqlite3": "^11.0.0",
30
30
  "cheerio": "^1.2.0",
31
31
  "jiti": "^2.6.1",
32
32
  "jose": "^6.2.2",
33
+ "just-bash": "^2.14.0",
33
34
  "mustache": "^4.2.0",
34
- "redis": "^5.10.0",
35
35
  "yaml": "^2.4.0",
36
36
  "zod": "^3.22.0",
37
- "@poncho-ai/sdk": "1.8.0"
37
+ "@poncho-ai/sdk": "1.8.1"
38
+ },
39
+ "peerDependencies": {
40
+ "esbuild": ">=0.17.0",
41
+ "isolated-vm": ">=5.0.0",
42
+ "postgres": ">=3.0.0"
43
+ },
44
+ "peerDependenciesMeta": {
45
+ "esbuild": {
46
+ "optional": true
47
+ },
48
+ "isolated-vm": {
49
+ "optional": true
50
+ },
51
+ "postgres": {
52
+ "optional": true
53
+ }
38
54
  },
39
55
  "devDependencies": {
56
+ "@types/better-sqlite3": "^7.6.0",
40
57
  "@types/mustache": "^4.2.6",
58
+ "esbuild": "^0.28.0",
59
+ "isolated-vm": "^6.1.2",
41
60
  "tsup": "^8.0.0",
42
61
  "vitest": "^1.4.0"
43
62
  },
@@ -0,0 +1,556 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Migration script: Import data from Upstash/local JSON stores into the
4
+ * new StorageEngine (SQLite or PostgreSQL).
5
+ *
6
+ * Usage:
7
+ * # Local (.poncho/ JSON files) → SQLite (default)
8
+ * node scripts/migrate-to-engine.mjs --source local --working-dir /path/to/project
9
+ *
10
+ * # Local → PostgreSQL
11
+ * DATABASE_URL=postgres://... node scripts/migrate-to-engine.mjs --source local --target postgresql --working-dir /path/to/project
12
+ *
13
+ * # Upstash → PostgreSQL
14
+ * UPSTASH_REDIS_REST_URL=... UPSTASH_REDIS_REST_TOKEN=... DATABASE_URL=postgres://... \
15
+ * node scripts/migrate-to-engine.mjs --source upstash --target postgresql --agent-id my-agent
16
+ *
17
+ * # Upstash → SQLite
18
+ * UPSTASH_REDIS_REST_URL=... UPSTASH_REDIS_REST_TOKEN=... \
19
+ * node scripts/migrate-to-engine.mjs --source upstash --target sqlite --agent-id my-agent --working-dir /path/to/project
20
+ */
21
+
22
+ import { readFile, readdir } from "node:fs/promises";
23
+ import { resolve, basename } from "node:path";
24
+ import { parseArgs } from "node:util";
25
+
26
+ // ---------------------------------------------------------------------------
27
+ // CLI args
28
+ // ---------------------------------------------------------------------------
29
+
30
+ const { values: args } = parseArgs({
31
+ options: {
32
+ source: { type: "string", default: "local" },
33
+ target: { type: "string", default: "sqlite" },
34
+ "working-dir": { type: "string", default: process.cwd() },
35
+ "agent-id": { type: "string" },
36
+ "url-env": { type: "string", default: "DATABASE_URL" },
37
+ "dry-run": { type: "boolean", default: false },
38
+ help: { type: "boolean", default: false },
39
+ },
40
+ });
41
+
42
+ if (args.help) {
43
+ console.log(`
44
+ Usage: node scripts/migrate-to-engine.mjs [options]
45
+
46
+ Options:
47
+ --source Source backend: "local" or "upstash" (default: local)
48
+ --target Target engine: "sqlite" or "postgresql" (default: sqlite)
49
+ --working-dir Project working directory (default: cwd)
50
+ --agent-id Agent ID (required for upstash, auto-detected for local)
51
+ --url-env Env var name for PostgreSQL URL (default: DATABASE_URL)
52
+ --dry-run Print what would be imported without writing
53
+ --help Show this help
54
+ `);
55
+ process.exit(0);
56
+ }
57
+
58
+ const SOURCE = args.source;
59
+ const TARGET = args.target;
60
+ const WORKING_DIR = args["working-dir"];
61
+ const AGENT_ID = args["agent-id"];
62
+ const URL_ENV = args["url-env"];
63
+ const DRY_RUN = args["dry-run"];
64
+
65
+ // ---------------------------------------------------------------------------
66
+ // Upstash reader
67
+ // ---------------------------------------------------------------------------
68
+
69
+ async function upstashGet(baseUrl, token, key) {
70
+ const res = await fetch(`${baseUrl}/get/${encodeURIComponent(key)}`, {
71
+ method: "POST",
72
+ headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
73
+ });
74
+ if (!res.ok) return undefined;
75
+ const { result } = await res.json();
76
+ return result ?? undefined;
77
+ }
78
+
79
+ async function upstashScan(baseUrl, token, pattern) {
80
+ let cursor = "0";
81
+ const keys = [];
82
+ do {
83
+ const res = await fetch(baseUrl, {
84
+ method: "POST",
85
+ headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
86
+ body: JSON.stringify(["SCAN", cursor, "MATCH", pattern, "COUNT", "5000"]),
87
+ });
88
+ const { result } = await res.json();
89
+ cursor = result[0];
90
+ keys.push(...result[1]);
91
+ process.stdout.write(`\r Scanning: ${keys.length} keys (cursor: ${cursor})`);
92
+ } while (cursor !== "0" && cursor !== 0);
93
+ console.log("");
94
+ return keys;
95
+ }
96
+
97
+ // ---------------------------------------------------------------------------
98
+ // Local JSON reader
99
+ // ---------------------------------------------------------------------------
100
+
101
+ async function readJsonSafe(filePath) {
102
+ try {
103
+ return JSON.parse(await readFile(filePath, "utf8"));
104
+ } catch {
105
+ return undefined;
106
+ }
107
+ }
108
+
109
+ async function findAgentDir(workingDir) {
110
+ const ponchoDir = resolve(workingDir, ".poncho");
111
+ try {
112
+ const entries = await readdir(ponchoDir, { withFileTypes: true });
113
+ for (const e of entries) {
114
+ if (e.isDirectory() && !e.name.startsWith(".")) {
115
+ return { dir: resolve(ponchoDir, e.name), id: e.name };
116
+ }
117
+ }
118
+ } catch { /* no .poncho dir */ }
119
+ return undefined;
120
+ }
121
+
122
+ // ---------------------------------------------------------------------------
123
+ // Read from local
124
+ // ---------------------------------------------------------------------------
125
+
126
+ async function readLocal(workingDir) {
127
+ const agent = await findAgentDir(workingDir);
128
+ if (!agent) {
129
+ console.error("No .poncho agent directory found in", workingDir);
130
+ process.exit(1);
131
+ }
132
+ console.log(`Found local agent: ${agent.id} at ${agent.dir}`);
133
+
134
+ const data = { agentId: agent.id, conversations: [], memories: [], todos: [], reminders: [] };
135
+
136
+ // Conversations: read index + individual files
137
+ const indexPath = resolve(agent.dir, "conversations", "index.json");
138
+ const index = await readJsonSafe(indexPath);
139
+ if (index?.conversations) {
140
+ for (const entry of index.conversations) {
141
+ const convFile = resolve(agent.dir, entry.fileName);
142
+ const conv = await readJsonSafe(convFile);
143
+ if (conv) {
144
+ data.conversations.push(conv);
145
+ }
146
+ }
147
+ } else {
148
+ // Try reading conversation files directly
149
+ try {
150
+ const files = await readdir(agent.dir);
151
+ for (const f of files) {
152
+ if (f.endsWith(".json") && f.includes("--")) {
153
+ const conv = await readJsonSafe(resolve(agent.dir, f));
154
+ if (conv?.conversationId) data.conversations.push(conv);
155
+ }
156
+ }
157
+ } catch { /* no conversation files */ }
158
+ }
159
+
160
+ // Memory (default tenant)
161
+ const memPath = resolve(agent.dir, "memory.json");
162
+ const mem = await readJsonSafe(memPath);
163
+ if (mem?.main?.content) {
164
+ data.memories.push({ tenantId: null, content: mem.main });
165
+ }
166
+
167
+ // Todos
168
+ const todosDir = resolve(agent.dir, "todos");
169
+ try {
170
+ const todoFiles = await readdir(todosDir);
171
+ for (const f of todoFiles) {
172
+ if (f.endsWith(".json")) {
173
+ const todos = await readJsonSafe(resolve(todosDir, f));
174
+ if (Array.isArray(todos)) {
175
+ const conversationId = basename(f, ".json");
176
+ data.todos.push({ conversationId, items: todos });
177
+ }
178
+ }
179
+ }
180
+ } catch { /* no todos dir */ }
181
+
182
+ // Reminders
183
+ const remPath = resolve(agent.dir, "reminders.json");
184
+ const rems = await readJsonSafe(remPath);
185
+ if (Array.isArray(rems)) {
186
+ data.reminders = rems;
187
+ }
188
+
189
+ return data;
190
+ }
191
+
192
+ // ---------------------------------------------------------------------------
193
+ // Read from Upstash
194
+ // ---------------------------------------------------------------------------
195
+
196
+ async function readUpstash(agentId) {
197
+ const baseUrl = (process.env.UPSTASH_REDIS_REST_URL ?? "").replace(/\/+$/, "");
198
+ const token = process.env.UPSTASH_REDIS_REST_TOKEN ?? "";
199
+ if (!baseUrl || !token) {
200
+ console.error("Missing UPSTASH_REDIS_REST_URL or UPSTASH_REDIS_REST_TOKEN");
201
+ process.exit(1);
202
+ }
203
+ if (!agentId) {
204
+ console.error("--agent-id is required for upstash source");
205
+ process.exit(1);
206
+ }
207
+
208
+ const prefix = `poncho:v1:${agentId}`;
209
+ console.log(`Scanning Upstash keys with prefix: ${prefix}*`);
210
+
211
+ const data = { agentId, conversations: [], memories: [], todos: [], reminders: [] };
212
+ const convMetaMap = new Map();
213
+
214
+ // Scan all keys
215
+ const keys = await upstashScan(baseUrl, token, `${prefix}*`);
216
+ console.log(`Found ${keys.length} keys`);
217
+
218
+ // Batch fetch with MGET (small batches — large conversation payloads
219
+ // can cause Upstash to truncate responses with big batches)
220
+ const BATCH = 5;
221
+ let processed = 0;
222
+ for (let i = 0; i < keys.length; i += BATCH) {
223
+ const batch = keys.slice(i, i + BATCH);
224
+ const res = await fetch(baseUrl, {
225
+ method: "POST",
226
+ headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
227
+ body: JSON.stringify(["MGET", ...batch]),
228
+ });
229
+ const { result: values } = await res.json();
230
+
231
+ for (let j = 0; j < batch.length; j++) {
232
+ const key = batch[j];
233
+ const raw = values?.[j];
234
+ if (!raw) continue;
235
+
236
+ try {
237
+ const parsed = JSON.parse(raw);
238
+
239
+ if (key.includes(":convmeta:")) {
240
+ // Conversation summary/metadata (has channelMeta, title, etc.)
241
+ // Skip timestamped variants like convmeta:id_1234567890
242
+ if (parsed.conversationId && !key.match(/:convmeta:[^:]+_\d+$/)) {
243
+ convMetaMap.set(parsed.conversationId, parsed);
244
+ }
245
+ } else if (key.includes(":conv:")) {
246
+ if (parsed.conversationId) {
247
+ data.conversations.push(parsed);
248
+ } else {
249
+ console.warn(`\n Warning: conv key missing conversationId: ${key}`);
250
+ }
251
+ } else if (key.includes(":memory:main")) {
252
+ // Extract tenant from key: ...:t:{tenantId}:memory:main or just ...:memory:main
253
+ const tenantMatch = key.match(/:t:([^:]+):memory:main/);
254
+ const tenantId = tenantMatch ? tenantMatch[1] : null;
255
+ const content = parsed?.main ?? parsed;
256
+ data.memories.push({ tenantId, content });
257
+ } else if (key.includes(":todos:")) {
258
+ const convId = key.split(":todos:")[1];
259
+ if (Array.isArray(parsed)) {
260
+ data.todos.push({ conversationId: convId, items: parsed });
261
+ }
262
+ } else if (key.includes(":reminders")) {
263
+ if (Array.isArray(parsed)) {
264
+ data.reminders = parsed;
265
+ }
266
+ }
267
+ } catch (e) {
268
+ if (key.includes(":conv:")) {
269
+ console.warn(`\n Warning: failed to parse conv key: ${key} — ${e.message}`);
270
+ }
271
+ }
272
+ }
273
+
274
+ processed += batch.length;
275
+ process.stdout.write(`\r Reading keys: ${processed}/${keys.length}`);
276
+ }
277
+ console.log(""); // newline after progress
278
+
279
+ // Merge convmeta (channelMeta, etc.) into full conversation data
280
+ let metaMerged = 0;
281
+ for (const conv of data.conversations) {
282
+ const meta = convMetaMap.get(conv.conversationId);
283
+ if (meta) {
284
+ if (meta.channelMeta && !conv.channelMeta) {
285
+ conv.channelMeta = meta.channelMeta;
286
+ }
287
+ if (meta.title && (!conv.title || conv.title === "New conversation")) {
288
+ conv.title = meta.title;
289
+ }
290
+ metaMerged++;
291
+ }
292
+ }
293
+
294
+ // Add conversations that only exist in convmeta (no conv: key)
295
+ const existingIds = new Set(data.conversations.map(c => c.conversationId));
296
+ let metaOnly = 0;
297
+ for (const [id, meta] of convMetaMap) {
298
+ if (!existingIds.has(id)) {
299
+ // convmeta-only conversation (no full data) — import as empty with metadata
300
+ data.conversations.push({
301
+ conversationId: id,
302
+ title: meta.title ?? "New conversation",
303
+ messages: [],
304
+ ownerId: meta.ownerId ?? "local-owner",
305
+ tenantId: meta.tenantId ?? null,
306
+ channelMeta: meta.channelMeta,
307
+ createdAt: meta.createdAt ?? Date.now(),
308
+ updatedAt: meta.updatedAt ?? Date.now(),
309
+ });
310
+ metaOnly++;
311
+ }
312
+ }
313
+ if (metaMerged > 0) console.log(` Merged metadata into ${metaMerged} conversations`);
314
+ if (metaOnly > 0) console.log(` Added ${metaOnly} metadata-only conversations (no message data in Upstash)`);
315
+
316
+ return data;
317
+ }
318
+
319
+ // ---------------------------------------------------------------------------
320
+ // Read from engine (sqlite or postgresql)
321
+ // ---------------------------------------------------------------------------
322
+
323
+ async function readFromEngine(sourceProvider, agentId) {
324
+ if (!agentId) {
325
+ // Try to detect from .poncho directory
326
+ const agent = await findAgentDir(WORKING_DIR);
327
+ if (agent) agentId = agent.id;
328
+ else {
329
+ console.error("--agent-id is required for engine source (or run from a project with .poncho/)");
330
+ process.exit(1);
331
+ }
332
+ }
333
+
334
+ const { createStorageEngine } = await import("../dist/index.js");
335
+ const engine = createStorageEngine({
336
+ provider: sourceProvider,
337
+ workingDir: WORKING_DIR,
338
+ agentId,
339
+ urlEnv: SOURCE === "postgresql" ? (process.env.SOURCE_DATABASE_URL ? "SOURCE_DATABASE_URL" : URL_ENV) : undefined,
340
+ });
341
+ await engine.initialize();
342
+
343
+ console.log(`Reading from ${sourceProvider} engine (agent: ${agentId})`);
344
+
345
+ const data = { agentId, conversations: [], memories: [], todos: [], reminders: [] };
346
+
347
+ // Conversations
348
+ const summaries = await engine.conversations.list();
349
+ let i = 0;
350
+ for (const s of summaries) {
351
+ const conv = await engine.conversations.get(s.conversationId);
352
+ if (conv) data.conversations.push(conv);
353
+ i++;
354
+ if (i % 20 === 0) process.stdout.write(`\r Reading conversations: ${i}/${summaries.length}`);
355
+ }
356
+ if (summaries.length > 0) console.log(`\r Reading conversations: ${summaries.length}/${summaries.length}`);
357
+
358
+ // Memory
359
+ const mem = await engine.memory.get();
360
+ if (mem.content) data.memories.push({ tenantId: null, content: mem });
361
+
362
+ // Reminders
363
+ data.reminders = await engine.reminders.list();
364
+
365
+ // VFS files (read all paths and content)
366
+ data.vfsFiles = [];
367
+ const paths = engine.vfs.listAllPaths("__default__");
368
+ for (const path of paths) {
369
+ try {
370
+ const stat = await engine.vfs.stat("__default__", path);
371
+ if (stat?.type === "file") {
372
+ const content = await engine.vfs.readFile("__default__", path);
373
+ data.vfsFiles.push({ path, content, mimeType: stat.mimeType });
374
+ }
375
+ } catch { /* skip unreadable */ }
376
+ }
377
+
378
+ await engine.close();
379
+ return data;
380
+ }
381
+
382
+ // ---------------------------------------------------------------------------
383
+ // Write to engine
384
+ // ---------------------------------------------------------------------------
385
+
386
+ async function writeToEngine(data) {
387
+ // Dynamic import of the storage engine
388
+ const { createStorageEngine } = await import("../dist/index.js");
389
+
390
+ const engine = createStorageEngine({
391
+ provider: TARGET,
392
+ workingDir: WORKING_DIR,
393
+ agentId: data.agentId,
394
+ urlEnv: URL_ENV,
395
+ });
396
+
397
+ await engine.initialize();
398
+
399
+ let convCount = 0;
400
+ let todoCount = 0;
401
+ let reminderCount = 0;
402
+ let timestampFixups = null;
403
+
404
+ // Import conversations
405
+ for (const conv of data.conversations) {
406
+ if (DRY_RUN) {
407
+ convCount++;
408
+ continue;
409
+ }
410
+ // Create with a new ID, then overwrite with original data (preserving timestamps)
411
+ const created = await engine.conversations.create(
412
+ conv.ownerId ?? "local-owner",
413
+ conv.title,
414
+ conv.tenantId,
415
+ );
416
+ // Temporarily set updatedAt so update() writes the original value
417
+ const merged = {
418
+ ...conv,
419
+ conversationId: created.conversationId,
420
+ updatedAt: conv.updatedAt || Date.now(),
421
+ createdAt: conv.createdAt || Date.now(),
422
+ };
423
+ // update() will overwrite updatedAt with Date.now(), so we track what we want
424
+ const wantedUpdatedAt = merged.updatedAt;
425
+ await engine.conversations.update(merged);
426
+ // Stash the desired timestamp for a post-import fixup
427
+ if (!timestampFixups) timestampFixups = [];
428
+ timestampFixups.push({ id: created.conversationId, updatedAt: wantedUpdatedAt, createdAt: merged.createdAt });
429
+ convCount++;
430
+ }
431
+
432
+ // Import memories (per-tenant)
433
+ let memoryCount = 0;
434
+ for (const mem of (data.memories ?? [])) {
435
+ const content = typeof mem.content === "string" ? mem.content : mem.content?.content;
436
+ if (!content) continue;
437
+ if (DRY_RUN) {
438
+ console.log(` [dry-run] Would import memory for tenant=${mem.tenantId ?? "(default)"} (${content.length} chars)`);
439
+ } else {
440
+ await engine.memory.update(content, mem.tenantId);
441
+ }
442
+ memoryCount++;
443
+ }
444
+
445
+ // Import todos
446
+ for (const { conversationId, items } of data.todos) {
447
+ if (DRY_RUN) {
448
+ console.log(` [dry-run] Would import ${items.length} todos for conversation ${conversationId}`);
449
+ todoCount += items.length;
450
+ continue;
451
+ }
452
+ await engine.todos.set(conversationId, items);
453
+ todoCount += items.length;
454
+ }
455
+
456
+ // Import reminders
457
+ for (const reminder of data.reminders) {
458
+ if (DRY_RUN) {
459
+ console.log(` [dry-run] Would import reminder: ${reminder.id} "${reminder.task}"`);
460
+ reminderCount++;
461
+ continue;
462
+ }
463
+ await engine.reminders.create({
464
+ task: reminder.task,
465
+ scheduledAt: reminder.scheduledAt,
466
+ timezone: reminder.timezone,
467
+ conversationId: reminder.conversationId ?? "__default__",
468
+ ownerId: reminder.ownerId,
469
+ tenantId: reminder.tenantId,
470
+ });
471
+ reminderCount++;
472
+ }
473
+
474
+ // Import VFS files (from engine-to-engine migrations)
475
+ let vfsCount = 0;
476
+ if (data.vfsFiles?.length) {
477
+ for (const file of data.vfsFiles) {
478
+ if (DRY_RUN) {
479
+ console.log(` [dry-run] Would import VFS file: ${file.path} (${file.content.byteLength} bytes)`);
480
+ vfsCount++;
481
+ continue;
482
+ }
483
+ await engine.vfs.writeFile("__default__", file.path, file.content, file.mimeType);
484
+ vfsCount++;
485
+ }
486
+ }
487
+
488
+ // Fix conversation timestamps (update() always sets updatedAt = now)
489
+ if (timestampFixups?.length && !DRY_RUN && TARGET === "sqlite") {
490
+ const Database = (await import("better-sqlite3")).default;
491
+ const { resolve: r } = await import("node:path");
492
+ const dbPath = r(WORKING_DIR, ".poncho", "poncho.db");
493
+ const db = new Database(dbPath);
494
+ const stmt = db.prepare("UPDATE conversations SET updated_at = ?, created_at = ? WHERE id = ?");
495
+ for (const fix of timestampFixups) {
496
+ stmt.run(new Date(fix.updatedAt).toISOString(), new Date(fix.createdAt).toISOString(), fix.id);
497
+ }
498
+ db.close();
499
+ console.log(` Fixed timestamps for ${timestampFixups.length} conversations`);
500
+ }
501
+
502
+ await engine.close();
503
+
504
+ return { convCount, todoCount, reminderCount, vfsCount, memoryCount };
505
+ }
506
+
507
+ // ---------------------------------------------------------------------------
508
+ // Main
509
+ // ---------------------------------------------------------------------------
510
+
511
+ async function main() {
512
+ console.log(`\nMigrating: ${SOURCE} → ${TARGET}`);
513
+ console.log(`Working dir: ${WORKING_DIR}`);
514
+ if (DRY_RUN) console.log("(dry run — no data will be written)\n");
515
+
516
+ // Read source
517
+ let data;
518
+ if (SOURCE === "local") {
519
+ data = await readLocal(WORKING_DIR);
520
+ } else if (SOURCE === "upstash") {
521
+ data = await readUpstash(AGENT_ID);
522
+ } else if (SOURCE === "sqlite" || SOURCE === "postgresql") {
523
+ data = await readFromEngine(SOURCE, AGENT_ID);
524
+ } else {
525
+ console.error(`Unknown source: ${SOURCE}. Use "local", "upstash", "sqlite", or "postgresql".`);
526
+ process.exit(1);
527
+ }
528
+
529
+ console.log(`\nRead from ${SOURCE}:`);
530
+ console.log(` Conversations: ${data.conversations.length}`);
531
+ console.log(` Memories: ${data.memories?.length ?? 0}`);
532
+ console.log(` Todo lists: ${data.todos.length}`);
533
+ console.log(` Reminders: ${data.reminders.length}`);
534
+ if (data.vfsFiles?.length) console.log(` VFS files: ${data.vfsFiles.length}`);
535
+
536
+ if (data.conversations.length === 0 && !data.memories?.length && data.todos.length === 0 && data.reminders.length === 0 && !data.vfsFiles?.length) {
537
+ console.log("\nNothing to migrate.");
538
+ process.exit(0);
539
+ }
540
+
541
+ // Write to target
542
+ const result = await writeToEngine(data);
543
+
544
+ console.log(`\n${DRY_RUN ? "Would import" : "Imported"} to ${TARGET}:`);
545
+ console.log(` Conversations: ${result.convCount}`);
546
+ console.log(` Memories: ${result.memoryCount}`);
547
+ console.log(` Todos: ${result.todoCount}`);
548
+ console.log(` Reminders: ${result.reminderCount}`);
549
+ if (result.vfsCount) console.log(` VFS files: ${result.vfsCount}`);
550
+ console.log("\nDone!");
551
+ }
552
+
553
+ main().catch((err) => {
554
+ console.error("\nMigration failed:", err);
555
+ process.exit(1);
556
+ });