@jamesaphoenix/tx-core 0.5.10 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. package/README.md +47 -4
  2. package/dist/errors.d.ts +25 -1
  3. package/dist/errors.d.ts.map +1 -1
  4. package/dist/errors.js +16 -0
  5. package/dist/errors.js.map +1 -1
  6. package/dist/index.d.ts +6 -5
  7. package/dist/index.d.ts.map +1 -1
  8. package/dist/index.js +7 -4
  9. package/dist/index.js.map +1 -1
  10. package/dist/layer.d.ts +8 -4
  11. package/dist/layer.d.ts.map +1 -1
  12. package/dist/layer.js +25 -4
  13. package/dist/layer.js.map +1 -1
  14. package/dist/mappers/index.d.ts +1 -0
  15. package/dist/mappers/index.d.ts.map +1 -1
  16. package/dist/mappers/index.js +2 -0
  17. package/dist/mappers/index.js.map +1 -1
  18. package/dist/mappers/memory.d.ts +34 -0
  19. package/dist/mappers/memory.d.ts.map +1 -0
  20. package/dist/mappers/memory.js +135 -0
  21. package/dist/mappers/memory.js.map +1 -0
  22. package/dist/mappers/pin.d.ts +6 -0
  23. package/dist/mappers/pin.d.ts.map +1 -0
  24. package/dist/mappers/pin.js +10 -0
  25. package/dist/mappers/pin.js.map +1 -0
  26. package/dist/migrations-embedded.d.ts.map +1 -1
  27. package/dist/migrations-embedded.js +10 -0
  28. package/dist/migrations-embedded.js.map +1 -1
  29. package/dist/repo/index.d.ts +2 -0
  30. package/dist/repo/index.d.ts.map +1 -1
  31. package/dist/repo/index.js +2 -0
  32. package/dist/repo/index.js.map +1 -1
  33. package/dist/repo/memory-repo.d.ts +88 -0
  34. package/dist/repo/memory-repo.d.ts.map +1 -0
  35. package/dist/repo/memory-repo.js +556 -0
  36. package/dist/repo/memory-repo.js.map +1 -0
  37. package/dist/repo/pin-repo.d.ts +29 -0
  38. package/dist/repo/pin-repo.d.ts.map +1 -0
  39. package/dist/repo/pin-repo.js +79 -0
  40. package/dist/repo/pin-repo.js.map +1 -0
  41. package/dist/schemas/index.d.ts +2 -2
  42. package/dist/schemas/index.d.ts.map +1 -1
  43. package/dist/schemas/index.js +1 -1
  44. package/dist/schemas/index.js.map +1 -1
  45. package/dist/schemas/sync.d.ts +598 -3
  46. package/dist/schemas/sync.d.ts.map +1 -1
  47. package/dist/schemas/sync.js +280 -6
  48. package/dist/schemas/sync.js.map +1 -1
  49. package/dist/services/agent-service.d.ts +2 -0
  50. package/dist/services/agent-service.d.ts.map +1 -1
  51. package/dist/services/agent-service.js +15 -1
  52. package/dist/services/agent-service.js.map +1 -1
  53. package/dist/services/cycle-scan-service.d.ts.map +1 -1
  54. package/dist/services/cycle-scan-service.js +11 -7
  55. package/dist/services/cycle-scan-service.js.map +1 -1
  56. package/dist/services/diversifier-service.d.ts +2 -2
  57. package/dist/services/diversifier-service.d.ts.map +1 -1
  58. package/dist/services/diversifier-service.js.map +1 -1
  59. package/dist/services/index.d.ts +3 -1
  60. package/dist/services/index.d.ts.map +1 -1
  61. package/dist/services/index.js +2 -0
  62. package/dist/services/index.js.map +1 -1
  63. package/dist/services/learning-service.d.ts +3 -2
  64. package/dist/services/learning-service.d.ts.map +1 -1
  65. package/dist/services/learning-service.js.map +1 -1
  66. package/dist/services/memory-retriever-service.d.ts +48 -0
  67. package/dist/services/memory-retriever-service.d.ts.map +1 -0
  68. package/dist/services/memory-retriever-service.js +332 -0
  69. package/dist/services/memory-retriever-service.js.map +1 -0
  70. package/dist/services/memory-service.d.ts +49 -0
  71. package/dist/services/memory-service.d.ts.map +1 -0
  72. package/dist/services/memory-service.js +1061 -0
  73. package/dist/services/memory-service.js.map +1 -0
  74. package/dist/services/pin-service.d.ts +33 -0
  75. package/dist/services/pin-service.d.ts.map +1 -0
  76. package/dist/services/pin-service.js +140 -0
  77. package/dist/services/pin-service.js.map +1 -0
  78. package/dist/services/ready-service.js +1 -1
  79. package/dist/services/ready-service.js.map +1 -1
  80. package/dist/services/retriever-service.d.ts +2 -2
  81. package/dist/services/retriever-service.d.ts.map +1 -1
  82. package/dist/services/retriever-service.js.map +1 -1
  83. package/dist/services/sync-service.d.ts +123 -4
  84. package/dist/services/sync-service.d.ts.map +1 -1
  85. package/dist/services/sync-service.js +1099 -11
  86. package/dist/services/sync-service.js.map +1 -1
  87. package/dist/services/task-service.js +2 -2
  88. package/dist/services/task-service.js.map +1 -1
  89. package/dist/utils/math.d.ts +4 -3
  90. package/dist/utils/math.d.ts.map +1 -1
  91. package/dist/utils/math.js +9 -4
  92. package/dist/utils/math.js.map +1 -1
  93. package/dist/utils/pin-file.d.ts +33 -0
  94. package/dist/utils/pin-file.d.ts.map +1 -0
  95. package/dist/utils/pin-file.js +87 -0
  96. package/dist/utils/pin-file.js.map +1 -0
  97. package/dist/utils/toml-config.d.ts +12 -0
  98. package/dist/utils/toml-config.d.ts.map +1 -1
  99. package/dist/utils/toml-config.js +111 -1
  100. package/dist/utils/toml-config.js.map +1 -1
  101. package/migrations/029_memory.sql +83 -0
  102. package/migrations/030_context_pins.sql +24 -0
  103. package/package.json +1 -1
@@ -1,13 +1,21 @@
1
1
  import { Context, Effect, Exit, Layer, Schema } from "effect";
2
2
  import { writeFile, rename, readFile, stat, mkdir, access } from "node:fs/promises";
3
- import { readFileSync } from "node:fs";
3
+ import { readFileSync, writeFileSync, mkdirSync, renameSync, unlinkSync } from "node:fs";
4
4
  import { createHash } from "node:crypto";
5
- import { dirname, resolve } from "node:path";
5
+ import { dirname, resolve, sep } from "node:path";
6
6
  import { DatabaseError, ValidationError } from "../errors.js";
7
7
  import { SqliteClient } from "../db.js";
8
8
  import { TaskService } from "./task-service.js";
9
9
  import { DependencyRepository } from "../repo/dep-repo.js";
10
- import { SyncOperation as SyncOperationSchema } from "../schemas/sync.js";
10
+ import { LearningRepository } from "../repo/learning-repo.js";
11
+ import { FileLearningRepository } from "../repo/file-learning-repo.js";
12
+ import { AttemptRepository } from "../repo/attempt-repo.js";
13
+ import { PinRepository } from "../repo/pin-repo.js";
14
+ import { syncBlocks } from "../utils/pin-file.js";
15
+ import { AnchorRepository } from "../repo/anchor-repo.js";
16
+ import { EdgeRepository } from "../repo/edge-repo.js";
17
+ import { DocRepository } from "../repo/doc-repo.js";
18
+ import { LearningUpsertOp as LearningUpsertOpSchema, FileLearningUpsertOp as FileLearningUpsertOpSchema, AttemptUpsertOp as AttemptUpsertOpSchema, PinUpsertOp as PinUpsertOpSchema, AnchorUpsertOp as AnchorUpsertOpSchema, EdgeUpsertOp as EdgeUpsertOpSchema, DocUpsertOp as DocUpsertOpSchema, DocLinkUpsertOp as DocLinkUpsertOpSchema, TaskDocLinkUpsertOp as TaskDocLinkUpsertOpSchema, InvariantUpsertOp as InvariantUpsertOpSchema, LabelUpsertOp as LabelUpsertOpSchema, LabelAssignmentUpsertOp as LabelAssignmentUpsertOpSchema, SyncOperation as SyncOperationSchema } from "../schemas/sync.js";
11
19
  /**
12
20
  * SyncService provides JSONL-based export/import for git-tracked task syncing.
13
21
  * See DD-009 for full specification.
@@ -15,6 +23,32 @@ import { SyncOperation as SyncOperationSchema } from "../schemas/sync.js";
15
23
  export class SyncService extends Context.Tag("SyncService")() {
16
24
  }
17
25
  const DEFAULT_JSONL_PATH = ".tx/tasks.jsonl";
26
+ const DEFAULT_LEARNINGS_JSONL_PATH = ".tx/learnings.jsonl";
27
+ const DEFAULT_FILE_LEARNINGS_JSONL_PATH = ".tx/file-learnings.jsonl";
28
+ const DEFAULT_ATTEMPTS_JSONL_PATH = ".tx/attempts.jsonl";
29
+ const DEFAULT_PINS_JSONL_PATH = ".tx/pins.jsonl";
30
+ const DEFAULT_ANCHORS_JSONL_PATH = ".tx/anchors.jsonl";
31
+ const DEFAULT_EDGES_JSONL_PATH = ".tx/edges.jsonl";
32
+ const DEFAULT_DOCS_JSONL_PATH = ".tx/docs.jsonl";
33
+ const DEFAULT_LABELS_JSONL_PATH = ".tx/labels.jsonl";
34
+ /**
35
+ * Compute a content hash for cross-machine dedup.
36
+ * Entities with auto-increment IDs use this to identify duplicates.
37
+ */
38
+ const contentHash = (...parts) => createHash("sha256").update(parts.join("|")).digest("hex");
39
+ /**
40
+ * Convert SQLite datetime string ("YYYY-MM-DD HH:MM:SS") to ISO 8601 ("YYYY-MM-DDTHH:MM:SS").
41
+ * Labels use raw SQL so timestamps come in SQLite format rather than Date objects.
42
+ */
43
+ const sqliteToIso = (s) => {
44
+ if (/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(s))
45
+ return s.replace(" ", "T") + ".000Z";
46
+ return s;
47
+ };
48
+ /**
49
+ * Empty entity import result for early returns.
50
+ */
51
+ const EMPTY_ENTITY_IMPORT_RESULT = { imported: 0, skipped: 0 };
18
52
  /**
19
53
  * Empty import result for early returns.
20
54
  */
@@ -115,6 +149,8 @@ const taskToUpsertOp = (task) => ({
115
149
  status: task.status,
116
150
  score: task.score,
117
151
  parentId: task.parentId,
152
+ createdAt: task.createdAt.toISOString(),
153
+ completedAt: task.completedAt?.toISOString() ?? null,
118
154
  assigneeType: task.assigneeType ?? null,
119
155
  assigneeId: task.assigneeId ?? null,
120
156
  assignedAt: task.assignedAt?.toISOString() ?? null,
@@ -132,6 +168,294 @@ const depToAddOp = (dep) => ({
132
168
  blockerId: dep.blockerId,
133
169
  blockedId: dep.blockedId
134
170
  });
171
+ /**
172
+ * Convert a Learning to a LearningUpsertOp for JSONL export.
173
+ */
174
+ const learningToUpsertOp = (learning) => ({
175
+ v: 1,
176
+ op: "learning_upsert",
177
+ ts: learning.createdAt.toISOString(),
178
+ id: learning.id,
179
+ contentHash: contentHash(learning.content, learning.sourceType),
180
+ data: {
181
+ content: learning.content,
182
+ sourceType: learning.sourceType,
183
+ sourceRef: learning.sourceRef,
184
+ keywords: [...learning.keywords],
185
+ category: learning.category
186
+ }
187
+ });
188
+ /**
189
+ * Convert a FileLearning to a FileLearningUpsertOp for JSONL export.
190
+ */
191
+ const fileLearningToUpsertOp = (fl) => ({
192
+ v: 1,
193
+ op: "file_learning_upsert",
194
+ ts: fl.createdAt.toISOString(),
195
+ id: fl.id,
196
+ contentHash: contentHash(fl.filePattern, fl.note),
197
+ data: {
198
+ filePattern: fl.filePattern,
199
+ note: fl.note,
200
+ taskId: fl.taskId
201
+ }
202
+ });
203
+ /**
204
+ * Convert an Attempt to an AttemptUpsertOp for JSONL export.
205
+ */
206
+ const attemptToUpsertOp = (attempt) => ({
207
+ v: 1,
208
+ op: "attempt_upsert",
209
+ ts: attempt.createdAt.toISOString(),
210
+ id: attempt.id,
211
+ contentHash: contentHash(attempt.taskId, attempt.approach),
212
+ data: {
213
+ taskId: attempt.taskId,
214
+ approach: attempt.approach,
215
+ outcome: attempt.outcome,
216
+ reason: attempt.reason
217
+ }
218
+ });
219
+ /**
220
+ * Convert a Pin to a PinUpsertOp for JSONL export.
221
+ */
222
+ const pinToUpsertOp = (pin) => ({
223
+ v: 1,
224
+ op: "pin_upsert",
225
+ ts: new Date(pin.updatedAt).toISOString(),
226
+ id: pin.id,
227
+ contentHash: contentHash(pin.id, pin.content),
228
+ data: {
229
+ content: pin.content
230
+ }
231
+ });
232
+ /**
233
+ * Convert an Anchor to an AnchorUpsertOp for JSONL export.
234
+ * Uses the learning's content hash (looked up from learningHashMap) as stable reference.
235
+ */
236
+ const anchorToUpsertOp = (anchor, learningHashMap) => {
237
+ const learningContentHash = learningHashMap.get(anchor.learningId) ?? "";
238
+ return {
239
+ v: 1,
240
+ op: "anchor_upsert",
241
+ ts: anchor.createdAt.toISOString(),
242
+ id: anchor.id,
243
+ contentHash: contentHash(learningContentHash, anchor.filePath, anchor.anchorType, anchor.anchorValue),
244
+ data: {
245
+ learningContentHash,
246
+ anchorType: anchor.anchorType,
247
+ anchorValue: anchor.anchorValue,
248
+ filePath: anchor.filePath,
249
+ symbolFqname: anchor.symbolFqname,
250
+ lineStart: anchor.lineStart,
251
+ lineEnd: anchor.lineEnd,
252
+ contentHash: anchor.contentHash,
253
+ contentPreview: anchor.contentPreview,
254
+ status: anchor.status,
255
+ pinned: anchor.pinned
256
+ }
257
+ };
258
+ };
259
+ /**
260
+ * Convert an Edge to an EdgeUpsertOp for JSONL export.
261
+ */
262
+ const edgeToUpsertOp = (edge) => ({
263
+ v: 1,
264
+ op: "edge_upsert",
265
+ ts: edge.createdAt.toISOString(),
266
+ id: edge.id,
267
+ contentHash: contentHash(edge.edgeType, edge.sourceType, edge.sourceId, edge.targetType, edge.targetId),
268
+ data: {
269
+ edgeType: edge.edgeType,
270
+ sourceType: edge.sourceType,
271
+ sourceId: edge.sourceId,
272
+ targetType: edge.targetType,
273
+ targetId: edge.targetId,
274
+ weight: edge.weight,
275
+ metadata: edge.metadata
276
+ }
277
+ });
278
+ /**
279
+ * Convert a Doc to a DocUpsertOp for JSONL export.
280
+ */
281
+ const docToUpsertOp = (doc, parentDocKeyMap) => ({
282
+ v: 1,
283
+ op: "doc_upsert",
284
+ ts: doc.createdAt.toISOString(),
285
+ id: doc.id,
286
+ contentHash: contentHash(doc.kind, doc.name, String(doc.version)),
287
+ data: {
288
+ kind: doc.kind,
289
+ name: doc.name,
290
+ title: doc.title,
291
+ version: doc.version,
292
+ status: doc.status,
293
+ filePath: doc.filePath,
294
+ hash: doc.hash,
295
+ parentDocKey: doc.parentDocId ? (parentDocKeyMap.get(doc.parentDocId) ?? null) : null,
296
+ lockedAt: doc.lockedAt?.toISOString() ?? null,
297
+ metadata: doc.metadata
298
+ }
299
+ });
300
+ /**
301
+ * Convert a DocLink to a DocLinkUpsertOp for JSONL export.
302
+ * Uses docKeyMap to resolve integer IDs to stable name:version keys.
303
+ */
304
+ const docLinkToUpsertOp = (link, docKeyMap) => {
305
+ const fromDocKey = docKeyMap.get(link.fromDocId);
306
+ const toDocKey = docKeyMap.get(link.toDocId);
307
+ if (!fromDocKey || !toDocKey)
308
+ return null;
309
+ return {
310
+ v: 1,
311
+ op: "doc_link_upsert",
312
+ ts: link.createdAt.toISOString(),
313
+ id: link.id,
314
+ contentHash: contentHash(fromDocKey, toDocKey, link.linkType),
315
+ data: {
316
+ fromDocKey,
317
+ toDocKey,
318
+ linkType: link.linkType
319
+ }
320
+ };
321
+ };
322
+ /**
323
+ * Convert a TaskDocLink to a TaskDocLinkUpsertOp for JSONL export.
324
+ * Uses docKeyMap to resolve integer doc IDs to stable name:version keys.
325
+ */
326
+ const taskDocLinkToUpsertOp = (link, docKeyMap) => {
327
+ const docKey = docKeyMap.get(link.docId);
328
+ if (!docKey)
329
+ return null;
330
+ return {
331
+ v: 1,
332
+ op: "task_doc_link_upsert",
333
+ ts: link.createdAt.toISOString(),
334
+ id: link.id,
335
+ contentHash: contentHash(link.taskId, docKey),
336
+ data: {
337
+ taskId: link.taskId,
338
+ docKey,
339
+ linkType: link.linkType
340
+ }
341
+ };
342
+ };
343
+ /**
344
+ * Convert an Invariant to an InvariantUpsertOp for JSONL export.
345
+ * Uses docKeyMap to resolve integer doc IDs to stable name:version keys.
346
+ */
347
+ const invariantToUpsertOp = (inv, docKeyMap) => {
348
+ const docKey = docKeyMap.get(inv.docId);
349
+ if (!docKey)
350
+ return null;
351
+ return {
352
+ v: 1,
353
+ op: "invariant_upsert",
354
+ ts: inv.createdAt.toISOString(),
355
+ id: inv.id,
356
+ contentHash: contentHash(inv.id),
357
+ data: {
358
+ id: inv.id,
359
+ rule: inv.rule,
360
+ enforcement: inv.enforcement,
361
+ docKey,
362
+ subsystem: inv.subsystem,
363
+ testRef: inv.testRef,
364
+ lintRule: inv.lintRule,
365
+ promptRef: inv.promptRef,
366
+ status: inv.status,
367
+ metadata: inv.metadata
368
+ }
369
+ };
370
+ };
371
+ /**
372
+ * Convert a label row to a LabelUpsertOp for JSONL export.
373
+ */
374
+ const labelRowToUpsertOp = (row) => ({
375
+ v: 1,
376
+ op: "label_upsert",
377
+ ts: sqliteToIso(row.updated_at),
378
+ id: row.id,
379
+ contentHash: contentHash(row.name.toLowerCase()),
380
+ data: {
381
+ name: row.name,
382
+ color: row.color
383
+ }
384
+ });
385
+ /**
386
+ * Convert a label assignment row to a LabelAssignmentUpsertOp for JSONL export.
387
+ * Uses labelNameMap to resolve integer label IDs to stable names.
388
+ */
389
+ const labelAssignmentToUpsertOp = (row, labelNameMap) => {
390
+ const labelName = labelNameMap.get(row.label_id);
391
+ if (!labelName)
392
+ return null;
393
+ return {
394
+ v: 1,
395
+ op: "label_assignment_upsert",
396
+ ts: sqliteToIso(row.created_at),
397
+ contentHash: contentHash(row.task_id, labelName.toLowerCase()),
398
+ data: {
399
+ taskId: row.task_id,
400
+ labelName
401
+ }
402
+ };
403
+ };
404
+ /**
405
+ * Generic helper: parse a JSONL file, validate with schema, dedup by contentHash,
406
+ * filter against existing entities, and insert new ones via caller-provided batch function.
407
+ * Returns EntityImportResult with imported/skipped counts.
408
+ */
409
+ const importEntityJsonl = (filePath, schema, existingHashes, insertBatch) => Effect.gen(function* () {
410
+ const importFileExists = yield* fileExists(filePath);
411
+ if (!importFileExists) {
412
+ return EMPTY_ENTITY_IMPORT_RESULT;
413
+ }
414
+ const content = yield* Effect.tryPromise({
415
+ try: () => readFile(filePath, "utf-8"),
416
+ catch: (cause) => new DatabaseError({ cause })
417
+ });
418
+ const lines = content.trim().split("\n").filter(Boolean);
419
+ if (lines.length === 0) {
420
+ return EMPTY_ENTITY_IMPORT_RESULT;
421
+ }
422
+ // Parse and dedup by contentHash (keep latest by timestamp)
423
+ const states = new Map();
424
+ for (const line of lines) {
425
+ const parsed = yield* Effect.try({
426
+ try: () => JSON.parse(line),
427
+ catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
428
+ });
429
+ const op = yield* Effect.try({
430
+ try: () => Schema.decodeUnknownSync(schema)(parsed),
431
+ catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
432
+ });
433
+ const existing = states.get(op.contentHash);
434
+ if (!existing || op.ts > existing.ts) {
435
+ states.set(op.contentHash, op);
436
+ }
437
+ }
438
+ // Filter to new entities only (not already in DB)
439
+ const newOps = [];
440
+ let skipped = 0;
441
+ for (const op of states.values()) {
442
+ if (existingHashes.has(op.contentHash)) {
443
+ skipped++;
444
+ }
445
+ else {
446
+ newOps.push(op);
447
+ }
448
+ }
449
+ if (newOps.length === 0) {
450
+ return { imported: 0, skipped };
451
+ }
452
+ // Insert via caller-provided batch function (handles transaction)
453
+ const imported = yield* Effect.try({
454
+ try: () => insertBatch(newOps),
455
+ catch: (cause) => new DatabaseError({ cause })
456
+ });
457
+ return { imported, skipped };
458
+ });
135
459
  /**
136
460
  * Check if a file exists without blocking the event loop.
137
461
  */
@@ -154,6 +478,13 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
154
478
  const taskService = yield* TaskService;
155
479
  const depRepo = yield* DependencyRepository;
156
480
  const db = yield* SqliteClient;
481
+ const learningRepo = yield* LearningRepository;
482
+ const fileLearningRepo = yield* FileLearningRepository;
483
+ const attemptRepo = yield* AttemptRepository;
484
+ const pinRepo = yield* PinRepository;
485
+ const anchorRepo = yield* AnchorRepository;
486
+ const edgeRepo = yield* EdgeRepository;
487
+ const docRepo = yield* DocRepository;
157
488
  // Helper: Get config value from sync_config table
158
489
  const getConfig = (key) => Effect.try({
159
490
  try: () => {
@@ -169,7 +500,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
169
500
  },
170
501
  catch: (cause) => new DatabaseError({ cause })
171
502
  });
172
- return {
503
+ const syncService = {
173
504
  export: (path) => Effect.gen(function* () {
174
505
  const filePath = resolve(path ?? DEFAULT_JSONL_PATH);
175
506
  // Get all tasks and dependencies (explicit high limit for full export)
@@ -257,6 +588,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
257
588
  const deleteTaskStmt = db.prepare("DELETE FROM tasks WHERE id = ?");
258
589
  const insertDepStmt = db.prepare("INSERT INTO task_dependencies (blocker_id, blocked_id, created_at) VALUES (?, ?, ?)");
259
590
  const checkDepExistsStmt = db.prepare("SELECT 1 FROM task_dependencies WHERE blocker_id = ? AND blocked_id = ?");
591
+ // Cycle detection: check if adding blocker_id→blocked_id would create a cycle
592
+ // by walking DOWNSTREAM from blocked_id to see if it can reach blocker_id
593
+ const checkCycleStmt = db.prepare(`WITH RECURSIVE reachable(id) AS (
594
+ SELECT blocked_id FROM task_dependencies WHERE blocker_id = ?
595
+ UNION
596
+ SELECT d.blocked_id FROM task_dependencies d JOIN reachable r ON d.blocker_id = r.id
597
+ )
598
+ SELECT 1 AS found FROM reachable WHERE id = ? LIMIT 1`);
260
599
  const deleteDepStmt = db.prepare("DELETE FROM task_dependencies WHERE blocker_id = ? AND blocked_id = ?");
261
600
  const setConfigStmt = db.prepare("INSERT OR REPLACE INTO sync_config (key, value, updated_at) VALUES (?, ?, datetime('now'))");
262
601
  const checkParentExistsStmt = db.prepare("SELECT 1 FROM tasks WHERE id = ?");
@@ -297,15 +636,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
297
636
  const assignedBy = assigneeType === null ? null : (op.data.assignedBy ?? null);
298
637
  if (!existingRow) {
299
638
  // Create new task with the specified ID
300
- const now = new Date();
301
- insertTaskStmt.run(id, op.data.title, op.data.description, op.data.status, effectiveParentId, op.data.score, op.ts, op.ts, op.data.status === "done" ? now.toISOString() : null, assigneeType, assigneeId, assignedAt, assignedBy, JSON.stringify(op.data.metadata));
639
+ insertTaskStmt.run(id, op.data.title, op.data.description, op.data.status, effectiveParentId, op.data.score, op.data.createdAt ?? op.ts, op.ts, op.data.completedAt ?? null, assigneeType, assigneeId, assignedAt, assignedBy, JSON.stringify(op.data.metadata));
302
640
  imported++;
303
641
  }
304
642
  else {
305
643
  // Update if JSONL timestamp is newer than existing
306
644
  const existingTs = existingRow.updated_at;
307
645
  if (op.ts > existingTs) {
308
- updateTaskStmt.run(op.data.title, op.data.description, op.data.status, effectiveParentId, op.data.score, op.ts, op.data.status === "done" ? (existingRow.completed_at ?? new Date().toISOString()) : null, assigneeType, assigneeId, assignedAt, assignedBy, JSON.stringify(op.data.metadata), id);
646
+ updateTaskStmt.run(op.data.title, op.data.description, op.data.status, effectiveParentId, op.data.score, op.ts, op.data.completedAt !== undefined ? op.data.completedAt : (existingRow.completed_at ?? null), assigneeType, assigneeId, assignedAt, assignedBy, JSON.stringify(op.data.metadata), id);
309
647
  imported++;
310
648
  }
311
649
  else if (op.ts === existingTs) {
@@ -348,13 +686,23 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
348
686
  depsSkipped++;
349
687
  continue;
350
688
  }
689
+ // Check for cycles before inserting (RULE 4: no circular deps)
690
+ const wouldCycle = checkCycleStmt.get(op.blockedId, op.blockerId);
691
+ if (wouldCycle) {
692
+ depFailures.push({
693
+ blockerId: op.blockerId,
694
+ blockedId: op.blockedId,
695
+ error: "would create circular dependency"
696
+ });
697
+ continue;
698
+ }
351
699
  // Try to add dependency, track failures individually
352
700
  try {
353
701
  insertDepStmt.run(op.blockerId, op.blockedId, op.ts);
354
702
  depsAdded++;
355
703
  }
356
704
  catch (e) {
357
- // Dependency insert failed (e.g., foreign key constraint, circular dependency)
705
+ // Dependency insert failed (e.g., foreign key constraint)
358
706
  depFailures.push({
359
707
  blockerId: op.blockerId,
360
708
  blockedId: op.blockedId,
@@ -411,10 +759,22 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
411
759
  // Release: Commit on success, rollback on failure
412
760
  (_, exit) => Effect.sync(() => {
413
761
  if (Exit.isSuccess(exit)) {
414
- db.exec("COMMIT");
762
+ try {
763
+ db.exec("COMMIT");
764
+ }
765
+ catch {
766
+ // COMMIT failed — roll back to prevent a stuck open transaction
767
+ try {
768
+ db.exec("ROLLBACK");
769
+ }
770
+ catch { /* already rolled back */ }
771
+ }
415
772
  }
416
773
  else {
417
- db.exec("ROLLBACK");
774
+ try {
775
+ db.exec("ROLLBACK");
776
+ }
777
+ catch { /* no active transaction */ }
418
778
  }
419
779
  }));
420
780
  }),
@@ -619,7 +979,735 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
619
979
  return { before, after: compacted.length };
620
980
  }),
621
981
  setLastExport: (timestamp) => setConfig("last_export", timestamp.toISOString()),
622
- setLastImport: (timestamp) => setConfig("last_import", timestamp.toISOString())
982
+ setLastImport: (timestamp) => setConfig("last_import", timestamp.toISOString()),
983
+ exportLearnings: (path) => Effect.gen(function* () {
984
+ const filePath = resolve(path ?? DEFAULT_LEARNINGS_JSONL_PATH);
985
+ const learnings = yield* learningRepo.findAll();
986
+ const ops = learnings.map(learningToUpsertOp);
987
+ ops.sort((a, b) => a.ts.localeCompare(b.ts));
988
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
989
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
990
+ return { opCount: ops.length, path: filePath };
991
+ }),
992
+ importLearnings: (path) => Effect.gen(function* () {
993
+ const filePath = resolve(path ?? DEFAULT_LEARNINGS_JSONL_PATH);
994
+ const existing = yield* learningRepo.findAll();
995
+ const existingHashes = new Set(existing.map(l => contentHash(l.content, l.sourceType)));
996
+ const insertStmt = db.prepare("INSERT INTO learnings (content, source_type, source_ref, created_at, keywords, category) VALUES (?, ?, ?, ?, ?, ?)");
997
+ return yield* importEntityJsonl(filePath, LearningUpsertOpSchema, existingHashes, (ops) => {
998
+ db.exec("BEGIN IMMEDIATE");
999
+ try {
1000
+ let count = 0;
1001
+ for (const op of ops) {
1002
+ insertStmt.run(op.data.content, op.data.sourceType, op.data.sourceRef, op.ts, JSON.stringify(op.data.keywords), op.data.category);
1003
+ count++;
1004
+ }
1005
+ db.exec("COMMIT");
1006
+ return count;
1007
+ }
1008
+ catch (e) {
1009
+ try {
1010
+ db.exec("ROLLBACK");
1011
+ }
1012
+ catch { /* no active transaction */ }
1013
+ throw e;
1014
+ }
1015
+ });
1016
+ }),
1017
+ exportFileLearnings: (path) => Effect.gen(function* () {
1018
+ const filePath = resolve(path ?? DEFAULT_FILE_LEARNINGS_JSONL_PATH);
1019
+ const fileLearnings = yield* fileLearningRepo.findAll();
1020
+ const ops = fileLearnings.map(fileLearningToUpsertOp);
1021
+ ops.sort((a, b) => a.ts.localeCompare(b.ts));
1022
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
1023
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1024
+ return { opCount: ops.length, path: filePath };
1025
+ }),
1026
+ importFileLearnings: (path) => Effect.gen(function* () {
1027
+ const filePath = resolve(path ?? DEFAULT_FILE_LEARNINGS_JSONL_PATH);
1028
+ const existing = yield* fileLearningRepo.findAll();
1029
+ const existingHashes = new Set(existing.map(fl => contentHash(fl.filePattern, fl.note)));
1030
+ const insertStmt = db.prepare("INSERT INTO file_learnings (file_pattern, note, task_id, created_at) VALUES (?, ?, ?, ?)");
1031
+ return yield* importEntityJsonl(filePath, FileLearningUpsertOpSchema, existingHashes, (ops) => {
1032
+ db.exec("BEGIN IMMEDIATE");
1033
+ try {
1034
+ let count = 0;
1035
+ for (const op of ops) {
1036
+ insertStmt.run(op.data.filePattern, op.data.note, op.data.taskId, op.ts);
1037
+ count++;
1038
+ }
1039
+ db.exec("COMMIT");
1040
+ return count;
1041
+ }
1042
+ catch (e) {
1043
+ try {
1044
+ db.exec("ROLLBACK");
1045
+ }
1046
+ catch { /* no active transaction */ }
1047
+ throw e;
1048
+ }
1049
+ });
1050
+ }),
1051
+ exportAttempts: (path) => Effect.gen(function* () {
1052
+ const filePath = resolve(path ?? DEFAULT_ATTEMPTS_JSONL_PATH);
1053
+ const attempts = yield* attemptRepo.findAll();
1054
+ const ops = attempts.map(attemptToUpsertOp);
1055
+ ops.sort((a, b) => a.ts.localeCompare(b.ts));
1056
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
1057
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1058
+ return { opCount: ops.length, path: filePath };
1059
+ }),
1060
+ importAttempts: (path) => Effect.gen(function* () {
1061
+ const filePath = resolve(path ?? DEFAULT_ATTEMPTS_JSONL_PATH);
1062
+ const existing = yield* attemptRepo.findAll();
1063
+ const existingHashes = new Set(existing.map(a => contentHash(a.taskId, a.approach)));
1064
+ const insertStmt = db.prepare("INSERT INTO attempts (task_id, approach, outcome, reason, created_at) VALUES (?, ?, ?, ?, ?)");
1065
+ return yield* importEntityJsonl(filePath, AttemptUpsertOpSchema, existingHashes, (ops) => {
1066
+ db.exec("BEGIN IMMEDIATE");
1067
+ try {
1068
+ let count = 0;
1069
+ for (const op of ops) {
1070
+ insertStmt.run(op.data.taskId, op.data.approach, op.data.outcome, op.data.reason, op.ts);
1071
+ count++;
1072
+ }
1073
+ db.exec("COMMIT");
1074
+ return count;
1075
+ }
1076
+ catch (e) {
1077
+ try {
1078
+ db.exec("ROLLBACK");
1079
+ }
1080
+ catch { /* no active transaction */ }
1081
+ throw e;
1082
+ }
1083
+ });
1084
+ }),
1085
+ exportPins: (path) => Effect.gen(function* () {
1086
+ const filePath = resolve(path ?? DEFAULT_PINS_JSONL_PATH);
1087
+ const pins = yield* pinRepo.findAll();
1088
+ const ops = [...pins].map(pinToUpsertOp);
1089
+ ops.sort((a, b) => a.ts.localeCompare(b.ts));
1090
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
1091
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1092
+ return { opCount: ops.length, path: filePath };
1093
+ }),
1094
+ importPins: (path) => Effect.gen(function* () {
1095
+ const filePath = resolve(path ?? DEFAULT_PINS_JSONL_PATH);
1096
+ const existing = yield* pinRepo.findAll();
1097
+ const existingHashes = new Set([...existing].map(p => contentHash(p.id, p.content)));
1098
+ const upsertStmt = db.prepare(`INSERT INTO context_pins (id, content, created_at, updated_at)
1099
+ VALUES (?, ?, ?, ?)
1100
+ ON CONFLICT(id) DO UPDATE SET
1101
+ content = excluded.content,
1102
+ updated_at = excluded.updated_at`);
1103
+ const result = yield* importEntityJsonl(filePath, PinUpsertOpSchema, existingHashes, (ops) => {
1104
+ db.exec("BEGIN IMMEDIATE");
1105
+ try {
1106
+ let count = 0;
1107
+ for (const op of ops) {
1108
+ upsertStmt.run(op.id, op.data.content, op.ts, op.ts);
1109
+ count++;
1110
+ }
1111
+ db.exec("COMMIT");
1112
+ return count;
1113
+ }
1114
+ catch (e) {
1115
+ try {
1116
+ db.exec("ROLLBACK");
1117
+ }
1118
+ catch { /* no active transaction */ }
1119
+ throw e;
1120
+ }
1121
+ });
1122
+ // Sync imported pins to target files (pins exist to be written to context files)
1123
+ if (result.imported > 0) {
1124
+ const allPins = yield* pinRepo.findAll();
1125
+ const targetFiles = yield* pinRepo.getTargetFiles();
1126
+ const pinMap = new Map();
1127
+ for (const pin of allPins) {
1128
+ pinMap.set(pin.id, pin.content);
1129
+ }
1130
+ yield* Effect.try({
1131
+ try: () => {
1132
+ for (const targetFile of targetFiles) {
1133
+ const projectRoot = process.cwd();
1134
+ const resolvedPath = resolve(projectRoot, targetFile);
1135
+ if (!resolvedPath.startsWith(projectRoot + sep))
1136
+ continue;
1137
+ let fileContent = "";
1138
+ try {
1139
+ fileContent = readFileSync(resolvedPath, "utf-8");
1140
+ }
1141
+ catch { /* file doesn't exist yet */ }
1142
+ const updated = syncBlocks(fileContent, pinMap);
1143
+ if (updated !== fileContent) {
1144
+ const dir = dirname(resolvedPath);
1145
+ mkdirSync(dir, { recursive: true });
1146
+ const tempPath = `${resolvedPath}.tmp.${Date.now()}.${process.pid}`;
1147
+ try {
1148
+ writeFileSync(tempPath, updated, "utf-8");
1149
+ renameSync(tempPath, resolvedPath);
1150
+ }
1151
+ catch (e) {
1152
+ try {
1153
+ unlinkSync(tempPath);
1154
+ }
1155
+ catch { /* ignore cleanup error */ }
1156
+ throw e;
1157
+ }
1158
+ }
1159
+ }
1160
+ },
1161
+ catch: (cause) => new DatabaseError({ cause })
1162
+ });
1163
+ }
1164
+ return result;
1165
+ }),
1166
+ exportAnchors: (path) => Effect.gen(function* () {
1167
+ const filePath = resolve(path ?? DEFAULT_ANCHORS_JSONL_PATH);
1168
+ const anchors = yield* anchorRepo.findAll();
1169
+ // Build learning ID → content hash map for stable references
1170
+ const learnings = yield* learningRepo.findAll();
1171
+ const learningHashMap = new Map();
1172
+ for (const l of learnings) {
1173
+ learningHashMap.set(l.id, contentHash(l.content, l.sourceType));
1174
+ }
1175
+ const ops = anchors.map(a => anchorToUpsertOp(a, learningHashMap));
1176
+ ops.sort((a, b) => a.ts.localeCompare(b.ts));
1177
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
1178
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1179
+ return { opCount: ops.length, path: filePath };
1180
+ }),
1181
// Import learning anchors from the anchors JSONL file.
// Anchors reference their learning by the learning's content hash (not its
// local row ID); ops whose referenced learning is absent locally are counted
// as "orphaned" and reported as skipped.
importAnchors: (path) => Effect.gen(function* () {
    const filePath = resolve(path ?? DEFAULT_ANCHORS_JSONL_PATH);
    // Build existing anchor content hashes
    const existingAnchors = yield* anchorRepo.findAll();
    const existingLearnings = yield* learningRepo.findAll();
    // learning ID → learning content hash (used to compute existing anchor hashes)
    const learningHashMap = new Map();
    for (const l of existingLearnings) {
        learningHashMap.set(l.id, contentHash(l.content, l.sourceType));
    }
    // Dedup key for an anchor: learning hash + file path + anchor type/value.
    const existingHashes = new Set(existingAnchors.map(a => {
        const lHash = learningHashMap.get(a.learningId) ?? "";
        return contentHash(lHash, a.filePath, a.anchorType, a.anchorValue);
    }));
    // Build reverse map: learning content hash → learning ID (for resolving references)
    const hashToLearningId = new Map();
    for (const l of existingLearnings) {
        hashToLearningId.set(contentHash(l.content, l.sourceType), l.id);
    }
    const insertStmt = db.prepare(`INSERT INTO learning_anchors
        (learning_id, anchor_type, anchor_value, file_path, symbol_fqname,
         line_start, line_end, content_hash, content_preview, status, pinned, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    // Counted across all batches; the batch callback below closes over it.
    let orphanedCount = 0;
    const result = yield* importEntityJsonl(filePath, AnchorUpsertOpSchema, existingHashes, (ops) => {
        // Each batch of new ops is inserted inside one immediate transaction.
        db.exec("BEGIN IMMEDIATE");
        try {
            let count = 0;
            for (const op of ops) {
                const learningId = hashToLearningId.get(op.data.learningContentHash);
                if (learningId === undefined) {
                    // Referenced learning is not in this database — skip the anchor.
                    orphanedCount++;
                    continue;
                }
                insertStmt.run(learningId, op.data.anchorType, op.data.anchorValue, op.data.filePath, op.data.symbolFqname, op.data.lineStart, op.data.lineEnd, op.data.contentHash, op.data.contentPreview, op.data.status, op.data.pinned ? 1 : 0, op.ts);
                count++;
            }
            db.exec("COMMIT");
            return count;
        }
        catch (e) {
            try {
                db.exec("ROLLBACK");
            }
            catch { /* no active transaction */ }
            throw e;
        }
    });
    // Fold orphans into the skipped total so callers see a consistent count.
    return { imported: result.imported, skipped: result.skipped + orphanedCount };
}),
1230
+ exportEdges: (path) => Effect.gen(function* () {
1231
+ const filePath = resolve(path ?? DEFAULT_EDGES_JSONL_PATH);
1232
+ const edges = yield* edgeRepo.findAll();
1233
+ // Only export active (non-invalidated) edges
1234
+ const activeEdges = edges.filter(e => e.invalidatedAt === null);
1235
+ const ops = activeEdges.map(edgeToUpsertOp);
1236
+ ops.sort((a, b) => a.ts.localeCompare(b.ts));
1237
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
1238
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1239
+ return { opCount: ops.length, path: filePath };
1240
+ }),
1241
// Import learning-graph edges from the edges JSONL file.
// An edge's dedup identity is its type plus both endpoints — weight and
// metadata are not part of the key, so a re-imported edge with different
// weight is treated as already present.
importEdges: (path) => Effect.gen(function* () {
    const filePath = resolve(path ?? DEFAULT_EDGES_JSONL_PATH);
    const existingEdges = yield* edgeRepo.findAll();
    const existingHashes = new Set(existingEdges.map(e => contentHash(e.edgeType, e.sourceType, e.sourceId, e.targetType, e.targetId)));
    const insertStmt = db.prepare(`INSERT INTO learning_edges
        (edge_type, source_type, source_id, target_type, target_id, weight, metadata, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?)`);
    return yield* importEntityJsonl(filePath, EdgeUpsertOpSchema, existingHashes, (ops) => {
        // All inserts for a batch run inside one immediate transaction.
        db.exec("BEGIN IMMEDIATE");
        try {
            let count = 0;
            for (const op of ops) {
                // metadata is stored as a JSON string column.
                insertStmt.run(op.data.edgeType, op.data.sourceType, op.data.sourceId, op.data.targetType, op.data.targetId, op.data.weight, JSON.stringify(op.data.metadata), op.ts);
                count++;
            }
            db.exec("COMMIT");
            return count;
        }
        catch (e) {
            try {
                db.exec("ROLLBACK");
            }
            catch { /* no active transaction */ }
            throw e;
        }
    });
}),
1268
+ exportDocs: (path) => Effect.gen(function* () {
1269
+ const filePath = resolve(path ?? DEFAULT_DOCS_JSONL_PATH);
1270
+ const docs = yield* docRepo.findAll();
1271
+ // Build doc ID → "name:version" key map for stable cross-machine references
1272
+ const docKeyMap = new Map();
1273
+ for (const d of docs) {
1274
+ docKeyMap.set(d.id, `${d.name}:${d.version}`);
1275
+ }
1276
+ const docOps = docs.map(d => docToUpsertOp(d, docKeyMap));
1277
+ // Get doc links
1278
+ const docLinks = yield* docRepo.getAllLinks();
1279
+ const docLinkOps = docLinks
1280
+ .map(l => docLinkToUpsertOp(l, docKeyMap))
1281
+ .filter((op) => op !== null);
1282
+ // Get task-doc links via raw SQL (no getAllTaskLinks method)
1283
+ const taskDocLinkRows = yield* Effect.try({
1284
+ try: () => db.prepare("SELECT * FROM task_doc_links").all(),
1285
+ catch: (cause) => new DatabaseError({ cause })
1286
+ });
1287
+ const taskDocLinkOps = taskDocLinkRows
1288
+ .map(row => taskDocLinkToUpsertOp({ id: row.id, taskId: row.task_id, docId: row.doc_id, linkType: row.link_type, createdAt: new Date(row.created_at) }, docKeyMap))
1289
+ .filter((op) => op !== null);
1290
+ // Get invariants
1291
+ const invariants = yield* docRepo.findInvariants();
1292
+ const invariantOps = invariants
1293
+ .map(inv => invariantToUpsertOp(inv, docKeyMap))
1294
+ .filter((op) => op !== null);
1295
+ // Combine all ops, sort by timestamp
1296
+ const allOps = [...docOps, ...docLinkOps, ...taskDocLinkOps, ...invariantOps];
1297
+ allOps.sort((a, b) => a.ts.localeCompare(b.ts));
1298
+ const jsonl = allOps.map(op => JSON.stringify(op)).join("\n");
1299
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1300
+ return { opCount: allOps.length, path: filePath };
1301
+ }),
1302
// Import docs and their sub-entities (doc links, task-doc links, invariants)
// from the docs JSONL file. All cross-references use the stable
// "name:version" doc key; everything is applied inside one transaction.
importDocs: (path) => Effect.gen(function* () {
    const filePath = resolve(path ?? DEFAULT_DOCS_JSONL_PATH);
    const importDocsFileExists = yield* fileExists(filePath);
    if (!importDocsFileExists)
        return EMPTY_ENTITY_IMPORT_RESULT;
    const content = yield* Effect.tryPromise({
        try: () => readFile(filePath, "utf-8"),
        catch: (cause) => new DatabaseError({ cause })
    });
    const lines = content.trim().split("\n").filter(Boolean);
    if (lines.length === 0)
        return EMPTY_ENTITY_IMPORT_RESULT;
    // Parse all ops, group by type
    const docOps = [];
    const docLinkOps = [];
    const taskDocLinkOps = [];
    const invariantOps = [];
    for (const line of lines) {
        const parsed = yield* Effect.try({
            try: () => JSON.parse(line),
            catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
        });
        // Unknown op types are silently ignored (forward compatibility).
        const opType = parsed.op;
        if (opType === "doc_upsert") {
            docOps.push(yield* Effect.try({
                try: () => Schema.decodeUnknownSync(DocUpsertOpSchema)(parsed),
                catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
            }));
        }
        else if (opType === "doc_link_upsert") {
            docLinkOps.push(yield* Effect.try({
                try: () => Schema.decodeUnknownSync(DocLinkUpsertOpSchema)(parsed),
                catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
            }));
        }
        else if (opType === "task_doc_link_upsert") {
            taskDocLinkOps.push(yield* Effect.try({
                try: () => Schema.decodeUnknownSync(TaskDocLinkUpsertOpSchema)(parsed),
                catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
            }));
        }
        else if (opType === "invariant_upsert") {
            invariantOps.push(yield* Effect.try({
                try: () => Schema.decodeUnknownSync(InvariantUpsertOpSchema)(parsed),
                catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
            }));
        }
    }
    // Pre-dedup sub-entity ops by contentHash (keep latest timestamp per hash)
    const dedupByHash = (ops) => {
        const map = new Map();
        for (const op of ops) {
            const existing = map.get(op.contentHash);
            if (!existing || op.ts > existing.ts)
                map.set(op.contentHash, op);
        }
        return [...map.values()];
    };
    const dedupedDocLinkOps = dedupByHash(docLinkOps);
    const dedupedTaskDocLinkOps = dedupByHash(taskDocLinkOps);
    const dedupedInvariantOps = dedupByHash(invariantOps);
    // Build existing doc hashes for dedup
    const existingDocs = yield* docRepo.findAll();
    const existingDocHashes = new Set(existingDocs.map(d => contentHash(d.kind, d.name, String(d.version))));
    // Prepare statements
    const insertDocStmt = db.prepare(`INSERT OR IGNORE INTO docs (hash, kind, name, title, version, status, file_path, parent_doc_id, locked_at, created_at, metadata)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    const findDocByNameVersionStmt = db.prepare("SELECT id FROM docs WHERE name = ? AND version = ?");
    const checkDocLinkStmt = db.prepare("SELECT 1 FROM doc_links WHERE from_doc_id = ? AND to_doc_id = ? AND link_type = ?");
    const insertDocLinkStmt = db.prepare("INSERT INTO doc_links (from_doc_id, to_doc_id, link_type, created_at) VALUES (?, ?, ?, ?)");
    const checkTaskDocLinkStmt = db.prepare("SELECT 1 FROM task_doc_links WHERE task_id = ? AND doc_id = ? AND link_type = ?");
    const insertTaskDocLinkStmt = db.prepare("INSERT INTO task_doc_links (task_id, doc_id, link_type, created_at) VALUES (?, ?, ?, ?)");
    const findInvariantStmt = db.prepare("SELECT 1 FROM invariants WHERE id = ?");
    const insertInvariantStmt = db.prepare(`INSERT INTO invariants (id, rule, enforcement, doc_id, subsystem, test_ref, lint_rule, prompt_ref, status, created_at, metadata)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    // Hoist parent resolution UPDATE statement outside the loop
    const updateParentDocStmt = db.prepare("UPDATE docs SET parent_doc_id = ? WHERE id = ?");
    return yield* Effect.try({
        try: () => {
            // One transaction covers docs, parent fixups, links, and invariants.
            db.exec("BEGIN IMMEDIATE");
            try {
                let imported = 0;
                let skipped = 0;
                // 1. Import docs (dedup by content hash = kind:name:version)
                // Track newly inserted doc keys for parent resolution
                const newDocKeyToId = new Map();
                const insertedDocKeys = new Set();
                for (const op of docOps) {
                    if (existingDocHashes.has(op.contentHash)) {
                        // Still populate the key map for link resolution
                        const existing = findDocByNameVersionStmt.get(op.data.name, op.data.version);
                        if (existing)
                            newDocKeyToId.set(`${op.data.name}:${op.data.version}`, existing.id);
                        skipped++;
                        continue;
                    }
                    // Check if doc already exists by name+version (handles kind mismatch with UNIQUE index)
                    const existing = findDocByNameVersionStmt.get(op.data.name, op.data.version);
                    if (existing) {
                        newDocKeyToId.set(`${op.data.name}:${op.data.version}`, existing.id);
                        skipped++;
                        continue;
                    }
                    // INSERT OR IGNORE handles race with UNIQUE(name, version)
                    const result = insertDocStmt.run(op.data.hash, op.data.kind, op.data.name, op.data.title, op.data.version, op.data.status, op.data.filePath, null, // parent_doc_id resolved after all docs inserted
                    op.data.lockedAt ?? null, op.ts, JSON.stringify(op.data.metadata));
                    if (result.changes > 0) {
                        const docKey = `${op.data.name}:${op.data.version}`;
                        newDocKeyToId.set(docKey, result.lastInsertRowid);
                        insertedDocKeys.add(docKey);
                        imported++;
                    }
                    else {
                        // INSERT OR IGNORE did nothing — row already exists
                        const row = findDocByNameVersionStmt.get(op.data.name, op.data.version);
                        if (row)
                            newDocKeyToId.set(`${op.data.name}:${op.data.version}`, row.id);
                        skipped++;
                    }
                }
                // Helper: resolve docKey (name:version) to doc ID
                // Handles names that themselves contain ":" — only the last
                // segment is treated as the version number.
                const resolveDocKey = (docKey) => {
                    const newId = newDocKeyToId.get(docKey);
                    if (newId !== undefined)
                        return newId;
                    const parts = docKey.split(":");
                    if (parts.length < 2)
                        return undefined;
                    const name = parts.slice(0, -1).join(":");
                    const version = parseInt(parts[parts.length - 1], 10);
                    if (isNaN(version))
                        return undefined;
                    const row = findDocByNameVersionStmt.get(name, version);
                    return row?.id;
                };
                // Resolve parent doc references — only for newly inserted docs
                // (existing docs keep whatever parent they already had).
                for (const op of docOps) {
                    if (!op.data.parentDocKey)
                        continue;
                    const docKey = `${op.data.name}:${op.data.version}`;
                    if (!insertedDocKeys.has(docKey))
                        continue;
                    const docId = resolveDocKey(docKey);
                    const parentId = resolveDocKey(op.data.parentDocKey);
                    if (docId && parentId) {
                        updateParentDocStmt.run(parentId, docId);
                    }
                }
                // 2. Import doc links
                for (const op of dedupedDocLinkOps) {
                    const fromId = resolveDocKey(op.data.fromDocKey);
                    const toId = resolveDocKey(op.data.toDocKey);
                    if (!fromId || !toId) {
                        // One endpoint is unresolvable locally — skip the link.
                        skipped++;
                        continue;
                    }
                    if (checkDocLinkStmt.get(fromId, toId, op.data.linkType)) {
                        skipped++;
                        continue;
                    }
                    insertDocLinkStmt.run(fromId, toId, op.data.linkType, op.ts);
                    imported++;
                }
                // 3. Import task-doc links
                for (const op of dedupedTaskDocLinkOps) {
                    const docId = resolveDocKey(op.data.docKey);
                    if (!docId) {
                        skipped++;
                        continue;
                    }
                    if (checkTaskDocLinkStmt.get(op.data.taskId, docId, op.data.linkType)) {
                        skipped++;
                        continue;
                    }
                    try {
                        insertTaskDocLinkStmt.run(op.data.taskId, docId, op.data.linkType, op.ts);
                        imported++;
                    }
                    catch {
                        // Skip FK failures (task may not exist)
                        skipped++;
                    }
                }
                // 4. Import invariants (use op.id as the canonical invariant ID)
                for (const op of dedupedInvariantOps) {
                    if (findInvariantStmt.get(op.id)) {
                        skipped++;
                        continue;
                    }
                    const docId = resolveDocKey(op.data.docKey);
                    if (!docId) {
                        skipped++;
                        continue;
                    }
                    insertInvariantStmt.run(op.id, op.data.rule, op.data.enforcement, docId, op.data.subsystem, op.data.testRef, op.data.lintRule, op.data.promptRef, op.data.status, op.ts, JSON.stringify(op.data.metadata));
                    imported++;
                }
                db.exec("COMMIT");
                return { imported, skipped };
            }
            catch (e) {
                try {
                    db.exec("ROLLBACK");
                }
                catch { /* no active transaction */ }
                throw e;
            }
        },
        catch: (cause) => new DatabaseError({ cause })
    });
}),
1513
+ exportLabels: (path) => Effect.gen(function* () {
1514
+ const filePath = resolve(path ?? DEFAULT_LABELS_JSONL_PATH);
1515
+ // Read labels via raw SQL (no repository layer exists)
1516
+ const labels = yield* Effect.try({
1517
+ try: () => db.prepare("SELECT * FROM task_labels").all(),
1518
+ catch: (cause) => new DatabaseError({ cause })
1519
+ });
1520
+ const labelNameMap = new Map();
1521
+ for (const l of labels) {
1522
+ labelNameMap.set(l.id, l.name);
1523
+ }
1524
+ const labelOps = labels.map(labelRowToUpsertOp);
1525
+ // Read label assignments
1526
+ const assignments = yield* Effect.try({
1527
+ try: () => db.prepare("SELECT * FROM task_label_assignments").all(),
1528
+ catch: (cause) => new DatabaseError({ cause })
1529
+ });
1530
+ const assignmentOps = assignments
1531
+ .map(a => labelAssignmentToUpsertOp(a, labelNameMap))
1532
+ .filter((op) => op !== null);
1533
+ const allOps = [...labelOps, ...assignmentOps];
1534
+ allOps.sort((a, b) => a.ts.localeCompare(b.ts));
1535
+ const jsonl = allOps.map(op => JSON.stringify(op)).join("\n");
1536
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1537
+ return { opCount: allOps.length, path: filePath };
1538
+ }),
1539
// Import task labels and label assignments from the labels JSONL file.
// Labels are deduplicated case-insensitively by name; assignments resolve
// their label by name and are skipped when the label or task is missing.
importLabels: (path) => Effect.gen(function* () {
    const filePath = resolve(path ?? DEFAULT_LABELS_JSONL_PATH);
    const importLabelsFileExists = yield* fileExists(filePath);
    if (!importLabelsFileExists)
        return EMPTY_ENTITY_IMPORT_RESULT;
    const content = yield* Effect.tryPromise({
        try: () => readFile(filePath, "utf-8"),
        catch: (cause) => new DatabaseError({ cause })
    });
    const lines = content.trim().split("\n").filter(Boolean);
    if (lines.length === 0)
        return EMPTY_ENTITY_IMPORT_RESULT;
    // Parse ops and split them by type; unknown op types are ignored.
    const labelOps = [];
    const assignmentOps = [];
    for (const line of lines) {
        const parsed = yield* Effect.try({
            try: () => JSON.parse(line),
            catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
        });
        const opType = parsed.op;
        if (opType === "label_upsert") {
            labelOps.push(yield* Effect.try({
                try: () => Schema.decodeUnknownSync(LabelUpsertOpSchema)(parsed),
                catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
            }));
        }
        else if (opType === "label_assignment_upsert") {
            assignmentOps.push(yield* Effect.try({
                try: () => Schema.decodeUnknownSync(LabelAssignmentUpsertOpSchema)(parsed),
                catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
            }));
        }
    }
    // Build existing label hashes
    const existingLabels = yield* Effect.try({
        try: () => db.prepare("SELECT * FROM task_labels").all(),
        catch: (cause) => new DatabaseError({ cause })
    });
    // Dedup key: lowercased label name.
    const existingLabelHashes = new Set(existingLabels.map(l => contentHash(l.name.toLowerCase())));
    const findLabelByNameStmt = db.prepare("SELECT id FROM task_labels WHERE lower(name) = lower(?)");
    const insertLabelStmt = db.prepare("INSERT INTO task_labels (name, color, created_at, updated_at) VALUES (?, ?, ?, ?)");
    const checkAssignmentStmt = db.prepare("SELECT 1 FROM task_label_assignments WHERE task_id = ? AND label_id = ?");
    const insertAssignmentStmt = db.prepare("INSERT INTO task_label_assignments (task_id, label_id, created_at) VALUES (?, ?, ?)");
    return yield* Effect.try({
        try: () => {
            // One transaction covers both labels and assignments.
            db.exec("BEGIN IMMEDIATE");
            try {
                let imported = 0;
                let skipped = 0;
                const newLabelNameToId = new Map();
                // 1. Import labels (dedup by lower(name))
                for (const op of labelOps) {
                    if (existingLabelHashes.has(op.contentHash)) {
                        // Still populate the name map for assignment resolution
                        const existing = findLabelByNameStmt.get(op.data.name);
                        if (existing)
                            newLabelNameToId.set(op.data.name.toLowerCase(), existing.id);
                        skipped++;
                        continue;
                    }
                    const existing = findLabelByNameStmt.get(op.data.name);
                    if (existing) {
                        newLabelNameToId.set(op.data.name.toLowerCase(), existing.id);
                        skipped++;
                        continue;
                    }
                    const result = insertLabelStmt.run(op.data.name, op.data.color, op.ts, op.ts);
                    newLabelNameToId.set(op.data.name.toLowerCase(), result.lastInsertRowid);
                    imported++;
                }
                // Helper: resolve label name to ID
                const resolveLabelId = (name) => {
                    const newId = newLabelNameToId.get(name.toLowerCase());
                    if (newId !== undefined)
                        return newId;
                    const row = findLabelByNameStmt.get(name);
                    return row?.id;
                };
                // 2. Import label assignments
                for (const op of assignmentOps) {
                    const labelId = resolveLabelId(op.data.labelName);
                    if (!labelId) {
                        // Referenced label does not exist locally — skip.
                        skipped++;
                        continue;
                    }
                    if (checkAssignmentStmt.get(op.data.taskId, labelId)) {
                        skipped++;
                        continue;
                    }
                    try {
                        insertAssignmentStmt.run(op.data.taskId, labelId, op.ts);
                        imported++;
                    }
                    catch {
                        // Skip FK failures (task may not exist)
                        skipped++;
                    }
                }
                db.exec("COMMIT");
                return { imported, skipped };
            }
            catch (e) {
                try {
                    db.exec("ROLLBACK");
                }
                catch { /* no active transaction */ }
                throw e;
            }
        },
        catch: (cause) => new DatabaseError({ cause })
    });
}),
1651
+ exportAll: (options) => Effect.gen(function* () {
1652
+ const tasks = yield* syncService.export();
1653
+ const learnings = options?.learnings !== false
1654
+ ? yield* syncService.exportLearnings()
1655
+ : undefined;
1656
+ const fileLearnings = options?.fileLearnings !== false
1657
+ ? yield* syncService.exportFileLearnings()
1658
+ : undefined;
1659
+ const attempts = options?.attempts !== false
1660
+ ? yield* syncService.exportAttempts()
1661
+ : undefined;
1662
+ const pins = options?.pins !== false
1663
+ ? yield* syncService.exportPins()
1664
+ : undefined;
1665
+ const anchors = options?.anchors !== false
1666
+ ? yield* syncService.exportAnchors()
1667
+ : undefined;
1668
+ const edges = options?.edges !== false
1669
+ ? yield* syncService.exportEdges()
1670
+ : undefined;
1671
+ const docs = options?.docs !== false
1672
+ ? yield* syncService.exportDocs()
1673
+ : undefined;
1674
+ const labels = options?.labels !== false
1675
+ ? yield* syncService.exportLabels()
1676
+ : undefined;
1677
+ return { tasks, learnings, fileLearnings, attempts, pins, anchors, edges, docs, labels };
1678
+ }),
1679
// Import every entity family in dependency order:
// tasks → learnings → anchors → edges → file-learnings → attempts →
// pins → docs → labels. Disabled families yield `undefined`.
importAll: (options) => Effect.gen(function* () {
    const tasks = yield* syncService.import();
    let learnings;
    if (options?.learnings !== false) {
        learnings = yield* syncService.importLearnings();
    }
    // Anchors reference learnings (FK), so they are also skipped whenever
    // learnings themselves are disabled.
    let anchors;
    if (options?.anchors !== false && options?.learnings !== false) {
        anchors = yield* syncService.importAnchors();
    }
    // Edges are a generic graph layer with no FK dependency on learnings.
    let edges;
    if (options?.edges !== false) {
        edges = yield* syncService.importEdges();
    }
    let fileLearnings;
    if (options?.fileLearnings !== false) {
        fileLearnings = yield* syncService.importFileLearnings();
    }
    let attempts;
    if (options?.attempts !== false) {
        attempts = yield* syncService.importAttempts();
    }
    let pins;
    if (options?.pins !== false) {
        pins = yield* syncService.importPins();
    }
    let docs;
    if (options?.docs !== false) {
        docs = yield* syncService.importDocs();
    }
    let labels;
    if (options?.labels !== false) {
        labels = yield* syncService.importLabels();
    }
    return { tasks, learnings, fileLearnings, attempts, pins, anchors, edges, docs, labels };
})
623
1710
  };
1711
+ return syncService;
624
1712
  }));
625
1713
  //# sourceMappingURL=sync-service.js.map