@jamesaphoenix/tx-core 0.8.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (517)
  1. package/dist/db.d.ts +6 -6
  2. package/dist/db.d.ts.map +1 -1
  3. package/dist/db.js +29 -2
  4. package/dist/db.js.map +1 -1
  5. package/dist/index.d.ts +8 -3
  6. package/dist/index.d.ts.map +1 -1
  7. package/dist/index.js +8 -3
  8. package/dist/index.js.map +1 -1
  9. package/dist/internal/cycle-scan-service-impl.d.ts +28 -0
  10. package/dist/internal/cycle-scan-service-impl.d.ts.map +1 -0
  11. package/dist/internal/cycle-scan-service-impl.js +877 -0
  12. package/dist/internal/cycle-scan-service-impl.js.map +1 -0
  13. package/dist/internal/doc-service-impl.d.ts +42 -0
  14. package/dist/internal/doc-service-impl.d.ts.map +1 -0
  15. package/dist/internal/doc-service-impl.js +812 -0
  16. package/dist/internal/doc-service-impl.js.map +1 -0
  17. package/dist/internal/embedding-service-impl.d.ts +202 -0
  18. package/dist/internal/embedding-service-impl.d.ts.map +1 -0
  19. package/dist/internal/embedding-service-impl.js +466 -0
  20. package/dist/internal/embedding-service-impl.js.map +1 -0
  21. package/dist/internal/memory-service-impl.d.ts +49 -0
  22. package/dist/internal/memory-service-impl.d.ts.map +1 -0
  23. package/dist/internal/memory-service-impl.js +1061 -0
  24. package/dist/internal/memory-service-impl.js.map +1 -0
  25. package/dist/internal/spec-trace-service-impl.d.ts +50 -0
  26. package/dist/internal/spec-trace-service-impl.d.ts.map +1 -0
  27. package/dist/internal/spec-trace-service-impl.js +707 -0
  28. package/dist/internal/spec-trace-service-impl.js.map +1 -0
  29. package/dist/internal/sync/service-impl.d.ts +41 -0
  30. package/dist/internal/sync/service-impl.d.ts.map +1 -0
  31. package/dist/{services/sync-service.js → internal/sync/service-impl.js} +954 -499
  32. package/dist/internal/sync/service-impl.js.map +1 -0
  33. package/dist/layer.d.ts +8 -5
  34. package/dist/layer.d.ts.map +1 -1
  35. package/dist/layer.js +22 -10
  36. package/dist/layer.js.map +1 -1
  37. package/dist/mappers/anchor.d.ts.map +1 -1
  38. package/dist/mappers/anchor.js +5 -4
  39. package/dist/mappers/anchor.js.map +1 -1
  40. package/dist/mappers/attempt.d.ts.map +1 -1
  41. package/dist/mappers/attempt.js +2 -1
  42. package/dist/mappers/attempt.js.map +1 -1
  43. package/dist/mappers/candidate.d.ts.map +1 -1
  44. package/dist/mappers/candidate.js +2 -1
  45. package/dist/mappers/candidate.js.map +1 -1
  46. package/dist/mappers/claim.d.ts.map +1 -1
  47. package/dist/mappers/claim.js +2 -1
  48. package/dist/mappers/claim.js.map +1 -1
  49. package/dist/mappers/doc.d.ts.map +1 -1
  50. package/dist/mappers/doc.js +9 -8
  51. package/dist/mappers/doc.js.map +1 -1
  52. package/dist/mappers/edge.d.ts.map +1 -1
  53. package/dist/mappers/edge.js +4 -3
  54. package/dist/mappers/edge.js.map +1 -1
  55. package/dist/mappers/file-learning.d.ts.map +1 -1
  56. package/dist/mappers/file-learning.js +2 -1
  57. package/dist/mappers/file-learning.js.map +1 -1
  58. package/dist/mappers/index.d.ts +1 -0
  59. package/dist/mappers/index.d.ts.map +1 -1
  60. package/dist/mappers/index.js +2 -0
  61. package/dist/mappers/index.js.map +1 -1
  62. package/dist/mappers/learning.d.ts.map +1 -1
  63. package/dist/mappers/learning.js +4 -3
  64. package/dist/mappers/learning.js.map +1 -1
  65. package/dist/mappers/memory.d.ts.map +1 -1
  66. package/dist/mappers/memory.js +7 -6
  67. package/dist/mappers/memory.js.map +1 -1
  68. package/dist/mappers/orchestrator-state.d.ts.map +1 -1
  69. package/dist/mappers/orchestrator-state.js +2 -1
  70. package/dist/mappers/orchestrator-state.js.map +1 -1
  71. package/dist/mappers/pin.d.ts.map +1 -1
  72. package/dist/mappers/pin.js +2 -1
  73. package/dist/mappers/pin.js.map +1 -1
  74. package/dist/mappers/run.d.ts.map +1 -1
  75. package/dist/mappers/run.js +4 -3
  76. package/dist/mappers/run.js.map +1 -1
  77. package/dist/mappers/spec-trace.d.ts +11 -0
  78. package/dist/mappers/spec-trace.d.ts.map +1 -0
  79. package/dist/mappers/spec-trace.js +59 -0
  80. package/dist/mappers/spec-trace.js.map +1 -0
  81. package/dist/mappers/task.d.ts.map +1 -1
  82. package/dist/mappers/task.js +4 -4
  83. package/dist/mappers/task.js.map +1 -1
  84. package/dist/mappers/worker.d.ts.map +1 -1
  85. package/dist/mappers/worker.js +2 -1
  86. package/dist/mappers/worker.js.map +1 -1
  87. package/dist/migrations-embedded.d.ts.map +1 -1
  88. package/dist/migrations-embedded.js +15 -0
  89. package/dist/migrations-embedded.js.map +1 -1
  90. package/dist/repo/anchor-repo.d.ts +2 -2
  91. package/dist/repo/anchor-repo.d.ts.map +1 -1
  92. package/dist/repo/anchor-repo.js +18 -17
  93. package/dist/repo/anchor-repo.js.map +1 -1
  94. package/dist/repo/attempt-repo.d.ts.map +1 -1
  95. package/dist/repo/attempt-repo.js +11 -10
  96. package/dist/repo/attempt-repo.js.map +1 -1
  97. package/dist/repo/candidate-repo.d.ts.map +1 -1
  98. package/dist/repo/candidate-repo.js +8 -7
  99. package/dist/repo/candidate-repo.js.map +1 -1
  100. package/dist/repo/claim-repo.d.ts +4 -4
  101. package/dist/repo/claim-repo.d.ts.map +1 -1
  102. package/dist/repo/claim-repo.js +14 -13
  103. package/dist/repo/claim-repo.js.map +1 -1
  104. package/dist/repo/compaction-repo.d.ts +4 -4
  105. package/dist/repo/compaction-repo.d.ts.map +1 -1
  106. package/dist/repo/compaction-repo.js +7 -6
  107. package/dist/repo/compaction-repo.js.map +1 -1
  108. package/dist/repo/deduplication-repo.d.ts.map +1 -1
  109. package/dist/repo/deduplication-repo.js +80 -57
  110. package/dist/repo/deduplication-repo.js.map +1 -1
  111. package/dist/repo/dep-repo.d.ts.map +1 -1
  112. package/dist/repo/dep-repo.js +90 -88
  113. package/dist/repo/dep-repo.js.map +1 -1
  114. package/dist/repo/doc-repo.d.ts +2 -63
  115. package/dist/repo/doc-repo.d.ts.map +1 -1
  116. package/dist/repo/doc-repo.js +6 -4
  117. package/dist/repo/doc-repo.js.map +1 -1
  118. package/dist/repo/doc-repo.types.d.ts +64 -0
  119. package/dist/repo/doc-repo.types.d.ts.map +1 -0
  120. package/dist/repo/doc-repo.types.js +2 -0
  121. package/dist/repo/doc-repo.types.js.map +1 -0
  122. package/dist/repo/edge-repo.d.ts.map +1 -1
  123. package/dist/repo/edge-repo.js +91 -81
  124. package/dist/repo/edge-repo.js.map +1 -1
  125. package/dist/repo/file-learning-repo.d.ts.map +1 -1
  126. package/dist/repo/file-learning-repo.js +7 -6
  127. package/dist/repo/file-learning-repo.js.map +1 -1
  128. package/dist/repo/guard-repo.d.ts +4 -4
  129. package/dist/repo/guard-repo.d.ts.map +1 -1
  130. package/dist/repo/guard-repo.js +8 -7
  131. package/dist/repo/guard-repo.js.map +1 -1
  132. package/dist/repo/index.d.ts +1 -0
  133. package/dist/repo/index.d.ts.map +1 -1
  134. package/dist/repo/index.js +1 -0
  135. package/dist/repo/index.js.map +1 -1
  136. package/dist/repo/label-repo.d.ts +4 -4
  137. package/dist/repo/label-repo.d.ts.map +1 -1
  138. package/dist/repo/label-repo.js +9 -8
  139. package/dist/repo/label-repo.js.map +1 -1
  140. package/dist/repo/learning-repo.d.ts +2 -2
  141. package/dist/repo/learning-repo.d.ts.map +1 -1
  142. package/dist/repo/learning-repo.js +37 -31
  143. package/dist/repo/learning-repo.js.map +1 -1
  144. package/dist/repo/memory-repo/document.d.ts +4 -0
  145. package/dist/repo/memory-repo/document.d.ts.map +1 -0
  146. package/dist/repo/memory-repo/document.js +205 -0
  147. package/dist/repo/memory-repo/document.js.map +1 -0
  148. package/dist/repo/memory-repo/link.d.ts +4 -0
  149. package/dist/repo/memory-repo/link.d.ts.map +1 -0
  150. package/dist/repo/memory-repo/link.js +124 -0
  151. package/dist/repo/memory-repo/link.js.map +1 -0
  152. package/dist/repo/memory-repo/property.d.ts +4 -0
  153. package/dist/repo/memory-repo/property.d.ts.map +1 -0
  154. package/dist/repo/memory-repo/property.js +56 -0
  155. package/dist/repo/memory-repo/property.js.map +1 -0
  156. package/dist/repo/memory-repo/shared.d.ts +28 -0
  157. package/dist/repo/memory-repo/shared.d.ts.map +1 -0
  158. package/dist/repo/memory-repo/shared.js +63 -0
  159. package/dist/repo/memory-repo/shared.js.map +1 -0
  160. package/dist/repo/memory-repo/source.d.ts +4 -0
  161. package/dist/repo/memory-repo/source.d.ts.map +1 -0
  162. package/dist/repo/memory-repo/source.js +58 -0
  163. package/dist/repo/memory-repo/source.js.map +1 -0
  164. package/dist/repo/memory-repo.d.ts +14 -10
  165. package/dist/repo/memory-repo.d.ts.map +1 -1
  166. package/dist/repo/memory-repo.js +8 -527
  167. package/dist/repo/memory-repo.js.map +1 -1
  168. package/dist/repo/message-repo.d.ts.map +1 -1
  169. package/dist/repo/message-repo.js +9 -8
  170. package/dist/repo/message-repo.js.map +1 -1
  171. package/dist/repo/orchestrator-state-repo.d.ts.map +1 -1
  172. package/dist/repo/orchestrator-state-repo.js +2 -1
  173. package/dist/repo/orchestrator-state-repo.js.map +1 -1
  174. package/dist/repo/pin-repo.d.ts.map +1 -1
  175. package/dist/repo/pin-repo.js +5 -4
  176. package/dist/repo/pin-repo.js.map +1 -1
  177. package/dist/repo/run-repo.d.ts.map +1 -1
  178. package/dist/repo/run-repo.js +11 -10
  179. package/dist/repo/run-repo.js.map +1 -1
  180. package/dist/repo/spec-trace-repo.d.ts +9 -0
  181. package/dist/repo/spec-trace-repo.d.ts.map +1 -0
  182. package/dist/repo/spec-trace-repo.filter.d.ts +3 -0
  183. package/dist/repo/spec-trace-repo.filter.d.ts.map +1 -0
  184. package/dist/repo/spec-trace-repo.filter.js +13 -0
  185. package/dist/repo/spec-trace-repo.filter.js.map +1 -0
  186. package/dist/repo/spec-trace-repo.js +323 -0
  187. package/dist/repo/spec-trace-repo.js.map +1 -0
  188. package/dist/repo/spec-trace-repo.types.d.ts +60 -0
  189. package/dist/repo/spec-trace-repo.types.d.ts.map +1 -0
  190. package/dist/repo/spec-trace-repo.types.js +2 -0
  191. package/dist/repo/spec-trace-repo.types.js.map +1 -0
  192. package/dist/repo/task-repo/factory.d.ts +4 -0
  193. package/dist/repo/task-repo/factory.d.ts.map +1 -0
  194. package/dist/repo/task-repo/factory.js +7 -0
  195. package/dist/repo/task-repo/factory.js.map +1 -0
  196. package/dist/repo/task-repo/read.d.ts +6 -0
  197. package/dist/repo/task-repo/read.d.ts.map +1 -0
  198. package/dist/repo/task-repo/read.js +332 -0
  199. package/dist/repo/task-repo/read.js.map +1 -0
  200. package/dist/repo/task-repo/shared.d.ts +10 -0
  201. package/dist/repo/task-repo/shared.d.ts.map +1 -0
  202. package/dist/repo/task-repo/shared.js +29 -0
  203. package/dist/repo/task-repo/shared.js.map +1 -0
  204. package/dist/repo/task-repo/write.d.ts +6 -0
  205. package/dist/repo/task-repo/write.d.ts.map +1 -0
  206. package/dist/repo/task-repo/write.js +182 -0
  207. package/dist/repo/task-repo/write.js.map +1 -0
  208. package/dist/repo/task-repo.d.ts +5 -4
  209. package/dist/repo/task-repo.d.ts.map +1 -1
  210. package/dist/repo/task-repo.js +2 -520
  211. package/dist/repo/task-repo.js.map +1 -1
  212. package/dist/repo/tracked-project-repo.d.ts.map +1 -1
  213. package/dist/repo/tracked-project-repo.js +6 -5
  214. package/dist/repo/tracked-project-repo.js.map +1 -1
  215. package/dist/repo/worker-repo.d.ts.map +1 -1
  216. package/dist/repo/worker-repo.js +60 -47
  217. package/dist/repo/worker-repo.js.map +1 -1
  218. package/dist/schemas/index.d.ts +4 -2
  219. package/dist/schemas/index.d.ts.map +1 -1
  220. package/dist/schemas/index.js +2 -1
  221. package/dist/schemas/index.js.map +1 -1
  222. package/dist/schemas/sync-events.d.ts +25 -0
  223. package/dist/schemas/sync-events.d.ts.map +1 -0
  224. package/dist/schemas/sync-events.js +23 -0
  225. package/dist/schemas/sync-events.js.map +1 -0
  226. package/dist/schemas/sync.d.ts +20 -10
  227. package/dist/schemas/sync.d.ts.map +1 -1
  228. package/dist/schemas/sync.js +10 -4
  229. package/dist/schemas/sync.js.map +1 -1
  230. package/dist/services/agent-service.d.ts +4 -4
  231. package/dist/services/agent-service.d.ts.map +1 -1
  232. package/dist/services/agent-service.js.map +1 -1
  233. package/dist/services/anchor/anchor-service-core-ops.d.ts +125 -0
  234. package/dist/services/anchor/anchor-service-core-ops.d.ts.map +1 -0
  235. package/dist/services/anchor/anchor-service-core-ops.js +41 -0
  236. package/dist/services/anchor/anchor-service-core-ops.js.map +1 -0
  237. package/dist/services/anchor/anchor-service-deps.d.ts +10 -0
  238. package/dist/services/anchor/anchor-service-deps.d.ts.map +1 -0
  239. package/dist/services/anchor/anchor-service-deps.js +2 -0
  240. package/dist/services/anchor/anchor-service-deps.js.map +1 -0
  241. package/dist/services/anchor/anchor-service-ops.d.ts +296 -0
  242. package/dist/services/anchor/anchor-service-ops.d.ts.map +1 -0
  243. package/dist/services/anchor/anchor-service-ops.js +9 -0
  244. package/dist/services/anchor/anchor-service-ops.js.map +1 -0
  245. package/dist/services/anchor/anchor-service-state-ops.d.ts +116 -0
  246. package/dist/services/anchor/anchor-service-state-ops.d.ts.map +1 -0
  247. package/dist/services/anchor/anchor-service-state-ops.js +150 -0
  248. package/dist/services/anchor/anchor-service-state-ops.js.map +1 -0
  249. package/dist/services/anchor/anchor-service-validation.d.ts +7 -0
  250. package/dist/services/anchor/anchor-service-validation.d.ts.map +1 -0
  251. package/dist/services/anchor/anchor-service-validation.js +114 -0
  252. package/dist/services/anchor/anchor-service-validation.js.map +1 -0
  253. package/dist/services/anchor/anchor-service-verification-ops.d.ts +71 -0
  254. package/dist/services/anchor/anchor-service-verification-ops.d.ts.map +1 -0
  255. package/dist/services/anchor/anchor-service-verification-ops.js +169 -0
  256. package/dist/services/anchor/anchor-service-verification-ops.js.map +1 -0
  257. package/dist/services/anchor/anchor-verification-batch.d.ts +12 -0
  258. package/dist/services/anchor/anchor-verification-batch.d.ts.map +1 -0
  259. package/dist/services/anchor/anchor-verification-batch.js +109 -0
  260. package/dist/services/anchor/anchor-verification-batch.js.map +1 -0
  261. package/dist/services/anchor/anchor-verification-single.d.ts +7 -0
  262. package/dist/services/anchor/anchor-verification-single.d.ts.map +1 -0
  263. package/dist/services/anchor/anchor-verification-single.js +407 -0
  264. package/dist/services/anchor/anchor-verification-single.js.map +1 -0
  265. package/dist/services/anchor/anchor-verification-utils.d.ts +19 -0
  266. package/dist/services/anchor/anchor-verification-utils.d.ts.map +1 -0
  267. package/dist/services/anchor/anchor-verification-utils.js +107 -0
  268. package/dist/services/anchor/anchor-verification-utils.js.map +1 -0
  269. package/dist/services/anchor-service.d.ts +12 -90
  270. package/dist/services/anchor-service.d.ts.map +1 -1
  271. package/dist/services/anchor-service.js +5 -530
  272. package/dist/services/anchor-service.js.map +1 -1
  273. package/dist/services/anchor-verification.d.ts +9 -60
  274. package/dist/services/anchor-verification.d.ts.map +1 -1
  275. package/dist/services/anchor-verification.js +5 -796
  276. package/dist/services/anchor-verification.js.map +1 -1
  277. package/dist/services/ast-grep-service/patterns.d.ts +90 -0
  278. package/dist/services/ast-grep-service/patterns.d.ts.map +1 -0
  279. package/dist/services/ast-grep-service/patterns.js +261 -0
  280. package/dist/services/ast-grep-service/patterns.js.map +1 -0
  281. package/dist/services/ast-grep-service.d.ts +2 -13
  282. package/dist/services/ast-grep-service.d.ts.map +1 -1
  283. package/dist/services/ast-grep-service.js +3 -261
  284. package/dist/services/ast-grep-service.js.map +1 -1
  285. package/dist/services/auto-sync-service.d.ts +3 -3
  286. package/dist/services/auto-sync-service.d.ts.map +1 -1
  287. package/dist/services/auto-sync-service.js +12 -13
  288. package/dist/services/auto-sync-service.js.map +1 -1
  289. package/dist/services/compaction-service.d.ts +6 -6
  290. package/dist/services/compaction-service.d.ts.map +1 -1
  291. package/dist/services/compaction-service.js +11 -7
  292. package/dist/services/compaction-service.js.map +1 -1
  293. package/dist/services/cycle-scan-service.d.ts +1 -27
  294. package/dist/services/cycle-scan-service.d.ts.map +1 -1
  295. package/dist/services/cycle-scan-service.js +1 -876
  296. package/dist/services/cycle-scan-service.js.map +1 -1
  297. package/dist/services/daemon-service/process.d.ts +93 -0
  298. package/dist/services/daemon-service/process.d.ts.map +1 -0
  299. package/dist/services/daemon-service/process.js +325 -0
  300. package/dist/services/daemon-service/process.js.map +1 -0
  301. package/dist/services/daemon-service/templates.d.ts +88 -0
  302. package/dist/services/daemon-service/templates.d.ts.map +1 -0
  303. package/dist/services/daemon-service/templates.js +119 -0
  304. package/dist/services/daemon-service/templates.js.map +1 -0
  305. package/dist/services/daemon-service.d.ts +8 -160
  306. package/dist/services/daemon-service.d.ts.map +1 -1
  307. package/dist/services/daemon-service.js +4 -440
  308. package/dist/services/daemon-service.js.map +1 -1
  309. package/dist/services/doc-service.d.ts +1 -41
  310. package/dist/services/doc-service.d.ts.map +1 -1
  311. package/dist/services/doc-service.js +1 -780
  312. package/dist/services/doc-service.js.map +1 -1
  313. package/dist/services/edge-service.d.ts +6 -6
  314. package/dist/services/edge-service.d.ts.map +1 -1
  315. package/dist/services/edge-service.js.map +1 -1
  316. package/dist/services/embedding-service.d.ts +1 -201
  317. package/dist/services/embedding-service.d.ts.map +1 -1
  318. package/dist/services/embedding-service.js +1 -465
  319. package/dist/services/embedding-service.js.map +1 -1
  320. package/dist/services/feedback-tracker.d.ts +2 -2
  321. package/dist/services/feedback-tracker.d.ts.map +1 -1
  322. package/dist/services/feedback-tracker.js.map +1 -1
  323. package/dist/services/file-watcher-service/shared.d.ts +76 -0
  324. package/dist/services/file-watcher-service/shared.d.ts.map +1 -0
  325. package/dist/services/file-watcher-service/shared.js +43 -0
  326. package/dist/services/file-watcher-service/shared.js.map +1 -0
  327. package/dist/services/file-watcher-service.d.ts +2 -42
  328. package/dist/services/file-watcher-service.d.ts.map +1 -1
  329. package/dist/services/file-watcher-service.js +1 -46
  330. package/dist/services/file-watcher-service.js.map +1 -1
  331. package/dist/services/graph-expansion/from-files.d.ts +14 -0
  332. package/dist/services/graph-expansion/from-files.d.ts.map +1 -0
  333. package/dist/services/graph-expansion/from-files.js +187 -0
  334. package/dist/services/graph-expansion/from-files.js.map +1 -0
  335. package/dist/services/graph-expansion/live.d.ts +11 -0
  336. package/dist/services/graph-expansion/live.d.ts.map +1 -0
  337. package/dist/services/graph-expansion/live.js +263 -0
  338. package/dist/services/graph-expansion/live.js.map +1 -0
  339. package/dist/services/graph-expansion.d.ts +17 -20
  340. package/dist/services/graph-expansion.d.ts.map +1 -1
  341. package/dist/services/graph-expansion.js +2 -439
  342. package/dist/services/graph-expansion.js.map +1 -1
  343. package/dist/services/guard-service.d.ts +2 -2
  344. package/dist/services/guard-service.d.ts.map +1 -1
  345. package/dist/services/guard-service.js.map +1 -1
  346. package/dist/services/index.d.ts +3 -1
  347. package/dist/services/index.d.ts.map +1 -1
  348. package/dist/services/index.js +3 -1
  349. package/dist/services/index.js.map +1 -1
  350. package/dist/services/learning-service.d.ts +4 -4
  351. package/dist/services/learning-service.d.ts.map +1 -1
  352. package/dist/services/learning-service.js.map +1 -1
  353. package/dist/services/llm-service.d.ts +4 -4
  354. package/dist/services/llm-service.d.ts.map +1 -1
  355. package/dist/services/llm-service.js.map +1 -1
  356. package/dist/services/memory-retriever-service.d.ts.map +1 -1
  357. package/dist/services/memory-retriever-service.js.map +1 -1
  358. package/dist/services/memory-service.d.ts +1 -48
  359. package/dist/services/memory-service.d.ts.map +1 -1
  360. package/dist/services/memory-service.js +1 -1060
  361. package/dist/services/memory-service.js.map +1 -1
  362. package/dist/services/migration-service.d.ts +6 -6
  363. package/dist/services/migration-service.d.ts.map +1 -1
  364. package/dist/services/migration-service.js +6 -2
  365. package/dist/services/migration-service.js.map +1 -1
  366. package/dist/services/orchestrator-service.d.ts +2 -2
  367. package/dist/services/orchestrator-service.d.ts.map +1 -1
  368. package/dist/services/orchestrator-service.js.map +1 -1
  369. package/dist/services/pin-service.d.ts.map +1 -1
  370. package/dist/services/pin-service.js +21 -28
  371. package/dist/services/pin-service.js.map +1 -1
  372. package/dist/services/promotion-service.d.ts +4 -4
  373. package/dist/services/promotion-service.d.ts.map +1 -1
  374. package/dist/services/promotion-service.js.map +1 -1
  375. package/dist/services/query-expansion-service.d.ts +2 -2
  376. package/dist/services/query-expansion-service.d.ts.map +1 -1
  377. package/dist/services/query-expansion-service.js.map +1 -1
  378. package/dist/services/reflect-service.d.ts +6 -6
  379. package/dist/services/reflect-service.d.ts.map +1 -1
  380. package/dist/services/reflect-service.js.map +1 -1
  381. package/dist/services/reranker-service.d.ts +2 -2
  382. package/dist/services/reranker-service.d.ts.map +1 -1
  383. package/dist/services/reranker-service.js.map +1 -1
  384. package/dist/services/retriever-scoring.d.ts +52 -0
  385. package/dist/services/retriever-scoring.d.ts.map +1 -0
  386. package/dist/services/retriever-scoring.js +146 -0
  387. package/dist/services/retriever-scoring.js.map +1 -0
  388. package/dist/services/retriever-service.d.ts.map +1 -1
  389. package/dist/services/retriever-service.js +1 -191
  390. package/dist/services/retriever-service.js.map +1 -1
  391. package/dist/services/run-heartbeat-service.d.ts +10 -10
  392. package/dist/services/run-heartbeat-service.d.ts.map +1 -1
  393. package/dist/services/run-heartbeat-service.js.map +1 -1
  394. package/dist/services/score-service.d.ts +2 -2
  395. package/dist/services/score-service.d.ts.map +1 -1
  396. package/dist/services/score-service.js.map +1 -1
  397. package/dist/services/spec-trace-service.d.ts +2 -0
  398. package/dist/services/spec-trace-service.d.ts.map +1 -0
  399. package/dist/services/spec-trace-service.js +2 -0
  400. package/dist/services/spec-trace-service.js.map +1 -0
  401. package/dist/services/stream-service.d.ts +31 -0
  402. package/dist/services/stream-service.d.ts.map +1 -0
  403. package/dist/services/stream-service.js +162 -0
  404. package/dist/services/stream-service.js.map +1 -0
  405. package/dist/services/swarm-verification/shared.d.ts +71 -0
  406. package/dist/services/swarm-verification/shared.d.ts.map +1 -0
  407. package/dist/services/swarm-verification/shared.js +108 -0
  408. package/dist/services/swarm-verification/shared.js.map +1 -0
  409. package/dist/services/swarm-verification.d.ts +4 -68
  410. package/dist/services/swarm-verification.d.ts.map +1 -1
  411. package/dist/services/swarm-verification.js +2 -122
  412. package/dist/services/swarm-verification.js.map +1 -1
  413. package/dist/services/sync/converters.d.ts +63 -0
  414. package/dist/services/sync/converters.d.ts.map +1 -0
  415. package/dist/services/sync/converters.js +253 -0
  416. package/dist/services/sync/converters.js.map +1 -0
  417. package/dist/services/sync/entity-export.d.ts +22 -0
  418. package/dist/services/sync/entity-export.d.ts.map +1 -0
  419. package/dist/services/sync/entity-export.js +15 -0
  420. package/dist/services/sync/entity-export.js.map +1 -0
  421. package/dist/services/sync/entity-import.d.ts +22 -0
  422. package/dist/services/sync/entity-import.d.ts.map +1 -0
  423. package/dist/services/sync/entity-import.js +15 -0
  424. package/dist/services/sync/entity-import.js.map +1 -0
  425. package/dist/services/sync/file-utils.d.ts +23 -0
  426. package/dist/services/sync/file-utils.d.ts.map +1 -0
  427. package/dist/services/sync/file-utils.js +97 -0
  428. package/dist/services/sync/file-utils.js.map +1 -0
  429. package/dist/services/sync/index.d.ts +3 -0
  430. package/dist/services/sync/index.d.ts.map +1 -0
  431. package/dist/services/sync/index.js +2 -0
  432. package/dist/services/sync/index.js.map +1 -0
  433. package/dist/services/sync/service.d.ts +2 -0
  434. package/dist/services/sync/service.d.ts.map +1 -0
  435. package/dist/services/sync/service.js +2 -0
  436. package/dist/services/sync/service.js.map +1 -0
  437. package/dist/services/sync/sync-helpers.d.ts +54 -0
  438. package/dist/services/sync/sync-helpers.d.ts.map +1 -0
  439. package/dist/services/sync/sync-helpers.js +245 -0
  440. package/dist/services/sync/sync-helpers.js.map +1 -0
  441. package/dist/services/sync/types.d.ts +104 -0
  442. package/dist/services/sync/types.d.ts.map +1 -0
  443. package/dist/services/sync/types.js +5 -0
  444. package/dist/services/sync/types.js.map +1 -0
  445. package/dist/services/task-service/internals.d.ts +36 -0
  446. package/dist/services/task-service/internals.d.ts.map +1 -0
  447. package/dist/services/task-service/internals.js +270 -0
  448. package/dist/services/task-service/internals.js.map +1 -0
  449. package/dist/services/task-service.d.ts +2 -1
  450. package/dist/services/task-service.d.ts.map +1 -1
  451. package/dist/services/task-service.js +28 -236
  452. package/dist/services/task-service.js.map +1 -1
  453. package/dist/services/tracing-service.d.ts +2 -2
  454. package/dist/services/tracing-service.d.ts.map +1 -1
  455. package/dist/services/tracing-service.js.map +1 -1
  456. package/dist/services/transcript-adapter.d.ts +6 -6
  457. package/dist/services/transcript-adapter.d.ts.map +1 -1
  458. package/dist/services/transcript-adapter.js +3 -8
  459. package/dist/services/transcript-adapter.js.map +1 -1
  460. package/dist/services/validation-service.d.ts +8 -8
  461. package/dist/services/validation-service.d.ts.map +1 -1
  462. package/dist/services/validation-service.js.map +1 -1
  463. package/dist/services/verify-service.d.ts +2 -2
  464. package/dist/services/verify-service.d.ts.map +1 -1
  465. package/dist/services/verify-service.js.map +1 -1
  466. package/dist/services/worker-process/runtime.d.ts +51 -0
  467. package/dist/services/worker-process/runtime.d.ts.map +1 -0
  468. package/dist/services/worker-process/runtime.js +150 -0
  469. package/dist/services/worker-process/runtime.js.map +1 -0
  470. package/dist/services/worker-process.d.ts +3 -18
  471. package/dist/services/worker-process.d.ts.map +1 -1
  472. package/dist/services/worker-process.js +3 -173
  473. package/dist/services/worker-process.js.map +1 -1
  474. package/dist/services/worker-service.d.ts +6 -6
  475. package/dist/services/worker-service.d.ts.map +1 -1
  476. package/dist/services/worker-service.js.map +1 -1
  477. package/dist/utils/db-result.d.ts +14 -0
  478. package/dist/utils/db-result.d.ts.map +1 -0
  479. package/dist/utils/db-result.js +37 -0
  480. package/dist/utils/db-result.js.map +1 -0
  481. package/dist/utils/doc-renderer.d.ts +10 -10
  482. package/dist/utils/doc-renderer.d.ts.map +1 -1
  483. package/dist/utils/doc-renderer.js.map +1 -1
  484. package/dist/utils/ears-validator.d.ts +2 -2
  485. package/dist/utils/ears-validator.d.ts.map +1 -1
  486. package/dist/utils/ears-validator.js.map +1 -1
  487. package/dist/utils/file-path.d.ts +27 -0
  488. package/dist/utils/file-path.d.ts.map +1 -0
  489. package/dist/utils/file-path.js +77 -0
  490. package/dist/utils/file-path.js.map +1 -0
  491. package/dist/utils/glob.d.ts +2 -11
  492. package/dist/utils/glob.d.ts.map +1 -1
  493. package/dist/utils/glob.js +22 -13
  494. package/dist/utils/glob.js.map +1 -1
  495. package/dist/utils/spec-discovery.d.ts +34 -0
  496. package/dist/utils/spec-discovery.d.ts.map +1 -0
  497. package/dist/utils/spec-discovery.js +344 -0
  498. package/dist/utils/spec-discovery.js.map +1 -0
  499. package/dist/utils/toml-config.d.ts +7 -2
  500. package/dist/utils/toml-config.d.ts.map +1 -1
  501. package/dist/utils/toml-config.js +106 -2
  502. package/dist/utils/toml-config.js.map +1 -1
  503. package/dist/utils/ulid.d.ts +8 -0
  504. package/dist/utils/ulid.d.ts.map +1 -0
  505. package/dist/utils/ulid.js +30 -0
  506. package/dist/utils/ulid.js.map +1 -0
  507. package/dist/worker/hooks.d.ts +10 -10
  508. package/dist/worker/hooks.d.ts.map +1 -1
  509. package/dist/worker/run-worker.d.ts.map +1 -1
  510. package/dist/worker/run-worker.js.map +1 -1
  511. package/migrations/033_sync_events.sql +33 -0
  512. package/migrations/034_spec_test_traceability.sql +51 -0
  513. package/migrations/035_anchor_schema_repair.sql +82 -0
  514. package/package.json +3 -2
  515. package/dist/services/sync-service.d.ts +0 -247
  516. package/dist/services/sync-service.d.ts.map +0 -1
  517. package/dist/services/sync-service.js.map +0 -1
@@ -1,23 +1,31 @@
1
+ // @ts-nocheck
1
2
  import { Context, Effect, Exit, Layer, Schema } from "effect";
2
- import { writeFile, rename, readFile, stat, mkdir, access } from "node:fs/promises";
3
+ import { writeFile, rename, readFile, mkdir, access, appendFile, readdir, rm, stat } from "node:fs/promises";
3
4
  import { readFileSync, writeFileSync, mkdirSync, renameSync, unlinkSync } from "node:fs";
4
5
  import { createHash } from "node:crypto";
5
- import { dirname, resolve, sep } from "node:path";
6
- import { DatabaseError, ValidationError } from "../errors.js";
7
- import { SqliteClient } from "../db.js";
8
- import { TaskService } from "./task-service.js";
9
- import { DependencyRepository } from "../repo/dep-repo.js";
10
- import { LearningRepository } from "../repo/learning-repo.js";
11
- import { FileLearningRepository } from "../repo/file-learning-repo.js";
12
- import { AttemptRepository } from "../repo/attempt-repo.js";
13
- import { PinRepository } from "../repo/pin-repo.js";
14
- import { syncBlocks } from "../utils/pin-file.js";
15
- import { AnchorRepository } from "../repo/anchor-repo.js";
16
- import { EdgeRepository } from "../repo/edge-repo.js";
17
- import { DocRepository } from "../repo/doc-repo.js";
18
- import { LearningUpsertOp as LearningUpsertOpSchema, FileLearningUpsertOp as FileLearningUpsertOpSchema, AttemptUpsertOp as AttemptUpsertOpSchema, PinUpsertOp as PinUpsertOpSchema, AnchorUpsertOp as AnchorUpsertOpSchema, EdgeUpsertOp as EdgeUpsertOpSchema, DocUpsertOp as DocUpsertOpSchema, DocLinkUpsertOp as DocLinkUpsertOpSchema, TaskDocLinkUpsertOp as TaskDocLinkUpsertOpSchema, InvariantUpsertOp as InvariantUpsertOpSchema, LabelUpsertOp as LabelUpsertOpSchema, LabelAssignmentUpsertOp as LabelAssignmentUpsertOpSchema, SyncOperation as SyncOperationSchema } from "../schemas/sync.js";
6
+ import { dirname, resolve, basename } from "node:path";
7
+ import { DatabaseError, ValidationError } from "../../errors.js";
8
+ import { SqliteClient } from "../../db.js";
9
+ import { TaskService } from "../../services/task-service.js";
10
+ import { StreamService } from "../../services/stream-service.js";
11
+ import { DependencyRepository } from "../../repo/dep-repo.js";
12
+ import { LearningRepository } from "../../repo/learning-repo.js";
13
+ import { FileLearningRepository } from "../../repo/file-learning-repo.js";
14
+ import { AttemptRepository } from "../../repo/attempt-repo.js";
15
+ import { PinRepository } from "../../repo/pin-repo.js";
16
+ import { syncBlocks } from "../../utils/pin-file.js";
17
+ import { resolvePathWithin } from "../../utils/file-path.js";
18
+ import { AnchorRepository } from "../../repo/anchor-repo.js";
19
+ import { EdgeRepository } from "../../repo/edge-repo.js";
20
+ import { DocRepository } from "../../repo/doc-repo.js";
21
+ import { LearningUpsertOp as LearningUpsertOpSchema, FileLearningUpsertOp as FileLearningUpsertOpSchema, AttemptUpsertOp as AttemptUpsertOpSchema, PinUpsertOp as PinUpsertOpSchema, AnchorUpsertOp as AnchorUpsertOpSchema, EdgeUpsertOp as EdgeUpsertOpSchema, DocUpsertOp as DocUpsertOpSchema, DocLinkUpsertOp as DocLinkUpsertOpSchema, TaskDocLinkUpsertOp as TaskDocLinkUpsertOpSchema, InvariantUpsertOp as InvariantUpsertOpSchema, LabelUpsertOp as LabelUpsertOpSchema, LabelAssignmentUpsertOp as LabelAssignmentUpsertOpSchema, TaskSyncOperation as TaskSyncOperationSchema } from "../../schemas/sync.js";
22
+ import { SyncEventEnvelopeSchema } from "../../schemas/sync-events.js";
23
+ import { generateUlid } from "../../utils/ulid.js";
24
+ import { applyEntityImportContract } from "../../services/sync/entity-import.js";
25
+ import { applyEntityExportContract } from "../../services/sync/entity-export.js";
26
+ import { importEntityJsonl } from "../../services/sync/file-utils.js";
19
27
  /**
20
- * SyncService provides JSONL-based export/import for git-tracked task syncing.
28
+ * SyncService provides stream-event export/import for git-tracked task syncing.
21
29
  * See DD-009 for full specification.
22
30
  */
23
31
  export class SyncService extends Context.Tag("SyncService")() {
@@ -31,6 +39,11 @@ const DEFAULT_ANCHORS_JSONL_PATH = ".tx/anchors.jsonl";
31
39
  const DEFAULT_EDGES_JSONL_PATH = ".tx/edges.jsonl";
32
40
  const DEFAULT_DOCS_JSONL_PATH = ".tx/docs.jsonl";
33
41
  const DEFAULT_LABELS_JSONL_PATH = ".tx/labels.jsonl";
42
+ const DEFAULT_STREAMS_DIR = ".tx/streams";
43
+ const DEFAULT_SYNC_WATERMARK_KEY = "last_import_at";
44
+ const FULL_EXPORT_LIMIT = 1_000_000_000;
45
+ const MAX_SYNC_JSONL_FILE_BYTES = 64 * 1024 * 1024;
46
+ const MAX_STREAM_IMPORT_EVENTS = 250_000;
34
47
  /**
35
48
  * Compute a content hash for cross-machine dedup.
36
49
  * Entities with auto-increment IDs use this to identify duplicates.
@@ -45,6 +58,101 @@ const sqliteToIso = (s) => {
45
58
  return s.replace(" ", "T") + ".000Z";
46
59
  return s;
47
60
  };
61
+ const V1_TO_SYNC_TYPE = {
62
+ upsert: "task.upsert",
63
+ delete: "task.delete",
64
+ dep_add: "dep.add",
65
+ dep_remove: "dep.remove",
66
+ learning_upsert: "learning.upsert",
67
+ learning_delete: "learning.delete",
68
+ file_learning_upsert: "file_learning.upsert",
69
+ file_learning_delete: "file_learning.delete",
70
+ attempt_upsert: "attempt.upsert",
71
+ pin_upsert: "pin.upsert",
72
+ pin_delete: "pin.delete",
73
+ anchor_upsert: "anchor.upsert",
74
+ anchor_delete: "anchor.delete",
75
+ edge_upsert: "edge.upsert",
76
+ edge_delete: "edge.delete",
77
+ doc_upsert: "doc.upsert",
78
+ doc_delete: "doc.delete",
79
+ doc_link_upsert: "doc_link.upsert",
80
+ task_doc_link_upsert: "task_doc_link.upsert",
81
+ invariant_upsert: "invariant.upsert",
82
+ label_upsert: "label.upsert",
83
+ label_assignment_upsert: "label_assignment.upsert",
84
+ };
85
+ const entityIdFromV1Op = (op) => {
86
+ const kind = typeof op.op === "string" ? op.op : "";
87
+ switch (kind) {
88
+ case "dep_add":
89
+ case "dep_remove":
90
+ return `${String(op.blockerId)}:${String(op.blockedId)}`;
91
+ case "learning_upsert":
92
+ case "learning_delete":
93
+ case "file_learning_upsert":
94
+ case "file_learning_delete":
95
+ case "attempt_upsert":
96
+ case "anchor_upsert":
97
+ case "anchor_delete":
98
+ case "edge_upsert":
99
+ case "edge_delete":
100
+ case "doc_upsert":
101
+ case "doc_delete":
102
+ case "label_upsert":
103
+ return String(op.contentHash ?? op.id ?? "");
104
+ case "doc_link_upsert":
105
+ return String(op.contentHash ?? op.id ?? "");
106
+ case "task_doc_link_upsert":
107
+ return String(op.contentHash ?? op.id ?? "");
108
+ case "label_assignment_upsert":
109
+ return String(op.contentHash ?? "");
110
+ default:
111
+ return String(op.id ?? op.contentHash ?? "");
112
+ }
113
+ };
114
+ const opToSyncEventType = (op) => {
115
+ const v1Op = typeof op.op === "string" ? op.op : "";
116
+ return V1_TO_SYNC_TYPE[v1Op] ?? null;
117
+ };
118
+ const getTsFromOp = (op) => typeof op.ts === "string" ? op.ts : new Date().toISOString();
119
+ const getEventIdFromOp = (op) => typeof op.eventId === "string" ? op.eventId : (typeof op.__event_id === "string" ? op.__event_id : "");
120
+ const compareOpOrder = (a, b) => {
121
+ const t = getTsFromOp(a).localeCompare(getTsFromOp(b));
122
+ if (t !== 0)
123
+ return t;
124
+ const eventCmp = getEventIdFromOp(a).localeCompare(getEventIdFromOp(b));
125
+ if (eventCmp !== 0)
126
+ return eventCmp;
127
+ return 0;
128
+ };
129
+ const compareSyncOrder = (a, b) => {
130
+ const t = a.ts.localeCompare(b.ts);
131
+ if (t !== 0)
132
+ return t;
133
+ return (a.eventId ?? "").localeCompare(b.eventId ?? "");
134
+ };
135
+ const toSyncEvent = (op, streamId, seq) => {
136
+ const type = opToSyncEventType(op);
137
+ if (!type)
138
+ return null;
139
+ return {
140
+ event_id: generateUlid(),
141
+ stream_id: streamId,
142
+ seq,
143
+ ts: getTsFromOp(op),
144
+ type,
145
+ entity_id: entityIdFromV1Op(op),
146
+ v: 2,
147
+ payload: op,
148
+ };
149
+ };
150
+ const syncEventToV1Op = (event) => {
151
+ if (!event.payload || typeof event.payload !== "object")
152
+ return null;
153
+ const payload = event.payload;
154
+ return typeof payload.op === "string" ? payload : null;
155
+ };
48
156
  /**
49
157
  * Empty entity import result for early returns.
50
158
  */
@@ -58,6 +166,123 @@ const EMPTY_IMPORT_RESULT = {
58
166
  conflicts: 0,
59
167
  dependencies: { added: 0, removed: 0, skipped: 0, failures: [] }
60
168
  };
169
+ const emptyV1Buckets = () => ({
170
+ tasks: [],
171
+ learnings: [],
172
+ fileLearnings: [],
173
+ attempts: [],
174
+ pins: [],
175
+ anchors: [],
176
+ edges: [],
177
+ docs: [],
178
+ labels: [],
179
+ });
180
+ const bucketForOp = (opName) => {
181
+ if (opName === "upsert" || opName === "delete" || opName === "dep_add" || opName === "dep_remove")
182
+ return "tasks";
183
+ if (opName === "learning_upsert" || opName === "learning_delete")
184
+ return "learnings";
185
+ if (opName === "file_learning_upsert" || opName === "file_learning_delete")
186
+ return "fileLearnings";
187
+ if (opName === "attempt_upsert")
188
+ return "attempts";
189
+ if (opName === "pin_upsert" || opName === "pin_delete")
190
+ return "pins";
191
+ if (opName === "anchor_upsert" || opName === "anchor_delete")
192
+ return "anchors";
193
+ if (opName === "edge_upsert" || opName === "edge_delete")
194
+ return "edges";
195
+ if (opName === "doc_upsert" || opName === "doc_delete" || opName === "doc_link_upsert" || opName === "task_doc_link_upsert" || opName === "invariant_upsert")
196
+ return "docs";
197
+ if (opName === "label_upsert" || opName === "label_assignment_upsert")
198
+ return "labels";
199
+ return null;
200
+ };
201
+ const stateCategoryForOp = (opName) => {
202
+ if (opName === "upsert" || opName === "delete")
203
+ return "task";
204
+ if (opName === "dep_add" || opName === "dep_remove")
205
+ return "dep";
206
+ if (opName === "learning_upsert" || opName === "learning_delete")
207
+ return "learning";
208
+ if (opName === "file_learning_upsert" || opName === "file_learning_delete")
209
+ return "file_learning";
210
+ if (opName === "attempt_upsert")
211
+ return "attempt";
212
+ if (opName === "pin_upsert" || opName === "pin_delete")
213
+ return "pin";
214
+ if (opName === "anchor_upsert" || opName === "anchor_delete")
215
+ return "anchor";
216
+ if (opName === "edge_upsert" || opName === "edge_delete")
217
+ return "edge";
218
+ if (opName === "doc_upsert" || opName === "doc_delete")
219
+ return "doc";
220
+ if (opName === "doc_link_upsert")
221
+ return "doc_link";
222
+ if (opName === "task_doc_link_upsert")
223
+ return "task_doc_link";
224
+ if (opName === "invariant_upsert")
225
+ return "invariant";
226
+ if (opName === "label_upsert")
227
+ return "label";
228
+ if (opName === "label_assignment_upsert")
229
+ return "label_assignment";
230
+ return null;
231
+ };
232
+ const stateCategoryForSyncType = (syncType) => {
233
+ if (syncType === "task.upsert" || syncType === "task.delete")
234
+ return "task";
235
+ if (syncType === "dep.add" || syncType === "dep.remove")
236
+ return "dep";
237
+ if (syncType === "learning.upsert" || syncType === "learning.delete")
238
+ return "learning";
239
+ if (syncType === "file_learning.upsert" || syncType === "file_learning.delete")
240
+ return "file_learning";
241
+ if (syncType === "attempt.upsert")
242
+ return "attempt";
243
+ if (syncType === "pin.upsert" || syncType === "pin.delete")
244
+ return "pin";
245
+ if (syncType === "anchor.upsert" || syncType === "anchor.delete")
246
+ return "anchor";
247
+ if (syncType === "edge.upsert" || syncType === "edge.delete")
248
+ return "edge";
249
+ if (syncType === "doc.upsert" || syncType === "doc.delete")
250
+ return "doc";
251
+ if (syncType === "doc_link.upsert")
252
+ return "doc_link";
253
+ if (syncType === "task_doc_link.upsert")
254
+ return "task_doc_link";
255
+ if (syncType === "invariant.upsert")
256
+ return "invariant";
257
+ if (syncType === "label.upsert")
258
+ return "label";
259
+ if (syncType === "label_assignment.upsert")
260
+ return "label_assignment";
261
+ return null;
262
+ };
263
+ const isRemovalSyncType = (syncType) => syncType === "task.delete" ||
264
+ syncType === "dep.remove" ||
265
+ syncType === "learning.delete" ||
266
+ syncType === "file_learning.delete" ||
267
+ syncType === "pin.delete" ||
268
+ syncType === "anchor.delete" ||
269
+ syncType === "edge.delete" ||
270
+ syncType === "doc.delete";
271
+ const stateKeyForSyncEvent = (syncType, entityId) => {
272
+ if (typeof entityId !== "string" || entityId.length === 0)
273
+ return null;
274
+ const category = stateCategoryForSyncType(syncType);
275
+ if (!category)
276
+ return null;
277
+ return `${category}:${entityId}`;
278
+ };
279
+ const stateKeyForOp = (op) => {
280
+ const opName = typeof op.op === "string" ? op.op : "";
281
+ const category = stateCategoryForOp(opName);
282
+ if (!category)
283
+ return null;
284
+ return `${category}:${entityIdFromV1Op(op)}`;
285
+ };
61
286
  /**
62
287
  * Topologically sort task operations so parents are processed before children.
63
288
  * This ensures foreign key constraints are satisfied during import.
@@ -402,64 +627,30 @@ const labelAssignmentToUpsertOp = (row, labelNameMap) => {
402
627
  };
403
628
  };
404
629
  /**
405
- * Generic helper: parse a JSONL file, validate with schema, dedup by contentHash,
406
- * filter against existing entities, and insert new ones via caller-provided batch function.
407
- * Returns EntityImportResult with imported/skipped counts.
630
+ * Check if a file exists without blocking the event loop.
408
631
  */
409
- const importEntityJsonl = (filePath, schema, existingHashes, insertBatch) => Effect.gen(function* () {
410
- const importFileExists = yield* fileExists(filePath);
411
- if (!importFileExists) {
412
- return EMPTY_ENTITY_IMPORT_RESULT;
632
+ const fileExists = (filePath) => Effect.promise(() => access(filePath).then(() => true).catch(() => false));
633
+ const readUtf8FileWithLimit = (filePath, maxBytes = MAX_SYNC_JSONL_FILE_BYTES) => Effect.gen(function* () {
634
+ const fileStats = yield* Effect.tryPromise({
635
+ try: () => stat(filePath),
636
+ catch: (cause) => new DatabaseError({ cause })
637
+ });
638
+ if (fileStats.size > maxBytes) {
639
+ return yield* Effect.fail(new ValidationError({
640
+ reason: `Sync import file exceeds ${maxBytes} bytes: ${filePath}`
641
+ }));
413
642
  }
414
643
  const content = yield* Effect.tryPromise({
415
644
  try: () => readFile(filePath, "utf-8"),
416
645
  catch: (cause) => new DatabaseError({ cause })
417
646
  });
418
- const lines = content.trim().split("\n").filter(Boolean);
419
- if (lines.length === 0) {
420
- return EMPTY_ENTITY_IMPORT_RESULT;
421
- }
422
- // Parse and dedup by contentHash (keep latest by timestamp)
423
- const states = new Map();
424
- for (const line of lines) {
425
- const parsed = yield* Effect.try({
426
- try: () => JSON.parse(line),
427
- catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
428
- });
429
- const op = yield* Effect.try({
430
- try: () => Schema.decodeUnknownSync(schema)(parsed),
431
- catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
432
- });
433
- const existing = states.get(op.contentHash);
434
- if (!existing || op.ts > existing.ts) {
435
- states.set(op.contentHash, op);
436
- }
647
+ if (Buffer.byteLength(content, "utf8") > maxBytes) {
648
+ return yield* Effect.fail(new ValidationError({
649
+ reason: `Sync import file exceeds ${maxBytes} bytes: ${filePath}`
650
+ }));
437
651
  }
438
- // Filter to new entities only (not already in DB)
439
- const newOps = [];
440
- let skipped = 0;
441
- for (const op of states.values()) {
442
- if (existingHashes.has(op.contentHash)) {
443
- skipped++;
444
- }
445
- else {
446
- newOps.push(op);
447
- }
448
- }
449
- if (newOps.length === 0) {
450
- return { imported: 0, skipped };
451
- }
452
- // Insert via caller-provided batch function (handles transaction)
453
- const imported = yield* Effect.try({
454
- try: () => insertBatch(newOps),
455
- catch: (cause) => new DatabaseError({ cause })
456
- });
457
- return { imported, skipped };
652
+ return content;
458
653
  });
459
- /**
460
- * Check if a file exists without blocking the event loop.
461
- */
462
- const fileExists = (filePath) => Effect.promise(() => access(filePath).then(() => true).catch(() => false));
463
654
  /**
464
655
  * Write content to file atomically using temp file + rename.
465
656
  * Uses async fs operations to avoid blocking the event loop.
@@ -476,6 +667,7 @@ const atomicWrite = (filePath, content) => Effect.tryPromise({
476
667
  });
477
668
  export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* () {
478
669
  const taskService = yield* TaskService;
670
+ const streamService = yield* StreamService;
479
671
  const depRepo = yield* DependencyRepository;
480
672
  const db = yield* SqliteClient;
481
673
  const learningRepo = yield* LearningRepository;
@@ -500,29 +692,336 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
500
692
  },
501
693
  catch: (cause) => new DatabaseError({ cause })
502
694
  });
503
- const syncService = {
504
- export: (path) => Effect.gen(function* () {
505
- const filePath = resolve(path ?? DEFAULT_JSONL_PATH);
506
- // Get all tasks and dependencies (explicit high limit for full export)
507
- const tasks = yield* taskService.list();
508
- const deps = yield* depRepo.getAll(100_000);
509
- // Convert to sync operations
510
- const taskOps = tasks.map(taskToUpsertOp);
511
- const depOps = deps.map(depToAddOp);
512
- // Combine and sort by timestamp
513
- const allOps = [...taskOps, ...depOps].sort((a, b) => a.ts.localeCompare(b.ts));
514
- // Convert to JSONL format (one JSON object per line)
515
- const jsonl = allOps.map(op => JSON.stringify(op)).join("\n");
516
- // Write atomically
517
- yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
518
- // Record export time
519
- yield* setConfig("last_export", new Date().toISOString());
520
- return {
521
- opCount: allOps.length,
522
- path: filePath
523
- };
524
- }),
525
- import: (path) => Effect.gen(function* () {
695
+ const setWatermark = (key, value) => Effect.try({
696
+ try: () => {
697
+ db.prepare("INSERT OR REPLACE INTO sync_watermark (key, value) VALUES (?, ?)").run(key, value);
698
+ },
699
+ catch: (cause) => new DatabaseError({ cause })
700
+ });
701
+ const touchStreamProgress = (streamId, lastSeq, lastEventAt) => {
702
+ db.prepare(`INSERT INTO sync_streams (stream_id, created_at, last_seq, last_event_at)
703
+ VALUES (?, datetime('now'), ?, ?)
704
+ ON CONFLICT(stream_id) DO UPDATE SET
705
+ last_seq = CASE
706
+ WHEN excluded.last_seq > sync_streams.last_seq THEN excluded.last_seq
707
+ ELSE sync_streams.last_seq
708
+ END,
709
+ last_event_at = CASE
710
+ WHEN excluded.last_event_at IS NOT NULL THEN excluded.last_event_at
711
+ ELSE sync_streams.last_event_at
712
+ END`).run(streamId, lastSeq, lastEventAt ?? null);
713
+ };
714
+ const runWriteTransaction = db.transaction((body) => body());
715
+ const withWriteTransaction = (body) => {
716
+ if (db.inTransaction) {
717
+ return runWriteTransaction(body);
718
+ }
719
+ return runWriteTransaction.immediate(body);
720
+ };
721
+ const readJsonlRecords = (filePath) => Effect.gen(function* () {
722
+ const exists = yield* fileExists(filePath);
723
+ if (!exists)
724
+ return [];
725
+ const content = yield* readUtf8FileWithLimit(filePath);
726
+ const lines = content.trim().split("\n").filter(Boolean);
727
+ const records = [];
728
+ for (const line of lines) {
729
+ const parsed = yield* Effect.try({
730
+ try: () => JSON.parse(line),
731
+ catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
732
+ });
733
+ if (!parsed || typeof parsed !== "object") {
734
+ return yield* Effect.fail(new ValidationError({ reason: "JSONL line is not an object" }));
735
+ }
736
+ records.push(parsed);
737
+ }
738
+ return records;
739
+ });
740
+ const loadEventsFromStreams = (mode) => Effect.gen(function* () {
741
+ const streamsRoot = resolve(DEFAULT_STREAMS_DIR);
742
+ const rootExists = yield* fileExists(streamsRoot);
743
+ if (!rootExists) {
744
+ return { events: [], streamCount: 0, maxSeqByStream: new Map() };
745
+ }
746
+ const entries = yield* Effect.tryPromise({
747
+ try: () => readdir(streamsRoot, { withFileTypes: true }),
748
+ catch: (cause) => new DatabaseError({ cause })
749
+ });
750
+ const streamDirs = entries.filter(entry => entry.isDirectory());
751
+ const events = [];
752
+ const maxSeqByStream = new Map();
753
+ for (const dir of streamDirs) {
754
+ const streamId = dir.name;
755
+ const knownLastSeq = mode === "incremental"
756
+ ? (yield* Effect.try({
757
+ try: () => db.prepare("SELECT last_seq FROM sync_streams WHERE stream_id = ?").get(streamId)?.last_seq ?? 0,
758
+ catch: (cause) => new DatabaseError({ cause })
759
+ }))
760
+ : 0;
761
+ const dirPath = resolve(streamsRoot, streamId);
762
+ const files = yield* Effect.tryPromise({
763
+ try: () => readdir(dirPath),
764
+ catch: (cause) => new DatabaseError({ cause })
765
+ });
766
+ const eventFiles = files
767
+ .filter(name => /^events-\d{4}-\d{2}-\d{2}\.jsonl$/.test(name))
768
+ .sort();
769
+ for (const file of eventFiles) {
770
+ const filePath = resolve(dirPath, file);
771
+ const lines = (yield* readJsonlRecords(filePath));
772
+ for (const line of lines) {
773
+ const event = yield* Effect.try({
774
+ try: () => Schema.decodeUnknownSync(SyncEventEnvelopeSchema)(line),
775
+ catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
776
+ });
777
+ if (event.stream_id !== streamId) {
778
+ return yield* Effect.fail(new ValidationError({
779
+ reason: `Event stream mismatch in ${basename(filePath)}: expected ${streamId}, got ${event.stream_id}`
780
+ }));
781
+ }
782
+ if (event.seq <= knownLastSeq)
783
+ continue;
784
+ if (events.length >= MAX_STREAM_IMPORT_EVENTS) {
785
+ return yield* Effect.fail(new ValidationError({
786
+ reason: `Stream import exceeds ${MAX_STREAM_IMPORT_EVENTS} events; split stream files or run incremental imports more frequently`
787
+ }));
788
+ }
789
+ events.push(event);
790
+ const currentMax = maxSeqByStream.get(streamId) ?? 0;
791
+ if (event.seq > currentMax)
792
+ maxSeqByStream.set(streamId, event.seq);
793
+ }
794
+ }
795
+ }
796
+ events.sort((a, b) => {
797
+ const t = a.ts.localeCompare(b.ts);
798
+ return t !== 0 ? t : a.event_id.localeCompare(b.event_id);
799
+ });
800
+ return {
801
+ events,
802
+ streamCount: streamDirs.length,
803
+ maxSeqByStream
804
+ };
805
+ });
806
+ const bucketEventsToV1Ops = (events) => {
807
+ const buckets = emptyV1Buckets();
808
+ for (const event of events) {
809
+ const base = syncEventToV1Op(event);
810
+ if (!base)
811
+ continue;
812
+ const op = { ...base, eventId: event.event_id };
813
+ const name = typeof op.op === "string" ? op.op : "";
814
+ const bucket = bucketForOp(name);
815
+ if (!bucket)
816
+ continue;
817
+ buckets[bucket].push(op);
818
+ }
819
+ const sortByTs = (ops) => ops.sort(compareOpOrder);
820
+ sortByTs(buckets.tasks);
821
+ sortByTs(buckets.learnings);
822
+ sortByTs(buckets.fileLearnings);
823
+ sortByTs(buckets.attempts);
824
+ sortByTs(buckets.pins);
825
+ sortByTs(buckets.anchors);
826
+ sortByTs(buckets.edges);
827
+ sortByTs(buckets.docs);
828
+ sortByTs(buckets.labels);
829
+ return buckets;
830
+ };
831
+ const writeBucketsToTempFiles = (buckets) => Effect.gen(function* () {
832
+ const dir = resolve(".tx", ".sync-temp", `${Date.now()}-${Math.random().toString(36).slice(2)}`);
833
+ yield* Effect.tryPromise({
834
+ try: () => mkdir(dir, { recursive: true }),
835
+ catch: (cause) => new DatabaseError({ cause })
836
+ });
837
+ const writeBucket = (name, ops) => Effect.gen(function* () {
838
+ const filePath = resolve(dir, name);
839
+ if (ops.length === 0) {
840
+ yield* atomicWrite(filePath, "");
841
+ }
842
+ else {
843
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
844
+ yield* atomicWrite(filePath, `${jsonl}\n`);
845
+ }
846
+ return filePath;
847
+ });
848
+ const tasksPath = yield* writeBucket("tasks.jsonl", buckets.tasks);
849
+ const learningsPath = yield* writeBucket("learnings.jsonl", buckets.learnings);
850
+ const fileLearningsPath = yield* writeBucket("file-learnings.jsonl", buckets.fileLearnings);
851
+ const attemptsPath = yield* writeBucket("attempts.jsonl", buckets.attempts);
852
+ const pinsPath = yield* writeBucket("pins.jsonl", buckets.pins);
853
+ const anchorsPath = yield* writeBucket("anchors.jsonl", buckets.anchors);
854
+ const edgesPath = yield* writeBucket("edges.jsonl", buckets.edges);
855
+ const docsPath = yield* writeBucket("docs.jsonl", buckets.docs);
856
+ const labelsPath = yield* writeBucket("labels.jsonl", buckets.labels);
857
+ return {
858
+ dir,
859
+ tasksPath,
860
+ learningsPath,
861
+ fileLearningsPath,
862
+ attemptsPath,
863
+ pinsPath,
864
+ anchorsPath,
865
+ edgesPath,
866
+ docsPath,
867
+ labelsPath,
868
+ };
869
+ });
870
+ const clearMaterializedTables = () => Effect.try({
871
+ try: () => {
872
+ withWriteTransaction(() => {
873
+ db.prepare("DELETE FROM task_label_assignments").run();
874
+ db.prepare("DELETE FROM task_labels").run();
875
+ db.prepare("DELETE FROM invariant_checks").run();
876
+ db.prepare("DELETE FROM invariants").run();
877
+ db.prepare("DELETE FROM task_doc_links").run();
878
+ db.prepare("DELETE FROM doc_links").run();
879
+ db.prepare("DELETE FROM docs").run();
880
+ db.prepare("DELETE FROM learning_edges").run();
881
+ db.prepare("DELETE FROM learning_anchors").run();
882
+ db.prepare("DELETE FROM context_pins").run();
883
+ db.prepare("DELETE FROM attempts").run();
884
+ db.prepare("DELETE FROM file_learnings").run();
885
+ db.prepare("DELETE FROM learnings").run();
886
+ db.prepare("DELETE FROM task_dependencies").run();
887
+ db.prepare("DELETE FROM tasks").run();
888
+ });
889
+ },
890
+ catch: (cause) => new DatabaseError({ cause })
891
+ });
892
+ const cleanupTempDir = (dir) => Effect.promise(() => rm(dir, { recursive: true, force: true }).then(() => undefined).catch(() => undefined));
893
+ const collectCurrentOpsForSync = () => Effect.gen(function* () {
894
+ const tasks = yield* taskService.list();
895
+ const deps = yield* depRepo.getAll(100_000);
896
+ const taskOps = tasks.map(taskToUpsertOp);
897
+ const depOps = deps.map(depToAddOp);
898
+ const learnings = yield* learningRepo.findAll(FULL_EXPORT_LIMIT);
899
+ const learningOps = learnings.map(learningToUpsertOp);
900
+ const learningHashMap = new Map();
901
+ for (const l of learnings) {
902
+ learningHashMap.set(l.id, contentHash(l.content, l.sourceType));
903
+ }
904
+ const fileLearnings = yield* fileLearningRepo.findAll(FULL_EXPORT_LIMIT);
905
+ const fileLearningOps = fileLearnings.map(fileLearningToUpsertOp);
906
+ const attempts = yield* attemptRepo.findAll();
907
+ const attemptOps = attempts.map(attemptToUpsertOp);
908
+ const pins = yield* pinRepo.findAll();
909
+ const pinOps = [...pins].map(pinToUpsertOp);
910
+ const anchors = yield* anchorRepo.findAll(FULL_EXPORT_LIMIT);
911
+ const anchorOps = anchors.map(anchor => anchorToUpsertOp(anchor, learningHashMap));
912
+ const edges = yield* edgeRepo.findAll(FULL_EXPORT_LIMIT);
913
+ const edgeOps = edges
914
+ .filter(edge => edge.invalidatedAt === null)
915
+ .map(edgeToUpsertOp);
916
+ const docs = yield* docRepo.findAll();
917
+ const docKeyMap = new Map();
918
+ for (const d of docs) {
919
+ docKeyMap.set(d.id, `${d.name}:${d.version}`);
920
+ }
921
+ const docOps = docs.map(d => docToUpsertOp(d, docKeyMap));
922
+ const docLinks = yield* docRepo.getAllLinks();
923
+ const docLinkOps = docLinks
924
+ .map(link => docLinkToUpsertOp(link, docKeyMap))
925
+ .filter((op) => op !== null);
926
+ const taskDocLinkRows = yield* Effect.try({
927
+ try: () => db.prepare("SELECT * FROM task_doc_links").all(),
928
+ catch: (cause) => new DatabaseError({ cause })
929
+ });
930
+ const taskDocLinkOps = taskDocLinkRows
931
+ .map(row => taskDocLinkToUpsertOp({ id: row.id, taskId: row.task_id, docId: row.doc_id, linkType: row.link_type, createdAt: new Date(row.created_at) }, docKeyMap))
932
+ .filter((op) => op !== null);
933
+ const invariants = yield* docRepo.findInvariants();
934
+ const invariantOps = invariants
935
+ .map(inv => invariantToUpsertOp(inv, docKeyMap))
936
+ .filter((op) => op !== null);
937
+ const labelRows = yield* Effect.try({
938
+ try: () => db.prepare("SELECT * FROM task_labels").all(),
939
+ catch: (cause) => new DatabaseError({ cause })
940
+ });
941
+ const labelNameMap = new Map();
942
+ for (const l of labelRows) {
943
+ labelNameMap.set(l.id, l.name);
944
+ }
945
+ const labelOps = labelRows.map(labelRowToUpsertOp);
946
+ const assignmentRows = yield* Effect.try({
947
+ try: () => db.prepare("SELECT * FROM task_label_assignments").all(),
948
+ catch: (cause) => new DatabaseError({ cause })
949
+ });
950
+ const labelAssignmentOps = assignmentRows
951
+ .map(a => labelAssignmentToUpsertOp(a, labelNameMap))
952
+ .filter((op) => op !== null);
953
+ const all = [
954
+ ...taskOps,
955
+ ...depOps,
956
+ ...learningOps,
957
+ ...fileLearningOps,
958
+ ...attemptOps,
959
+ ...pinOps,
960
+ ...anchorOps,
961
+ ...edgeOps,
962
+ ...docOps,
963
+ ...docLinkOps,
964
+ ...taskDocLinkOps,
965
+ ...invariantOps,
966
+ ...labelOps,
967
+ ...labelAssignmentOps,
968
+ ];
969
+ all.sort(compareOpOrder);
970
+ return all;
971
+ });
972
+ const collectLegacyTaskOpsForSync = () => Effect.gen(function* () {
973
+ const tasks = yield* taskService.list();
974
+ const deps = yield* depRepo.getAll(100_000);
975
+ const taskOps = tasks.map(taskToUpsertOp);
976
+ const depOps = deps.map(depToAddOp);
977
+ const all = [...taskOps, ...depOps];
978
+ all.sort(compareOpOrder);
979
+ return all;
980
+ });
981
+ const syncPinsToTargetFiles = () => Effect.gen(function* () {
982
+ const allPins = yield* pinRepo.findAll();
983
+ const targetFiles = yield* pinRepo.getTargetFiles();
984
+ const pinMap = new Map();
985
+ for (const pin of allPins) {
986
+ pinMap.set(pin.id, pin.content);
987
+ }
988
+ yield* Effect.try({
989
+ try: () => {
990
+ for (const targetFile of targetFiles) {
991
+ const projectRoot = process.cwd();
992
+ const resolvedPath = resolvePathWithin(projectRoot, targetFile, {
993
+ useRealpath: true
994
+ });
995
+ if (!resolvedPath)
996
+ continue;
997
+ let fileContent = "";
998
+ try {
999
+ fileContent = readFileSync(resolvedPath, "utf-8");
1000
+ }
1001
+ catch { /* file doesn't exist yet */ }
1002
+ const updated = syncBlocks(fileContent, pinMap);
1003
+ if (updated !== fileContent) {
1004
+ const dir = dirname(resolvedPath);
1005
+ mkdirSync(dir, { recursive: true });
1006
+ const tempPath = `${resolvedPath}.tmp.${Date.now()}.${process.pid}`;
1007
+ writeFileSync(tempPath, updated, "utf-8");
1008
+ try {
1009
+ renameSync(tempPath, resolvedPath);
1010
+ }
1011
+ finally {
1012
+ try {
1013
+ unlinkSync(tempPath);
1014
+ }
1015
+ catch { /* ignore cleanup error */ }
1016
+ }
1017
+ }
1018
+ }
1019
+ },
1020
+ catch: (cause) => new DatabaseError({ cause })
1021
+ });
1022
+ });
1023
+ const syncService = applyEntityImportContract(applyEntityExportContract({
1024
+ importTaskOps: (path) => Effect.gen(function* () {
526
1025
  const filePath = resolve(path ?? DEFAULT_JSONL_PATH);
527
1026
  // Check if file exists (outside transaction - no DB access)
528
1027
  const importFileExists = yield* fileExists(filePath);
@@ -530,10 +1029,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
530
1029
  return EMPTY_IMPORT_RESULT;
531
1030
  }
532
1031
  // Read and parse JSONL file (outside transaction - no DB access)
533
- const content = yield* Effect.tryPromise({
534
- try: () => readFile(filePath, "utf-8"),
535
- catch: (cause) => new DatabaseError({ cause })
536
- });
1032
+ const content = yield* readUtf8FileWithLimit(filePath);
537
1033
  const lines = content.trim().split("\n").filter(Boolean);
538
1034
  if (lines.length === 0) {
539
1035
  return EMPTY_IMPORT_RESULT;
@@ -548,7 +1044,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
548
1044
  catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
549
1045
  });
550
1046
  const op = yield* Effect.try({
551
- try: () => Schema.decodeUnknownSync(SyncOperationSchema)(parsed),
1047
+ try: () => Schema.decodeUnknownSync(TaskSyncOperationSchema)(parsed),
552
1048
  catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
553
1049
  });
554
1050
  ops.push(op);
@@ -559,15 +1055,15 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
559
1055
  for (const op of ops) {
560
1056
  if (op.op === "upsert" || op.op === "delete") {
561
1057
  const existing = taskStates.get(op.id);
562
- if (!existing || op.ts > existing.ts) {
563
- taskStates.set(op.id, { op: op, ts: op.ts });
1058
+ if (!existing || compareSyncOrder(op, existing) > 0) {
1059
+ taskStates.set(op.id, { op: op, ts: op.ts, eventId: op.eventId });
564
1060
  }
565
1061
  }
566
1062
  else if (op.op === "dep_add" || op.op === "dep_remove") {
567
1063
  const key = `${op.blockerId}:${op.blockedId}`;
568
1064
  const existing = depStates.get(key);
569
- if (!existing || op.ts > existing.ts) {
570
- depStates.set(key, { op: op, ts: op.ts });
1065
+ if (!existing || compareSyncOrder(op, existing) > 0) {
1066
+ depStates.set(key, { op: op, ts: op.ts, eventId: op.eventId });
571
1067
  }
572
1068
  }
573
1069
  }
@@ -601,15 +1097,8 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
601
1097
  const checkParentExistsStmt = db.prepare("SELECT 1 FROM tasks WHERE id = ?");
602
1098
  // ALL database operations inside a single transaction for atomicity
603
1099
  // If any operation fails, the entire import is rolled back
604
- return yield* Effect.acquireUseRelease(
605
- // Acquire: Begin transaction
606
- Effect.try({
607
- try: () => db.exec("BEGIN IMMEDIATE"),
608
- catch: (cause) => new DatabaseError({ cause })
609
- }),
610
- // Use: Run all database operations
611
- () => Effect.try({
612
- try: () => {
1100
+ return yield* Effect.try({
1101
+ try: () => withWriteTransaction(() => {
613
1102
  let imported = 0;
614
1103
  let skipped = 0;
615
1104
  let conflicts = 0;
@@ -729,16 +1218,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
729
1218
  const details = depFailures
730
1219
  .map(f => `${f.blockerId} -> ${f.blockedId}: ${f.error}`)
731
1220
  .join("; ");
732
- throw new Error(`Sync import rolled back: ${depFailures.length} dependency failure(s): ${details}`);
1221
+ throw new ValidationError({ reason: `Sync import rolled back: ${depFailures.length} dependency failure(s): ${details}` });
733
1222
  }
734
1223
  // Verify file hasn't been modified during import (TOCTOU protection).
735
- // Re-read synchronously while holding the DB write lock (BEGIN IMMEDIATE).
736
- // If another process exported between our initial read and now, the hash
737
- // will differ and we roll back to avoid committing stale data.
1224
+ // Re-read synchronously while holding the DB write lock.
738
1225
  const verifyContent = readFileSync(filePath, "utf-8");
739
1226
  const verifyHash = createHash("sha256").update(verifyContent).digest("hex");
740
1227
  if (verifyHash !== fileHash) {
741
- throw new Error("Sync import rolled back: JSONL file was modified during import (concurrent export detected). Retry the import.");
1228
+ throw new ValidationError({ reason: "Sync import rolled back: JSONL file was modified during import (concurrent export detected). Retry the import." });
742
1229
  }
743
1230
  // Record import time
744
1231
  setConfigStmt.run("last_import", new Date().toISOString());
@@ -753,153 +1240,76 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
753
1240
  failures: depFailures
754
1241
  }
755
1242
  };
756
- },
757
- catch: (cause) => new DatabaseError({ cause })
758
- }),
759
- // Release: Commit on success, rollback on failure
760
- (_, exit) => Effect.sync(() => {
761
- if (Exit.isSuccess(exit)) {
762
- try {
763
- db.exec("COMMIT");
764
- }
765
- catch {
766
- // COMMIT failed — roll back to prevent a stuck open transaction
767
- try {
768
- db.exec("ROLLBACK");
769
- }
770
- catch { /* already rolled back */ }
771
- }
772
- }
773
- else {
774
- try {
775
- db.exec("ROLLBACK");
776
- }
777
- catch { /* no active transaction */ }
778
- }
779
- }));
1243
+ }),
1244
+ catch: (cause) => cause instanceof ValidationError ? cause : new DatabaseError({ cause })
1245
+ });
780
1246
  }),
781
1247
  status: () => Effect.gen(function* () {
782
- const filePath = resolve(DEFAULT_JSONL_PATH);
783
- // Count tasks in database (SQL COUNT instead of loading all rows)
784
1248
  const dbTaskCount = yield* taskService.count();
785
- // Count dependencies in database (SQL COUNT instead of loading all rows)
786
- const dbDepCount = yield* Effect.try({
1249
+ const eventOpCount = yield* Effect.try({
787
1250
  try: () => {
788
- const row = db.prepare("SELECT COUNT(*) as cnt FROM task_dependencies").get();
1251
+ const row = db.prepare("SELECT COUNT(*) as cnt FROM sync_events").get();
789
1252
  return row.cnt;
790
1253
  },
791
1254
  catch: (cause) => new DatabaseError({ cause })
792
1255
  });
793
- // Count operations in JSONL file and get file info
794
- let jsonlOpCount = 0;
795
- let jsonlTaskCount = 0;
796
- let jsonlDepCount = 0;
797
- let lastExport = null;
798
- const jsonlFileExists = yield* fileExists(filePath);
799
- if (jsonlFileExists) {
800
- // Get file modification time as lastExport
801
- const stats = yield* Effect.tryPromise({
802
- try: () => stat(filePath),
803
- catch: (cause) => new DatabaseError({ cause })
804
- });
805
- lastExport = stats.mtime;
806
- // Count non-empty lines (each line is one operation)
807
- const content = yield* Effect.tryPromise({
808
- try: () => readFile(filePath, "utf-8"),
809
- catch: (cause) => new DatabaseError({ cause })
810
- });
811
- const lines = content.trim().split("\n").filter(Boolean);
812
- jsonlOpCount = lines.length;
813
- // Parse JSONL to count EFFECTIVE task and dependency states
814
- // After git merges, the file may have multiple operations for the same entity
815
- // We need to deduplicate by ID and track the latest operation (timestamp wins)
816
- // to get accurate counts that match what the DB state should be after import
817
- const taskStates = new Map();
818
- const depStates = new Map();
819
- for (const line of lines) {
820
- try {
821
- const op = JSON.parse(line);
822
- if (op.op === "upsert" || op.op === "delete") {
823
- const existing = taskStates.get(op.id);
824
- if (!existing || op.ts > existing.ts) {
825
- taskStates.set(op.id, { op: op.op, ts: op.ts });
826
- }
827
- }
828
- else if (op.op === "dep_add" || op.op === "dep_remove") {
829
- const key = `${op.blockerId}:${op.blockedId}`;
830
- const existing = depStates.get(key);
831
- if (!existing || op.ts > existing.ts) {
832
- depStates.set(key, { op: op.op, ts: op.ts });
833
- }
834
- }
835
- }
836
- catch {
837
- // Skip malformed lines for counting purposes
838
- }
839
- }
840
- // Count only entities whose latest operation is an "add" operation
841
- // (upsert for tasks, dep_add for dependencies)
842
- for (const state of taskStates.values()) {
843
- if (state.op === "upsert") {
844
- jsonlTaskCount++;
845
- }
846
- }
847
- for (const state of depStates.values()) {
848
- if (state.op === "dep_add") {
849
- jsonlDepCount++;
850
- }
851
- }
852
- }
853
1256
  // Get last export/import timestamps from config
854
1257
  const lastExportConfig = yield* getConfig("last_export");
855
1258
  const lastImportConfig = yield* getConfig("last_import");
856
- const lastExportDate = lastExportConfig && lastExportConfig !== "" ? new Date(lastExportConfig) : lastExport;
1259
+ const lastExportDate = lastExportConfig && lastExportConfig !== "" ? new Date(lastExportConfig) : null;
857
1260
  const lastImportDate = lastImportConfig && lastImportConfig !== "" ? new Date(lastImportConfig) : null;
858
1261
  // Get auto-sync status
859
1262
  const autoSyncConfig = yield* getConfig("auto_sync");
860
1263
  const autoSyncEnabled = autoSyncConfig === "true";
861
- // Determine if dirty: DB has changes not in JSONL
862
- // Per DD-009: dirty if tasks exist AND (no lastExport OR any task/dep updated after lastExport)
863
- // Additionally: dirty if counts differ (indicates deletions/removals)
864
- let isDirty = false;
865
- if (dbTaskCount > 0 && !jsonlFileExists) {
866
- // No JSONL file but tasks exist → dirty
867
- isDirty = true;
1264
+ // Dirty detection includes timestamp drift and state-shape drift.
1265
+ // Uses sync_events table (not filesystem scans) for exported state.
1266
+ const currentOps = yield* collectCurrentOpsForSync();
1267
+ const currentStateKeys = new Set();
1268
+ for (const op of currentOps) {
1269
+ const key = stateKeyForOp(op);
1270
+ if (key)
1271
+ currentStateKeys.add(key);
868
1272
  }
869
- else if (dbTaskCount > 0 || dbDepCount > 0) {
870
- if (lastExportDate === null) {
871
- // Tasks/deps exist but never exported dirty
872
- isDirty = true;
1273
+ const latestExportedState = new Map();
1274
+ const exportedRows = yield* Effect.try({
1275
+ try: () => db.prepare("SELECT event_id, ts, type, entity_id FROM sync_events").all(),
1276
+ catch: (cause) => new DatabaseError({ cause })
1277
+ });
1278
+ for (const row of exportedRows) {
1279
+ const syncType = typeof row.type === "string" ? row.type : "";
1280
+ const key = stateKeyForSyncEvent(syncType, row.entity_id);
1281
+ if (!key)
1282
+ continue;
1283
+ const next = { syncType, ts: row.ts, eventId: row.event_id };
1284
+ const existing = latestExportedState.get(key);
1285
+ if (!existing || compareSyncOrder(next, existing) > 0) {
1286
+ latestExportedState.set(key, next);
873
1287
  }
874
- else {
875
- // Check if any task was updated after the last export (uses idx_tasks_updated index)
876
- const lastExportIso = lastExportDate.toISOString();
877
- const tasksDirty = yield* Effect.try({
878
- try: () => {
879
- const row = db.prepare("SELECT COUNT(*) as cnt FROM tasks WHERE updated_at > ?").get(lastExportIso);
880
- return row.cnt > 0;
881
- },
882
- catch: (cause) => new DatabaseError({ cause })
883
- });
884
- // Check if any dependency was created after the last export
885
- const depsDirty = yield* Effect.try({
886
- try: () => {
887
- const row = db.prepare("SELECT COUNT(*) as cnt FROM task_dependencies WHERE created_at > ?").get(lastExportIso);
888
- return row.cnt > 0;
889
- },
890
- catch: (cause) => new DatabaseError({ cause })
891
- });
892
- // Check if counts differ (indicates deletions occurred since export)
893
- // DB count < JSONL count means tasks/deps were deleted
894
- // DB count > JSONL count means tasks/deps were added (also caught by timestamp check)
895
- const taskCountMismatch = dbTaskCount !== jsonlTaskCount;
896
- const depCountMismatch = dbDepCount !== jsonlDepCount;
897
- isDirty = tasksDirty || depsDirty || taskCountMismatch || depCountMismatch;
1288
+ }
1289
+ const exportedStateKeys = new Set();
1290
+ for (const [key, state] of latestExportedState.entries()) {
1291
+ if (!isRemovalSyncType(state.syncType)) {
1292
+ exportedStateKeys.add(key);
898
1293
  }
899
1294
  }
1295
+ const stateMismatch = currentStateKeys.size !== exportedStateKeys.size ||
1296
+ [...currentStateKeys].some((key) => !exportedStateKeys.has(key)) ||
1297
+ [...exportedStateKeys].some((key) => !currentStateKeys.has(key));
1298
+ const hasLocalState = currentStateKeys.size > 0;
1299
+ const lastOpTs = currentOps.reduce((max, op) => {
1300
+ const ts = typeof op.ts === "string" ? op.ts : null;
1301
+ if (!ts)
1302
+ return max;
1303
+ if (max === null)
1304
+ return ts;
1305
+ return ts > max ? ts : max;
1306
+ }, null);
1307
+ const hasStateWithoutExport = lastExportDate === null && (hasLocalState || exportedStateKeys.size > 0);
1308
+ const hasNewerProjectedState = lastExportDate !== null && lastOpTs !== null && lastOpTs > lastExportDate.toISOString();
1309
+ const isDirty = hasStateWithoutExport || hasNewerProjectedState || stateMismatch;
900
1310
  return {
901
1311
  dbTaskCount,
902
- jsonlOpCount,
1312
+ eventOpCount,
903
1313
  lastExport: lastExportDate,
904
1314
  lastImport: lastImportDate,
905
1315
  isDirty,
@@ -912,74 +1322,6 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
912
1322
  const value = yield* getConfig("auto_sync");
913
1323
  return value === "true";
914
1324
  }),
915
- compact: (path) => Effect.gen(function* () {
916
- const filePath = resolve(path ?? DEFAULT_JSONL_PATH);
917
- // Check if file exists
918
- const compactFileExists = yield* fileExists(filePath);
919
- if (!compactFileExists) {
920
- return { before: 0, after: 0 };
921
- }
922
- // Read and parse JSONL file
923
- const content = yield* Effect.tryPromise({
924
- try: () => readFile(filePath, "utf-8"),
925
- catch: (cause) => new DatabaseError({ cause })
926
- });
927
- const lines = content.trim().split("\n").filter(Boolean);
928
- if (lines.length === 0) {
929
- return { before: 0, after: 0 };
930
- }
931
- const before = lines.length;
932
- // Parse and deduplicate - keep only latest state per entity
933
- const taskStates = new Map();
934
- const depStates = new Map();
935
- for (const line of lines) {
936
- const parsed = yield* Effect.try({
937
- try: () => JSON.parse(line),
938
- catch: (cause) => new ValidationError({ reason: `Invalid JSON: ${cause}` })
939
- });
940
- const op = yield* Effect.try({
941
- try: () => Schema.decodeUnknownSync(SyncOperationSchema)(parsed),
942
- catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
943
- });
944
- if (op.op === "upsert" || op.op === "delete") {
945
- const taskOp = op;
946
- const existing = taskStates.get(taskOp.id);
947
- if (!existing || taskOp.ts > existing.ts) {
948
- taskStates.set(taskOp.id, op);
949
- }
950
- }
951
- else if (op.op === "dep_add" || op.op === "dep_remove") {
952
- const depOp = op;
953
- const key = `${depOp.blockerId}:${depOp.blockedId}`;
954
- const existing = depStates.get(key);
955
- if (!existing || depOp.ts > existing.ts) {
956
- depStates.set(key, op);
957
- }
958
- }
959
- }
960
- // Rebuild compacted JSONL, excluding deleted tasks and removed deps
961
- const compacted = [];
962
- for (const op of taskStates.values()) {
963
- // Only keep upserts, skip deletes (tombstones)
964
- if (op.op === "upsert") {
965
- compacted.push(op);
966
- }
967
- }
968
- for (const op of depStates.values()) {
969
- // Only keep dep_adds, skip dep_removes
970
- if (op.op === "dep_add") {
971
- compacted.push(op);
972
- }
973
- }
974
- // Sort by timestamp for deterministic output
975
- compacted.sort((a, b) => a.ts.localeCompare(b.ts));
976
- // Write compacted JSONL atomically
977
- const newContent = compacted.map(op => JSON.stringify(op)).join("\n");
978
- yield* atomicWrite(filePath, newContent + (newContent.length > 0 ? "\n" : ""));
979
- return { before, after: compacted.length };
980
- }),
981
- setLastExport: (timestamp) => setConfig("last_export", timestamp.toISOString()),
982
- setLastImport: (timestamp) => setConfig("last_import", timestamp.toISOString()),
983
1325
  exportLearnings: (path) => Effect.gen(function* () {
984
1326
  const filePath = resolve(path ?? DEFAULT_LEARNINGS_JSONL_PATH);
985
1327
  const learnings = yield* learningRepo.findAll();
@@ -995,23 +1337,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
995
1337
  const existingHashes = new Set(existing.map(l => contentHash(l.content, l.sourceType)));
996
1338
  const insertStmt = db.prepare("INSERT INTO learnings (content, source_type, source_ref, created_at, keywords, category) VALUES (?, ?, ?, ?, ?, ?)");
997
1339
  return yield* importEntityJsonl(filePath, LearningUpsertOpSchema, existingHashes, (ops) => {
998
- db.exec("BEGIN IMMEDIATE");
999
- try {
1340
+ return withWriteTransaction(() => {
1000
1341
  let count = 0;
1001
1342
  for (const op of ops) {
1002
1343
  insertStmt.run(op.data.content, op.data.sourceType, op.data.sourceRef, op.ts, JSON.stringify(op.data.keywords), op.data.category);
1003
1344
  count++;
1004
1345
  }
1005
- db.exec("COMMIT");
1006
1346
  return count;
1007
- }
1008
- catch (e) {
1009
- try {
1010
- db.exec("ROLLBACK");
1011
- }
1012
- catch { /* no active transaction */ }
1013
- throw e;
1014
- }
1347
+ });
1015
1348
  });
1016
1349
  }),
1017
1350
  exportFileLearnings: (path) => Effect.gen(function* () {
@@ -1029,23 +1362,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1029
1362
  const existingHashes = new Set(existing.map(fl => contentHash(fl.filePattern, fl.note)));
1030
1363
  const insertStmt = db.prepare("INSERT INTO file_learnings (file_pattern, note, task_id, created_at) VALUES (?, ?, ?, ?)");
1031
1364
  return yield* importEntityJsonl(filePath, FileLearningUpsertOpSchema, existingHashes, (ops) => {
1032
- db.exec("BEGIN IMMEDIATE");
1033
- try {
1365
+ return withWriteTransaction(() => {
1034
1366
  let count = 0;
1035
1367
  for (const op of ops) {
1036
1368
  insertStmt.run(op.data.filePattern, op.data.note, op.data.taskId, op.ts);
1037
1369
  count++;
1038
1370
  }
1039
- db.exec("COMMIT");
1040
1371
  return count;
1041
- }
1042
- catch (e) {
1043
- try {
1044
- db.exec("ROLLBACK");
1045
- }
1046
- catch { /* no active transaction */ }
1047
- throw e;
1048
- }
1372
+ });
1049
1373
  });
1050
1374
  }),
1051
1375
  exportAttempts: (path) => Effect.gen(function* () {
@@ -1063,23 +1387,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1063
1387
  const existingHashes = new Set(existing.map(a => contentHash(a.taskId, a.approach)));
1064
1388
  const insertStmt = db.prepare("INSERT INTO attempts (task_id, approach, outcome, reason, created_at) VALUES (?, ?, ?, ?, ?)");
1065
1389
  return yield* importEntityJsonl(filePath, AttemptUpsertOpSchema, existingHashes, (ops) => {
1066
- db.exec("BEGIN IMMEDIATE");
1067
- try {
1390
+ return withWriteTransaction(() => {
1068
1391
  let count = 0;
1069
1392
  for (const op of ops) {
1070
1393
  insertStmt.run(op.data.taskId, op.data.approach, op.data.outcome, op.data.reason, op.ts);
1071
1394
  count++;
1072
1395
  }
1073
- db.exec("COMMIT");
1074
1396
  return count;
1075
- }
1076
- catch (e) {
1077
- try {
1078
- db.exec("ROLLBACK");
1079
- }
1080
- catch { /* no active transaction */ }
1081
- throw e;
1082
- }
1397
+ });
1083
1398
  });
1084
1399
  }),
1085
1400
  exportPins: (path) => Effect.gen(function* () {
@@ -1101,65 +1416,18 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1101
1416
  content = excluded.content,
1102
1417
  updated_at = excluded.updated_at`);
1103
1418
  const result = yield* importEntityJsonl(filePath, PinUpsertOpSchema, existingHashes, (ops) => {
1104
- db.exec("BEGIN IMMEDIATE");
1105
- try {
1419
+ return withWriteTransaction(() => {
1106
1420
  let count = 0;
1107
1421
  for (const op of ops) {
1108
1422
  upsertStmt.run(op.id, op.data.content, op.ts, op.ts);
1109
1423
  count++;
1110
1424
  }
1111
- db.exec("COMMIT");
1112
1425
  return count;
1113
- }
1114
- catch (e) {
1115
- try {
1116
- db.exec("ROLLBACK");
1117
- }
1118
- catch { /* no active transaction */ }
1119
- throw e;
1120
- }
1121
- });
1122
- // Sync imported pins to target files (pins exist to be written to context files)
1123
- if (result.imported > 0) {
1124
- const allPins = yield* pinRepo.findAll();
1125
- const targetFiles = yield* pinRepo.getTargetFiles();
1126
- const pinMap = new Map();
1127
- for (const pin of allPins) {
1128
- pinMap.set(pin.id, pin.content);
1129
- }
1130
- yield* Effect.try({
1131
- try: () => {
1132
- for (const targetFile of targetFiles) {
1133
- const projectRoot = process.cwd();
1134
- const resolvedPath = resolve(projectRoot, targetFile);
1135
- if (!resolvedPath.startsWith(projectRoot + sep))
1136
- continue;
1137
- let fileContent = "";
1138
- try {
1139
- fileContent = readFileSync(resolvedPath, "utf-8");
1140
- }
1141
- catch { /* file doesn't exist yet */ }
1142
- const updated = syncBlocks(fileContent, pinMap);
1143
- if (updated !== fileContent) {
1144
- const dir = dirname(resolvedPath);
1145
- mkdirSync(dir, { recursive: true });
1146
- const tempPath = `${resolvedPath}.tmp.${Date.now()}.${process.pid}`;
1147
- try {
1148
- writeFileSync(tempPath, updated, "utf-8");
1149
- renameSync(tempPath, resolvedPath);
1150
- }
1151
- catch (e) {
1152
- try {
1153
- unlinkSync(tempPath);
1154
- }
1155
- catch { /* ignore cleanup error */ }
1156
- throw e;
1157
- }
1158
- }
1159
- }
1160
- },
1161
- catch: (cause) => new DatabaseError({ cause })
1162
1426
  });
1427
+ });
1428
+ // Avoid blocking DB transactions with filesystem writes.
1429
+ if (result.imported > 0 && !db.inTransaction) {
1430
+ yield* syncPinsToTargetFiles();
1163
1431
  }
1164
1432
  return result;
1165
1433
  }),
@@ -1202,8 +1470,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1202
1470
  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
1203
1471
  let orphanedCount = 0;
1204
1472
  const result = yield* importEntityJsonl(filePath, AnchorUpsertOpSchema, existingHashes, (ops) => {
1205
- db.exec("BEGIN IMMEDIATE");
1206
- try {
1473
+ return withWriteTransaction(() => {
1207
1474
  let count = 0;
1208
1475
  for (const op of ops) {
1209
1476
  const learningId = hashToLearningId.get(op.data.learningContentHash);
@@ -1214,16 +1481,8 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1214
1481
  insertStmt.run(learningId, op.data.anchorType, op.data.anchorValue, op.data.filePath, op.data.symbolFqname, op.data.lineStart, op.data.lineEnd, op.data.contentHash, op.data.contentPreview, op.data.status, op.data.pinned ? 1 : 0, op.ts);
1215
1482
  count++;
1216
1483
  }
1217
- db.exec("COMMIT");
1218
1484
  return count;
1219
- }
1220
- catch (e) {
1221
- try {
1222
- db.exec("ROLLBACK");
1223
- }
1224
- catch { /* no active transaction */ }
1225
- throw e;
1226
- }
1485
+ });
1227
1486
  });
1228
1487
  return { imported: result.imported, skipped: result.skipped + orphanedCount };
1229
1488
  }),
@@ -1246,23 +1505,14 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1246
1505
  (edge_type, source_type, source_id, target_type, target_id, weight, metadata, created_at)
1247
1506
  VALUES (?, ?, ?, ?, ?, ?, ?, ?)`);
1248
1507
  return yield* importEntityJsonl(filePath, EdgeUpsertOpSchema, existingHashes, (ops) => {
1249
- db.exec("BEGIN IMMEDIATE");
1250
- try {
1508
+ return withWriteTransaction(() => {
1251
1509
  let count = 0;
1252
1510
  for (const op of ops) {
1253
1511
  insertStmt.run(op.data.edgeType, op.data.sourceType, op.data.sourceId, op.data.targetType, op.data.targetId, op.data.weight, JSON.stringify(op.data.metadata), op.ts);
1254
1512
  count++;
1255
1513
  }
1256
- db.exec("COMMIT");
1257
1514
  return count;
1258
- }
1259
- catch (e) {
1260
- try {
1261
- db.exec("ROLLBACK");
1262
- }
1263
- catch { /* no active transaction */ }
1264
- throw e;
1265
- }
1515
+ });
1266
1516
  });
1267
1517
  }),
1268
1518
  exportDocs: (path) => Effect.gen(function* () {
@@ -1304,10 +1554,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1304
1554
  const importDocsFileExists = yield* fileExists(filePath);
1305
1555
  if (!importDocsFileExists)
1306
1556
  return EMPTY_ENTITY_IMPORT_RESULT;
1307
- const content = yield* Effect.tryPromise({
1308
- try: () => readFile(filePath, "utf-8"),
1309
- catch: (cause) => new DatabaseError({ cause })
1310
- });
1557
+ const content = yield* readUtf8FileWithLimit(filePath);
1311
1558
  const lines = content.trim().split("\n").filter(Boolean);
1312
1559
  if (lines.length === 0)
1313
1560
  return EMPTY_ENTITY_IMPORT_RESULT;
@@ -1378,8 +1625,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1378
1625
  const updateParentDocStmt = db.prepare("UPDATE docs SET parent_doc_id = ? WHERE id = ?");
1379
1626
  return yield* Effect.try({
1380
1627
  try: () => {
1381
- db.exec("BEGIN IMMEDIATE");
1382
- try {
1628
+ return withWriteTransaction(() => {
1383
1629
  let imported = 0;
1384
1630
  let skipped = 0;
1385
1631
  // 1. Import docs (dedup by content hash = kind:name:version)
@@ -1496,16 +1742,8 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1496
1742
  insertInvariantStmt.run(op.id, op.data.rule, op.data.enforcement, docId, op.data.subsystem, op.data.testRef, op.data.lintRule, op.data.promptRef, op.data.status, op.ts, JSON.stringify(op.data.metadata));
1497
1743
  imported++;
1498
1744
  }
1499
- db.exec("COMMIT");
1500
1745
  return { imported, skipped };
1501
- }
1502
- catch (e) {
1503
- try {
1504
- db.exec("ROLLBACK");
1505
- }
1506
- catch { /* no active transaction */ }
1507
- throw e;
1508
- }
1746
+ });
1509
1747
  },
1510
1748
  catch: (cause) => new DatabaseError({ cause })
1511
1749
  });
@@ -1541,10 +1779,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1541
1779
  const importLabelsFileExists = yield* fileExists(filePath);
1542
1780
  if (!importLabelsFileExists)
1543
1781
  return EMPTY_ENTITY_IMPORT_RESULT;
1544
- const content = yield* Effect.tryPromise({
1545
- try: () => readFile(filePath, "utf-8"),
1546
- catch: (cause) => new DatabaseError({ cause })
1547
- });
1782
+ const content = yield* readUtf8FileWithLimit(filePath);
1548
1783
  const lines = content.trim().split("\n").filter(Boolean);
1549
1784
  if (lines.length === 0)
1550
1785
  return EMPTY_ENTITY_IMPORT_RESULT;
@@ -1581,8 +1816,7 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1581
1816
  const insertAssignmentStmt = db.prepare("INSERT INTO task_label_assignments (task_id, label_id, created_at) VALUES (?, ?, ?)");
1582
1817
  return yield* Effect.try({
1583
1818
  try: () => {
1584
- db.exec("BEGIN IMMEDIATE");
1585
- try {
1819
+ return withWriteTransaction(() => {
1586
1820
  let imported = 0;
1587
1821
  let skipped = 0;
1588
1822
  const newLabelNameToId = new Map();
@@ -1634,80 +1868,301 @@ export const SyncServiceLive = Layer.effect(SyncService, Effect.gen(function* ()
1634
1868
  skipped++;
1635
1869
  }
1636
1870
  }
1637
- db.exec("COMMIT");
1638
1871
  return { imported, skipped };
1872
+ });
1873
+ },
1874
+ catch: (cause) => new DatabaseError({ cause })
1875
+ });
1876
+ }),
1877
+ export: (path) => Effect.gen(function* () {
1878
+ if (typeof path === "string") {
1879
+ const filePath = resolve(path ?? DEFAULT_JSONL_PATH);
1880
+ const ops = yield* collectLegacyTaskOpsForSync();
1881
+ const jsonl = ops.map(op => JSON.stringify(op)).join("\n");
1882
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
1883
+ yield* setConfig("last_export", new Date().toISOString());
1884
+ return {
1885
+ opCount: ops.length,
1886
+ path: filePath,
1887
+ };
1888
+ }
1889
+ const ops = yield* collectCurrentOpsForSync();
1890
+ const stream = yield* streamService.getInfo();
1891
+ const day = new Date().toISOString().slice(0, 10);
1892
+ const eventPath = resolve(stream.eventsDir, `events-${day}.jsonl`);
1893
+ if (ops.length === 0) {
1894
+ yield* setConfig("last_export", new Date().toISOString());
1895
+ return { eventCount: 0, streamId: stream.streamId, path: eventPath };
1896
+ }
1897
+ const reservation = yield* streamService.reserveSeq(ops.length);
1898
+ const events = [];
1899
+ let seq = reservation.startSeq;
1900
+ for (const op of ops) {
1901
+ const event = toSyncEvent(op, stream.streamId, seq);
1902
+ if (!event)
1903
+ continue;
1904
+ events.push(event);
1905
+ seq++;
1906
+ }
1907
+ if (events.length === 0) {
1908
+ yield* setConfig("last_export", new Date().toISOString());
1909
+ return { eventCount: 0, streamId: stream.streamId, path: eventPath };
1910
+ }
1911
+ yield* Effect.tryPromise({
1912
+ try: () => appendFile(eventPath, `${events.map(e => JSON.stringify(e)).join("\n")}\n`, "utf-8"),
1913
+ catch: (cause) => new DatabaseError({ cause })
1914
+ });
1915
+ yield* Effect.try({
1916
+ try: () => {
1917
+ const insertStmt = db.prepare(`INSERT OR IGNORE INTO sync_events (event_id, stream_id, seq, ts, type, entity_id, v, payload)
1918
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`);
1919
+ withWriteTransaction(() => {
1920
+ for (const event of events) {
1921
+ insertStmt.run(event.event_id, event.stream_id, event.seq, event.ts, event.type, event.entity_id, event.v, JSON.stringify(event.payload));
1922
+ }
1923
+ });
1924
+ },
1925
+ catch: (cause) => new DatabaseError({ cause })
1926
+ });
1927
+ const lastEvent = events[events.length - 1];
1928
+ yield* streamService.touchStream(stream.streamId, lastEvent.seq, lastEvent.ts);
1929
+ yield* setConfig("last_export", new Date().toISOString());
1930
+ return {
1931
+ eventCount: events.length,
1932
+ streamId: stream.streamId,
1933
+ path: eventPath,
1934
+ };
1935
+ }),
1936
+ import: (path) => Effect.gen(function* () {
1937
+ if (typeof path === "string") {
1938
+ return yield* syncService.importTaskOps(path);
1939
+ }
1940
+ const loaded = yield* loadEventsFromStreams("incremental");
1941
+ if (loaded.events.length === 0) {
1942
+ return { importedEvents: 0, appliedEvents: 0, streamCount: loaded.streamCount };
1943
+ }
1944
+ const buckets = bucketEventsToV1Ops(loaded.events);
1945
+ const tempFiles = yield* writeBucketsToTempFiles(buckets);
1946
+ let shouldSyncPinsToTargets = false;
1947
+ yield* Effect.acquireUseRelease(Effect.try({
1948
+ try: () => db.exec("BEGIN"),
1949
+ catch: (cause) => new DatabaseError({ cause })
1950
+ }), () => Effect.gen(function* () {
1951
+ if (buckets.tasks.length > 0)
1952
+ yield* syncService.importTaskOps(tempFiles.tasksPath);
1953
+ if (buckets.learnings.length > 0)
1954
+ yield* syncService.importLearnings(tempFiles.learningsPath);
1955
+ if (buckets.fileLearnings.length > 0)
1956
+ yield* syncService.importFileLearnings(tempFiles.fileLearningsPath);
1957
+ if (buckets.attempts.length > 0)
1958
+ yield* syncService.importAttempts(tempFiles.attemptsPath);
1959
+ if (buckets.pins.length > 0) {
1960
+ const pinImportResult = yield* syncService.importPins(tempFiles.pinsPath);
1961
+ shouldSyncPinsToTargets = shouldSyncPinsToTargets || pinImportResult.imported > 0;
1962
+ }
1963
+ if (buckets.anchors.length > 0)
1964
+ yield* syncService.importAnchors(tempFiles.anchorsPath);
1965
+ if (buckets.edges.length > 0)
1966
+ yield* syncService.importEdges(tempFiles.edgesPath);
1967
+ if (buckets.docs.length > 0)
1968
+ yield* syncService.importDocs(tempFiles.docsPath);
1969
+ if (buckets.labels.length > 0)
1970
+ yield* syncService.importLabels(tempFiles.labelsPath);
1971
+ const insertStmt = db.prepare(`INSERT OR IGNORE INTO sync_events (event_id, stream_id, seq, ts, type, entity_id, v, payload, imported_at)
1972
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))`);
1973
+ yield* Effect.try({
1974
+ try: () => {
1975
+ for (const event of loaded.events) {
1976
+ insertStmt.run(event.event_id, event.stream_id, event.seq, event.ts, event.type, event.entity_id, event.v, JSON.stringify(event.payload));
1977
+ }
1978
+ },
1979
+ catch: (cause) => new DatabaseError({ cause })
1980
+ });
1981
+ yield* Effect.try({
1982
+ try: () => {
1983
+ const lastEventAtByStream = new Map();
1984
+ for (const event of loaded.events) {
1985
+ lastEventAtByStream.set(event.stream_id, event.ts);
1986
+ }
1987
+ for (const [streamId, maxSeq] of loaded.maxSeqByStream) {
1988
+ touchStreamProgress(streamId, maxSeq, lastEventAtByStream.get(streamId) ?? null);
1989
+ }
1990
+ },
1991
+ catch: (cause) => new DatabaseError({ cause })
1992
+ });
1993
+ yield* setWatermark(DEFAULT_SYNC_WATERMARK_KEY, new Date().toISOString());
1994
+ yield* setConfig("last_import", new Date().toISOString());
1995
+ }), (_acquire, exit) => Effect.sync(() => {
1996
+ if (Exit.isSuccess(exit)) {
1997
+ try {
1998
+ db.exec("COMMIT");
1639
1999
  }
1640
- catch (e) {
2000
+ catch {
1641
2001
  try {
1642
2002
  db.exec("ROLLBACK");
1643
2003
  }
1644
- catch { /* no active transaction */ }
1645
- throw e;
2004
+ catch { /* ignore */ }
1646
2005
  }
1647
- },
2006
+ }
2007
+ else {
2008
+ try {
2009
+ db.exec("ROLLBACK");
2010
+ }
2011
+ catch { /* ignore */ }
2012
+ }
2013
+ })).pipe(Effect.ensuring(cleanupTempDir(tempFiles.dir)));
2014
+ if (shouldSyncPinsToTargets) {
2015
+ yield* syncPinsToTargetFiles();
2016
+ }
2017
+ return {
2018
+ importedEvents: loaded.events.length,
2019
+ appliedEvents: loaded.events.length,
2020
+ streamCount: loaded.streamCount,
2021
+ };
2022
+ }),
2023
+ hydrate: () => Effect.gen(function* () {
2024
+ const loaded = yield* loadEventsFromStreams("all");
2025
+ if (loaded.events.length === 0) {
2026
+ return { importedEvents: 0, appliedEvents: 0, streamCount: loaded.streamCount, rebuilt: true };
2027
+ }
2028
+ const buckets = bucketEventsToV1Ops(loaded.events);
2029
+ const tempFiles = yield* writeBucketsToTempFiles(buckets);
2030
+ let shouldSyncPinsToTargets = false;
2031
+ yield* Effect.acquireUseRelease(Effect.try({
2032
+ try: () => db.exec("BEGIN"),
1648
2033
  catch: (cause) => new DatabaseError({ cause })
1649
- });
2034
+ }), () => Effect.gen(function* () {
2035
+ const insertStmt = db.prepare(`INSERT OR IGNORE INTO sync_events (event_id, stream_id, seq, ts, type, entity_id, v, payload, imported_at)
2036
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))`);
2037
+ yield* Effect.try({
2038
+ try: () => {
2039
+ for (const event of loaded.events) {
2040
+ insertStmt.run(event.event_id, event.stream_id, event.seq, event.ts, event.type, event.entity_id, event.v, JSON.stringify(event.payload));
2041
+ }
2042
+ },
2043
+ catch: (cause) => new DatabaseError({ cause })
2044
+ });
2045
+ yield* clearMaterializedTables();
2046
+ if (buckets.tasks.length > 0)
2047
+ yield* syncService.importTaskOps(tempFiles.tasksPath);
2048
+ if (buckets.learnings.length > 0)
2049
+ yield* syncService.importLearnings(tempFiles.learningsPath);
2050
+ if (buckets.fileLearnings.length > 0)
2051
+ yield* syncService.importFileLearnings(tempFiles.fileLearningsPath);
2052
+ if (buckets.attempts.length > 0)
2053
+ yield* syncService.importAttempts(tempFiles.attemptsPath);
2054
+ if (buckets.pins.length > 0) {
2055
+ const pinImportResult = yield* syncService.importPins(tempFiles.pinsPath);
2056
+ shouldSyncPinsToTargets = shouldSyncPinsToTargets || pinImportResult.imported > 0;
2057
+ }
2058
+ if (buckets.anchors.length > 0)
2059
+ yield* syncService.importAnchors(tempFiles.anchorsPath);
2060
+ if (buckets.edges.length > 0)
2061
+ yield* syncService.importEdges(tempFiles.edgesPath);
2062
+ if (buckets.docs.length > 0)
2063
+ yield* syncService.importDocs(tempFiles.docsPath);
2064
+ if (buckets.labels.length > 0)
2065
+ yield* syncService.importLabels(tempFiles.labelsPath);
2066
+ yield* Effect.try({
2067
+ try: () => {
2068
+ const lastEventAtByStream = new Map();
2069
+ for (const event of loaded.events) {
2070
+ lastEventAtByStream.set(event.stream_id, event.ts);
2071
+ }
2072
+ for (const [streamId, maxSeq] of loaded.maxSeqByStream) {
2073
+ touchStreamProgress(streamId, maxSeq, lastEventAtByStream.get(streamId) ?? null);
2074
+ }
2075
+ },
2076
+ catch: (cause) => new DatabaseError({ cause })
2077
+ });
2078
+ yield* setWatermark(DEFAULT_SYNC_WATERMARK_KEY, new Date().toISOString());
2079
+ yield* setConfig("last_import", new Date().toISOString());
2080
+ }), (_acquire, exit) => Effect.sync(() => {
2081
+ if (Exit.isSuccess(exit)) {
2082
+ try {
2083
+ db.exec("COMMIT");
2084
+ }
2085
+ catch {
2086
+ try {
2087
+ db.exec("ROLLBACK");
2088
+ }
2089
+ catch { /* ignore */ }
2090
+ }
2091
+ }
2092
+ else {
2093
+ try {
2094
+ db.exec("ROLLBACK");
2095
+ }
2096
+ catch { /* ignore */ }
2097
+ }
2098
+ })).pipe(Effect.ensuring(cleanupTempDir(tempFiles.dir)));
2099
+ if (shouldSyncPinsToTargets) {
2100
+ yield* syncPinsToTargetFiles();
2101
+ }
2102
+ return {
2103
+ importedEvents: loaded.events.length,
2104
+ appliedEvents: loaded.events.length,
2105
+ streamCount: loaded.streamCount,
2106
+ rebuilt: true
2107
+ };
1650
2108
  }),
1651
- exportAll: (options) => Effect.gen(function* () {
1652
- const tasks = yield* syncService.export();
1653
- const learnings = options?.learnings !== false
1654
- ? yield* syncService.exportLearnings()
1655
- : undefined;
1656
- const fileLearnings = options?.fileLearnings !== false
1657
- ? yield* syncService.exportFileLearnings()
1658
- : undefined;
1659
- const attempts = options?.attempts !== false
1660
- ? yield* syncService.exportAttempts()
1661
- : undefined;
1662
- const pins = options?.pins !== false
1663
- ? yield* syncService.exportPins()
1664
- : undefined;
1665
- const anchors = options?.anchors !== false
1666
- ? yield* syncService.exportAnchors()
1667
- : undefined;
1668
- const edges = options?.edges !== false
1669
- ? yield* syncService.exportEdges()
1670
- : undefined;
1671
- const docs = options?.docs !== false
1672
- ? yield* syncService.exportDocs()
1673
- : undefined;
1674
- const labels = options?.labels !== false
1675
- ? yield* syncService.exportLabels()
1676
- : undefined;
1677
- return { tasks, learnings, fileLearnings, attempts, pins, anchors, edges, docs, labels };
2109
+ compact: (path) => Effect.gen(function* () {
2110
+ const filePath = resolve(path ?? DEFAULT_JSONL_PATH);
2111
+ const records = yield* readJsonlRecords(filePath);
2112
+ if (records.length === 0) {
2113
+ return { before: 0, after: 0, path: filePath };
2114
+ }
2115
+ const before = records.length;
2116
+ const taskStates = new Map();
2117
+ const depStates = new Map();
2118
+ for (const record of records) {
2119
+ const op = yield* Effect.try({
2120
+ try: () => Schema.decodeUnknownSync(TaskSyncOperationSchema)(record),
2121
+ catch: (cause) => new ValidationError({ reason: `Schema validation failed: ${cause}` })
2122
+ });
2123
+ if (op.op === "upsert" || op.op === "delete") {
2124
+ const existing = taskStates.get(op.id);
2125
+ if (!existing || compareSyncOrder(op, existing) > 0) {
2126
+ taskStates.set(op.id, { op: op, ts: op.ts, eventId: op.eventId });
2127
+ }
2128
+ }
2129
+ else if (op.op === "dep_add" || op.op === "dep_remove") {
2130
+ const key = `${op.blockerId}:${op.blockedId}`;
2131
+ const existing = depStates.get(key);
2132
+ if (!existing || compareSyncOrder(op, existing) > 0) {
2133
+ depStates.set(key, { op: op, ts: op.ts, eventId: op.eventId });
2134
+ }
2135
+ }
2136
+ }
2137
+ const compacted = [];
2138
+ for (const state of taskStates.values()) {
2139
+ if (state.op.op === "upsert") {
2140
+ compacted.push(state.op);
2141
+ }
2142
+ }
2143
+ for (const state of depStates.values()) {
2144
+ if (state.op.op === "dep_add") {
2145
+ compacted.push(state.op);
2146
+ }
2147
+ }
2148
+ compacted.sort(compareOpOrder);
2149
+ const jsonl = compacted.map(op => JSON.stringify(op)).join("\n");
2150
+ yield* atomicWrite(filePath, jsonl + (jsonl.length > 0 ? "\n" : ""));
2151
+ return { before, after: compacted.length, path: filePath };
1678
2152
  }),
1679
- importAll: (options) => Effect.gen(function* () {
1680
- // Import in dependency order: tasks → learnings → anchors → edges → file-learnings → attempts → pins → docs → labels
1681
- const tasks = yield* syncService.import();
1682
- const learnings = options?.learnings !== false
1683
- ? yield* syncService.importLearnings()
1684
- : undefined;
1685
- // Anchors depend on learnings (FK reference) — skip if learnings are disabled
1686
- const anchors = options?.anchors !== false && options?.learnings !== false
1687
- ? yield* syncService.importAnchors()
1688
- : undefined;
1689
- // Edges are a generic graph layer with no FK dependency on learnings
1690
- const edges = options?.edges !== false
1691
- ? yield* syncService.importEdges()
1692
- : undefined;
1693
- const fileLearnings = options?.fileLearnings !== false
1694
- ? yield* syncService.importFileLearnings()
1695
- : undefined;
1696
- const attempts = options?.attempts !== false
1697
- ? yield* syncService.importAttempts()
1698
- : undefined;
1699
- const pins = options?.pins !== false
1700
- ? yield* syncService.importPins()
1701
- : undefined;
1702
- const docs = options?.docs !== false
1703
- ? yield* syncService.importDocs()
1704
- : undefined;
1705
- const labels = options?.labels !== false
1706
- ? yield* syncService.importLabels()
1707
- : undefined;
1708
- return { tasks, learnings, fileLearnings, attempts, pins, anchors, edges, docs, labels };
2153
// Reports local event-stream status: this stream's identity, sequence
// counters, and storage locations, plus the progress entries for all
// streams known to streamService (presumably one per peer — see listProgress).
stream: () => Effect.gen(function* () {
    const { streamId, nextSeq, lastSeq, eventsDir, configPath } = yield* streamService.getInfo();
    const knownStreams = yield* streamService.listProgress();
    return { streamId, nextSeq, lastSeq, eventsDir, configPath, knownStreams };
})
1710
- };
2165
+ }));
1711
2166
  return syncService;
1712
2167
  }));
1713
- //# sourceMappingURL=sync-service.js.map
2168
+ //# sourceMappingURL=service-impl.js.map