@goondocks/myco 0.20.2 → 0.21.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (216)
  1. package/bin/myco-run +68 -7
  2. package/dist/agent-eval-YK2VP2S4.js +356 -0
  3. package/dist/agent-eval-YK2VP2S4.js.map +1 -0
  4. package/dist/{agent-run-X25Q2A6T.js → agent-run-GEJBD2YD.js} +10 -8
  5. package/dist/{agent-run-X25Q2A6T.js.map → agent-run-GEJBD2YD.js.map} +1 -1
  6. package/dist/{agent-tasks-7B6OFERB.js → agent-tasks-5XSRGTRX.js} +10 -8
  7. package/dist/{agent-tasks-7B6OFERB.js.map → agent-tasks-5XSRGTRX.js.map} +1 -1
  8. package/dist/{chunk-OD4AA7PV.js → chunk-53RPGOEN.js} +56 -8
  9. package/dist/chunk-53RPGOEN.js.map +1 -0
  10. package/dist/chunk-54SXG5HF.js +26 -0
  11. package/dist/chunk-54SXG5HF.js.map +1 -0
  12. package/dist/{chunk-XATDZX7U.js → chunk-6ALVMIB4.js} +19 -5
  13. package/dist/{chunk-XATDZX7U.js.map → chunk-6ALVMIB4.js.map} +1 -1
  14. package/dist/{chunk-FLLBJLHM.js → chunk-6C6QZ4PM.js} +9 -5
  15. package/dist/chunk-6C6QZ4PM.js.map +1 -0
  16. package/dist/{chunk-CCRGY3QW.js → chunk-AUIXX33A.js} +24 -95
  17. package/dist/chunk-AUIXX33A.js.map +1 -0
  18. package/dist/chunk-CISWUP5W.js +101 -0
  19. package/dist/chunk-CISWUP5W.js.map +1 -0
  20. package/dist/{chunk-MYOZLMB2.js → chunk-DTWUHHFI.js} +576 -20
  21. package/dist/chunk-DTWUHHFI.js.map +1 -0
  22. package/dist/chunk-EEOJWLMP.js +582 -0
  23. package/dist/chunk-EEOJWLMP.js.map +1 -0
  24. package/dist/{chunk-XG5RRUYF.js → chunk-ENZR5NG7.js} +2 -2
  25. package/dist/{chunk-6RFZWV4R.js → chunk-FCJ5JV54.js} +1 -1
  26. package/dist/{chunk-6RFZWV4R.js.map → chunk-FCJ5JV54.js.map} +1 -1
  27. package/dist/{chunk-US4LNCAT.js → chunk-IPPMYQ2Y.js} +5 -1
  28. package/dist/chunk-IPPMYQ2Y.js.map +1 -0
  29. package/dist/{chunk-VVNL26WX.js → chunk-KTTSXYEK.js} +22 -10
  30. package/dist/chunk-KTTSXYEK.js.map +1 -0
  31. package/dist/chunk-LQIPXVDH.js +17 -0
  32. package/dist/chunk-LQIPXVDH.js.map +1 -0
  33. package/dist/{chunk-DCSGJ7W4.js → chunk-N2DGFACQ.js} +3 -3
  34. package/dist/chunk-N7Z3LUEZ.js +858 -0
  35. package/dist/chunk-N7Z3LUEZ.js.map +1 -0
  36. package/dist/{chunk-2PDWCDKY.js → chunk-NFO7BRCO.js} +10 -7
  37. package/dist/{chunk-2PDWCDKY.js.map → chunk-NFO7BRCO.js.map} +1 -1
  38. package/dist/{chunk-Q36VMZST.js → chunk-OTQH5KZW.js} +89 -38
  39. package/dist/chunk-OTQH5KZW.js.map +1 -0
  40. package/dist/chunk-OUJSQSKE.js +113 -0
  41. package/dist/chunk-OUJSQSKE.js.map +1 -0
  42. package/dist/chunk-OZ3FBAK5.js +50 -0
  43. package/dist/chunk-OZ3FBAK5.js.map +1 -0
  44. package/dist/chunk-QATYARI5.js +408 -0
  45. package/dist/chunk-QATYARI5.js.map +1 -0
  46. package/dist/{chunk-KESLPBKV.js → chunk-QDLVIW2O.js} +4 -4
  47. package/dist/{chunk-5XIVBO25.js → chunk-QLLBJEM7.js} +6 -2
  48. package/dist/chunk-QLLBJEM7.js.map +1 -0
  49. package/dist/{chunk-EVDQKYCG.js → chunk-RQSJLWP4.js} +13 -2
  50. package/dist/chunk-RQSJLWP4.js.map +1 -0
  51. package/dist/{chunk-BPRIYNLE.js → chunk-TKAJ3JVF.js} +3 -3
  52. package/dist/chunk-TSM6VESW.js +25 -0
  53. package/dist/chunk-TSM6VESW.js.map +1 -0
  54. package/dist/{chunk-6X2ERTQV.js → chunk-USVFEWYL.js} +6 -4
  55. package/dist/{chunk-6X2ERTQV.js.map → chunk-USVFEWYL.js.map} +1 -1
  56. package/dist/{chunk-JZGN33AY.js → chunk-VRI56337.js} +4 -4
  57. package/dist/chunk-X2IRGXGF.js +14103 -0
  58. package/dist/chunk-X2IRGXGF.js.map +1 -0
  59. package/dist/{chunk-FMRZ26U5.js → chunk-X3IGT5RV.js} +5 -2
  60. package/dist/{chunk-FMRZ26U5.js.map → chunk-X3IGT5RV.js.map} +1 -1
  61. package/dist/{chunk-KHT24OWC.js → chunk-YDUOSRGD.js} +8 -94
  62. package/dist/{chunk-KHT24OWC.js.map → chunk-YDUOSRGD.js.map} +1 -1
  63. package/dist/{chunk-NGROSFOH.js → chunk-Z66IT5KL.js} +14 -9
  64. package/dist/chunk-Z66IT5KL.js.map +1 -0
  65. package/dist/{cli-GGPWH4UO.js → cli-HSLIG7EX.js} +50 -43
  66. package/dist/cli-HSLIG7EX.js.map +1 -0
  67. package/dist/{client-YXQUTXVZ.js → client-Z43DNLJH.js} +4 -4
  68. package/dist/{config-OMCYHG2S.js → config-VC4ACP42.js} +6 -4
  69. package/dist/{config-OMCYHG2S.js.map → config-VC4ACP42.js.map} +1 -1
  70. package/dist/{detect-PXNM6TA7.js → detect-7NUD5B5R.js} +2 -2
  71. package/dist/{detect-providers-5KOPZ7J2.js → detect-providers-ILLQZROY.js} +4 -4
  72. package/dist/{doctor-5JXJ36KA.js → doctor-HJCWHAU4.js} +49 -16
  73. package/dist/doctor-HJCWHAU4.js.map +1 -0
  74. package/dist/executor-DO6QFC6G.js +45 -0
  75. package/dist/{init-LMYOVZAV.js → init-4KVK7W2E.js} +16 -14
  76. package/dist/{init-LMYOVZAV.js.map → init-4KVK7W2E.js.map} +1 -1
  77. package/dist/{installer-FS257JRZ.js → installer-N4UTEACX.js} +6 -4
  78. package/dist/{llm-TH4NLIRM.js → llm-AGVEF5XD.js} +5 -4
  79. package/dist/{loader-CQYTFHEW.js → loader-LX7TFRM6.js} +5 -3
  80. package/dist/{loader-NOMBJUPW.js → loader-UDNUMEDA.js} +5 -3
  81. package/dist/{main-YTBVRTBI.js → main-4J4QZZTZ.js} +2518 -656
  82. package/dist/main-4J4QZZTZ.js.map +1 -0
  83. package/dist/{open-HG2DX6RN.js → open-7TXJQM3H.js} +10 -8
  84. package/dist/{open-HG2DX6RN.js.map → open-7TXJQM3H.js.map} +1 -1
  85. package/dist/{post-compact-JSECI44W.js → post-compact-7AEFVCZS.js} +8 -8
  86. package/dist/{post-tool-use-POGPTJBA.js → post-tool-use-TZINWWDH.js} +11 -9
  87. package/dist/post-tool-use-TZINWWDH.js.map +1 -0
  88. package/dist/{post-tool-use-failure-OT7BFWQW.js → post-tool-use-failure-TCFEU2GI.js} +8 -8
  89. package/dist/{pre-compact-OXVODKH4.js → pre-compact-LO2VZCGR.js} +8 -8
  90. package/dist/{provider-check-43LAMSMH.js → provider-check-ZEV5P4KM.js} +4 -4
  91. package/dist/{registry-U4CHXK6R.js → registry-F3THYC5M.js} +6 -4
  92. package/dist/{remove-N7ZPELFU.js → remove-F77AAALE.js} +12 -10
  93. package/dist/{remove-N7ZPELFU.js.map → remove-F77AAALE.js.map} +1 -1
  94. package/dist/{restart-ADG5GBTB.js → restart-UEFDPMLT.js} +11 -9
  95. package/dist/{restart-ADG5GBTB.js.map → restart-UEFDPMLT.js.map} +1 -1
  96. package/dist/{search-AHZEUNRR.js → search-NHNVUAQQ.js} +11 -9
  97. package/dist/{search-AHZEUNRR.js.map → search-NHNVUAQQ.js.map} +1 -1
  98. package/dist/{server-AGVYZVP5.js → server-AZJSTQEK.js} +369 -270
  99. package/dist/server-AZJSTQEK.js.map +1 -0
  100. package/dist/{session-6IU4AXYP.js → session-3HLC5KOD.js} +11 -9
  101. package/dist/{session-6IU4AXYP.js.map → session-3HLC5KOD.js.map} +1 -1
  102. package/dist/{session-end-FT27DWYZ.js → session-end-FS46UARX.js} +7 -7
  103. package/dist/session-start-46KPFV2H.js +134 -0
  104. package/dist/session-start-46KPFV2H.js.map +1 -0
  105. package/dist/{setup-llm-77MP4I2G.js → setup-llm-JMWSNQ2C.js} +11 -9
  106. package/dist/{setup-llm-77MP4I2G.js.map → setup-llm-JMWSNQ2C.js.map} +1 -1
  107. package/dist/src/agent/definitions/agent.yaml +9 -5
  108. package/dist/src/agent/definitions/tasks/cortex-instructions.yaml +115 -0
  109. package/dist/src/agent/definitions/tasks/cortex-prompt-builder.yaml +67 -0
  110. package/dist/src/agent/definitions/tasks/digest-only.yaml +1 -1
  111. package/dist/src/agent/definitions/tasks/extract-only.yaml +1 -1
  112. package/dist/src/agent/definitions/tasks/review-session.yaml +10 -39
  113. package/dist/src/agent/definitions/tasks/skill-evolve.yaml +181 -25
  114. package/dist/src/agent/definitions/tasks/skill-generate.yaml +21 -7
  115. package/dist/src/agent/definitions/tasks/skill-survey.yaml +2 -6
  116. package/dist/src/agent/definitions/tasks/supersession-sweep.yaml +1 -1
  117. package/dist/src/agent/definitions/tasks/title-summary.yaml +12 -19
  118. package/dist/src/agent/definitions/tasks/{full-intelligence.yaml → vault-evolve.yaml} +74 -129
  119. package/dist/src/agent/definitions/tasks/vault-seed.yaml +370 -0
  120. package/dist/src/agent/prompts/agent.md +12 -38
  121. package/dist/src/cli.js +1 -1
  122. package/dist/src/daemon/main.js +1 -1
  123. package/dist/src/hooks/post-tool-use.js +1 -1
  124. package/dist/src/hooks/session-end.js +1 -1
  125. package/dist/src/hooks/session-start.js +1 -1
  126. package/dist/src/hooks/stop.js +1 -1
  127. package/dist/src/hooks/user-prompt-submit.js +1 -1
  128. package/dist/src/mcp/server.js +1 -1
  129. package/dist/src/symbionts/manifests/claude-code.yaml +4 -0
  130. package/dist/src/symbionts/manifests/opencode.yaml +7 -0
  131. package/dist/src/symbionts/manifests/pi.yaml +22 -0
  132. package/dist/src/symbionts/templates/agents-starter.md +1 -1
  133. package/dist/src/symbionts/templates/pi/package.json +6 -0
  134. package/dist/src/symbionts/templates/pi/plugin.ts +559 -0
  135. package/dist/{stats-NVPWOYTE.js → stats-MKMETHMA.js} +11 -9
  136. package/dist/{stats-NVPWOYTE.js.map → stats-MKMETHMA.js.map} +1 -1
  137. package/dist/{stop-ZPIKVLH4.js → stop-OUEX6KA4.js} +7 -7
  138. package/dist/{stop-failure-2PX67YJC.js → stop-failure-2BWVNZEG.js} +8 -8
  139. package/dist/{subagent-start-UUE6EHQD.js → subagent-start-J4VV6DEE.js} +8 -8
  140. package/dist/{subagent-stop-KQWWWPE6.js → subagent-stop-JMLVEPIA.js} +8 -8
  141. package/dist/{task-completed-WMHOFQ7B.js → task-completed-65CHMMKA.js} +8 -8
  142. package/dist/{team-LRZ6GTQK.js → team-U2LDKIS4.js} +7 -5
  143. package/dist/{turns-YFNI5CQC.js → turns-HU2CTZAP.js} +2 -2
  144. package/dist/ui/assets/index-BUGor9dk.js +842 -0
  145. package/dist/ui/assets/index-_OP4ifzH.css +1 -0
  146. package/dist/ui/index.html +2 -2
  147. package/dist/{update-O6V4RC4W.js → update-ZSHVXWSQ.js} +12 -10
  148. package/dist/{update-O6V4RC4W.js.map → update-ZSHVXWSQ.js.map} +1 -1
  149. package/dist/{user-prompt-submit-N36KUPHI.js → user-prompt-submit-APMO6FVU.js} +10 -9
  150. package/dist/{user-prompt-submit-N36KUPHI.js.map → user-prompt-submit-APMO6FVU.js.map} +1 -1
  151. package/dist/{verify-LXPV7NYG.js → verify-R76ZFJSZ.js} +8 -5
  152. package/dist/{verify-LXPV7NYG.js.map → verify-R76ZFJSZ.js.map} +1 -1
  153. package/dist/{version-XMPPJQHR.js → version-TXPPS3L5.js} +2 -2
  154. package/dist/version-TXPPS3L5.js.map +1 -0
  155. package/package.json +3 -1
  156. package/skills/myco/SKILL.md +16 -1
  157. package/skills/myco/references/cli-usage.md +1 -1
  158. package/skills/myco-rules/SKILL.md +94 -0
  159. package/skills/{rules → myco-rules}/references/rules-bad-example.md +1 -1
  160. package/skills/{rules → myco-rules}/references/rules-good-example.md +1 -1
  161. package/dist/chunk-4YFKBL3F.js +0 -195
  162. package/dist/chunk-4YFKBL3F.js.map +0 -1
  163. package/dist/chunk-5XIVBO25.js.map +0 -1
  164. package/dist/chunk-CCRGY3QW.js.map +0 -1
  165. package/dist/chunk-CUDIZJY7.js +0 -36
  166. package/dist/chunk-CUDIZJY7.js.map +0 -1
  167. package/dist/chunk-EVDQKYCG.js.map +0 -1
  168. package/dist/chunk-FLLBJLHM.js.map +0 -1
  169. package/dist/chunk-MYOZLMB2.js.map +0 -1
  170. package/dist/chunk-NGROSFOH.js.map +0 -1
  171. package/dist/chunk-OD4AA7PV.js.map +0 -1
  172. package/dist/chunk-Q36VMZST.js.map +0 -1
  173. package/dist/chunk-US4LNCAT.js.map +0 -1
  174. package/dist/chunk-UYMFCYBF.js +0 -2326
  175. package/dist/chunk-UYMFCYBF.js.map +0 -1
  176. package/dist/chunk-VVNL26WX.js.map +0 -1
  177. package/dist/cli-GGPWH4UO.js.map +0 -1
  178. package/dist/doctor-5JXJ36KA.js.map +0 -1
  179. package/dist/executor-HWW2QNZQ.js +0 -2472
  180. package/dist/executor-HWW2QNZQ.js.map +0 -1
  181. package/dist/main-YTBVRTBI.js.map +0 -1
  182. package/dist/post-tool-use-POGPTJBA.js.map +0 -1
  183. package/dist/server-AGVYZVP5.js.map +0 -1
  184. package/dist/session-start-LAFICHII.js +0 -189
  185. package/dist/session-start-LAFICHII.js.map +0 -1
  186. package/dist/src/agent/definitions/tasks/graph-maintenance.yaml +0 -93
  187. package/dist/ui/assets/index-C2JuNtRB.css +0 -1
  188. package/dist/ui/assets/index-JLVaQKV2.js +0 -832
  189. package/skills/myco-curate/SKILL.md +0 -86
  190. package/skills/rules/SKILL.md +0 -214
  191. /package/dist/{chunk-XG5RRUYF.js.map → chunk-ENZR5NG7.js.map} +0 -0
  192. /package/dist/{chunk-DCSGJ7W4.js.map → chunk-N2DGFACQ.js.map} +0 -0
  193. /package/dist/{chunk-KESLPBKV.js.map → chunk-QDLVIW2O.js.map} +0 -0
  194. /package/dist/{chunk-BPRIYNLE.js.map → chunk-TKAJ3JVF.js.map} +0 -0
  195. /package/dist/{chunk-JZGN33AY.js.map → chunk-VRI56337.js.map} +0 -0
  196. /package/dist/{client-YXQUTXVZ.js.map → client-Z43DNLJH.js.map} +0 -0
  197. /package/dist/{detect-PXNM6TA7.js.map → detect-7NUD5B5R.js.map} +0 -0
  198. /package/dist/{detect-providers-5KOPZ7J2.js.map → detect-providers-ILLQZROY.js.map} +0 -0
  199. /package/dist/{installer-FS257JRZ.js.map → executor-DO6QFC6G.js.map} +0 -0
  200. /package/dist/{llm-TH4NLIRM.js.map → installer-N4UTEACX.js.map} +0 -0
  201. /package/dist/{loader-CQYTFHEW.js.map → llm-AGVEF5XD.js.map} +0 -0
  202. /package/dist/{loader-NOMBJUPW.js.map → loader-LX7TFRM6.js.map} +0 -0
  203. /package/dist/{provider-check-43LAMSMH.js.map → loader-UDNUMEDA.js.map} +0 -0
  204. /package/dist/{post-compact-JSECI44W.js.map → post-compact-7AEFVCZS.js.map} +0 -0
  205. /package/dist/{post-tool-use-failure-OT7BFWQW.js.map → post-tool-use-failure-TCFEU2GI.js.map} +0 -0
  206. /package/dist/{pre-compact-OXVODKH4.js.map → pre-compact-LO2VZCGR.js.map} +0 -0
  207. /package/dist/{registry-U4CHXK6R.js.map → provider-check-ZEV5P4KM.js.map} +0 -0
  208. /package/dist/{team-LRZ6GTQK.js.map → registry-F3THYC5M.js.map} +0 -0
  209. /package/dist/{session-end-FT27DWYZ.js.map → session-end-FS46UARX.js.map} +0 -0
  210. /package/dist/{stop-ZPIKVLH4.js.map → stop-OUEX6KA4.js.map} +0 -0
  211. /package/dist/{stop-failure-2PX67YJC.js.map → stop-failure-2BWVNZEG.js.map} +0 -0
  212. /package/dist/{subagent-start-UUE6EHQD.js.map → subagent-start-J4VV6DEE.js.map} +0 -0
  213. /package/dist/{subagent-stop-KQWWWPE6.js.map → subagent-stop-JMLVEPIA.js.map} +0 -0
  214. /package/dist/{task-completed-WMHOFQ7B.js.map → task-completed-65CHMMKA.js.map} +0 -0
  215. /package/dist/{turns-YFNI5CQC.js.map → team-U2LDKIS4.js.map} +0 -0
  216. /package/dist/{version-XMPPJQHR.js.map → turns-HU2CTZAP.js.map} +0 -0
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/daemon/team-context.ts","../src/db/queries/team-outbox.ts","../src/db/queries/sessions.ts"],"sourcesContent":["/**\n * Module-level state for team sync.\n *\n * Initialized once by the daemon on startup. Query modules import\n * `isTeamSyncEnabled()` and `getTeamMachineId()` to decide whether\n * to enqueue outbox records on write.\n */\n\nimport { SYNC_PROTOCOL_VERSION, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Module state\n// ---------------------------------------------------------------------------\n\nlet teamSyncEnabled = false;\nlet teamMachineId = DEFAULT_MACHINE_ID;\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Initialize team context. Called once on daemon startup.\n */\nexport function initTeamContext(enabled: boolean, machineId: string): void {\n teamSyncEnabled = enabled;\n teamMachineId = machineId;\n}\n\n/**\n * Whether team sync is currently enabled.\n *\n * Query modules check this before enqueuing outbox records.\n */\nexport function isTeamSyncEnabled(): boolean {\n return teamSyncEnabled;\n}\n\n/**\n * The machine ID for this instance.\n */\nexport function getTeamMachineId(): string {\n return teamMachineId;\n}\n\n/**\n * The sync protocol version in use.\n */\nexport function getTeamSyncProtocolVersion(): number {\n return SYNC_PROTOCOL_VERSION;\n}\n\n/**\n * Reset team context (for testing).\n */\nexport function resetTeamContext(): void {\n teamSyncEnabled = false;\n teamMachineId = DEFAULT_MACHINE_ID;\n}\n","/**\n * Team outbox CRUD query helpers.\n *\n * The outbox pattern: write paths enqueue records here when team sync is enabled.\n * The sync client flushes pending records in batches to the Cloudflare Worker.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { isTeamSyncEnabled, getTeamMachineId } from '@myco/daemon/team-context.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Max records returned per listPending call. */\nconst BURST_BATCH_SIZE = 200;\n\n/** Age in seconds after which sent records are pruned (24 hours). */\nconst SENT_PRUNE_AGE_SECONDS = 86_400;\n\n/** Max retry attempts before a record is dead-lettered. */\nexport const MAX_OUTBOX_RETRIES = 10;\n\n/** Milliseconds-per-second multiplier for epoch math. */\nconst MS_PER_SECOND = 1000;\n\n/**\n * Tables that are intentionally *local-only* and must never be enqueued for\n * team sync. Attempting to enqueue one of these is a programming error and\n * throws so the bug surfaces at the call site instead of silently syncing\n * private state to the team.\n *\n * Add future local-only tables here (e.g. transient operational caches,\n * per-machine skill lookup indexes) alongside a comment describing why the\n * table is local-only.\n */\nexport const LOCAL_ONLY_OUTBOX_TABLES = new Set<string>([\n // Cortex instructions: per-machine operating guidance generated from local\n // digest substrate. Removed from team sync at schema v19. 
See\n // migrateV18ToV19 for the corresponding safety-net DELETE.\n 'cortex_instructions',\n]);\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when enqueuing an outbox record. */\nexport interface OutboxInsert {\n table_name: string;\n row_id: string;\n operation?: string;\n payload: string;\n machine_id: string;\n created_at: number;\n}\n\n/** Row shape returned from outbox queries. */\nexport interface OutboxRow {\n id: number;\n table_name: string;\n row_id: string;\n operation: string;\n payload: string;\n machine_id: string;\n created_at: number;\n sent_at: number | null;\n retry_count: number;\n last_attempt_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst OUTBOX_COLUMNS = [\n 'id',\n 'table_name',\n 'row_id',\n 'operation',\n 'payload',\n 'machine_id',\n 'created_at',\n 'sent_at',\n 'retry_count',\n 'last_attempt_at',\n] as const;\n\nconst SELECT_COLUMNS = OUTBOX_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed OutboxRow. */\nfunction toOutboxRow(row: Record<string, unknown>): OutboxRow {\n return {\n id: row.id as number,\n table_name: row.table_name as string,\n row_id: row.row_id as string,\n operation: row.operation as string,\n payload: row.payload as string,\n machine_id: row.machine_id as string,\n created_at: row.created_at as number,\n sent_at: (row.sent_at as number) ?? null,\n retry_count: (row.retry_count as number) ?? 0,\n last_attempt_at: (row.last_attempt_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Convenience helper — used by query modules\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a row for team sync if sync is enabled.\n *\n * Centralizes the if-enabled / enqueue / serialize pattern that every\n * write-path query module previously duplicated inline.\n */\nexport function syncRow(tableName: string, row: { id: string | number; created_at?: number }): void {\n if (!isTeamSyncEnabled()) return;\n enqueueOutbox({\n table_name: tableName,\n row_id: String(row.id),\n payload: JSON.stringify(row),\n machine_id: getTeamMachineId(),\n created_at: row.created_at ?? Math.floor(Date.now() / 1000),\n });\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a record into the team outbox for later sync.\n *\n * Inserted with `sent_at = NULL` (pending). Rejects attempts to enqueue\n * tables listed in `LOCAL_ONLY_OUTBOX_TABLES` so private per-machine data\n * can never leak into team sync via a stray call site. 
Finding #58.\n */\nexport function enqueueOutbox(data: OutboxInsert): OutboxRow {\n if (LOCAL_ONLY_OUTBOX_TABLES.has(data.table_name)) {\n throw new Error(\n `enqueueOutbox: table '${data.table_name}' is local-only and must not be synced`,\n );\n }\n\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO team_outbox (\n table_name, row_id, operation, payload, machine_id, created_at\n ) VALUES (?, ?, ?, ?, ?, ?)`,\n ).run(\n data.table_name,\n data.row_id,\n data.operation ?? 'upsert',\n data.payload,\n data.machine_id,\n data.created_at,\n );\n\n const id = Number(info.lastInsertRowid);\n\n return toOutboxRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * List pending outbox records (oldest-first).\n *\n * Uses burst sizing: fetches BURST_BATCH_SIZE rows and returns them all.\n * If fewer than BURST_THRESHOLD rows come back, callers get a normal-size\n * batch; if more, the full burst. This avoids a separate COUNT query.\n */\nexport function listPending(limit?: number): OutboxRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM team_outbox\n WHERE sent_at IS NULL AND retry_count < ?\n ORDER BY created_at ASC\n LIMIT ?`,\n ).all(MAX_OUTBOX_RETRIES, limit ?? BURST_BATCH_SIZE) as Record<string, unknown>[];\n\n return rows.map(toOutboxRow);\n}\n\n/**\n * Mark outbox records as sent by setting sent_at.\n */\nexport function markSent(ids: number[], sentAt: number): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = ?\n WHERE id IN (${placeholders})`,\n ).run(sentAt, ...ids);\n}\n\n/**\n * Reset sent_at to NULL for records that need to be retried.\n *\n * This allows the sync client to re-enqueue specific records for retry.\n */\nexport function markForRetry(ids: number[]): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = NULL\n WHERE id IN (${placeholders})`,\n ).run(...ids);\n}\n\n/**\n * Increment retry_count and set last_attempt_at for failed outbox records.\n *\n * @returns IDs of records that have now reached MAX_OUTBOX_RETRIES (newly dead-lettered).\n */\nexport function incrementRetryCount(ids: number[], attemptAt: number): number[] {\n if (ids.length === 0) return [];\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET retry_count = retry_count + 1, last_attempt_at = ?\n WHERE id IN (${placeholders})`,\n ).run(attemptAt, ...ids);\n\n // Return IDs that just hit the threshold\n const deadLettered = db.prepare(\n `SELECT id FROM team_outbox\n WHERE id IN (${placeholders}) AND retry_count >= ?`,\n ).all(...ids, MAX_OUTBOX_RETRIES) as Array<{ id: number }>;\n\n return deadLettered.map((r) => r.id);\n}\n\n/**\n * Reset all dead-lettered records back to pending for retry.\n *\n * @returns the number of records reset.\n */\nexport function retryDeadLettered(): number {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE team_outbox\n SET retry_count = 0, last_attempt_at = NULL\n WHERE sent_at IS NULL AND retry_count >= ?`,\n ).run(MAX_OUTBOX_RETRIES);\n\n return info.changes;\n}\n\n/**\n * Count dead-lettered outbox records (exceeded max retries, never sent).\n */\nexport function countDeadLettered(): number {\n 
const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count >= ?`,\n ).get(MAX_OUTBOX_RETRIES) as { count: number };\n\n return row.count;\n}\n\n/**\n * Prune old outbox records.\n *\n * Removes sent records older than 24 hours.\n *\n * @returns the number of records deleted.\n */\nexport function pruneOld(): number {\n const db = getDatabase();\n const cutoff = Math.floor(Date.now() / MS_PER_SECOND) - SENT_PRUNE_AGE_SECONDS;\n\n const info = db.prepare(\n `DELETE FROM team_outbox\n WHERE sent_at IS NOT NULL AND sent_at < ?`,\n ).run(cutoff);\n\n return info.changes;\n}\n\n/**\n * Count pending (unsent) outbox records.\n */\nexport function countPending(): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count < ?`,\n ).get(MAX_OUTBOX_RETRIES) as { count: number };\n\n return row.count;\n}\n\n// ---------------------------------------------------------------------------\n// Source-row sync bookkeeping\n// ---------------------------------------------------------------------------\n\n/** Tables eligible for backfill/sync (must have id, machine_id, synced_at columns). */\nconst BACKFILL_TABLES = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n 'skill_candidates',\n 'skill_records',\n] as const;\n// entity_mentions excluded — no `id` column (composite key entity_id+note_id+note_type)\n// skill_usage excluded — no `synced_at` column (syncs via syncRow on insert)\n\nconst BACKFILL_TABLE_SET = new Set<string>(BACKFILL_TABLES);\n\n/**\n * Mark source rows as synced after successful outbox flush.\n *\n * Groups outbox records by table_name, then sets `synced_at` on the\n * corresponding source rows. This closes the re-enqueue loop: once\n * synced_at is non-NULL, `backfillUnsynced` skips the row even after\n * the outbox entry is pruned.\n */\nexport function markSourceRowsSynced(records: OutboxRow[], syncedAt: number): void {\n const db = getDatabase();\n\n // Group row_ids by table\n const byTable = new Map<string, string[]>();\n for (const rec of records) {\n if (!BACKFILL_TABLE_SET.has(rec.table_name)) continue;\n const ids = byTable.get(rec.table_name) ?? [];\n ids.push(rec.row_id);\n byTable.set(rec.table_name, ids);\n }\n\n for (const [table, ids] of byTable) {\n const placeholders = ids.map(() => '?').join(', ');\n db.prepare(\n `UPDATE ${table} SET synced_at = ? WHERE id IN (${placeholders}) AND synced_at IS NULL`,\n ).run(syncedAt, ...ids);\n }\n}\n\n// ---------------------------------------------------------------------------\n// Backfill\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue all unsynced records across all synced tables into the outbox.\n *\n * Scans each table for rows where `synced_at IS NULL`, serializes the full\n * row as JSON, and inserts into the outbox. 
Idempotent — re-running only\n * picks up rows not yet in the outbox (checked via existing outbox entries).\n *\n * @returns the total number of records enqueued.\n */\nexport function backfillUnsynced(machineId: string): number {\n const db = getDatabase();\n let total = 0;\n\n const now = Math.floor(Date.now() / MS_PER_SECOND);\n\n // Process one table at a time in separate transactions to avoid long locks\n for (const table of BACKFILL_TABLES) {\n const rows = db.prepare(\n `SELECT * FROM ${table}\n WHERE synced_at IS NULL\n AND NOT EXISTS (\n SELECT 1 FROM team_outbox\n WHERE team_outbox.table_name = ? AND team_outbox.row_id = CAST(${table}.id AS TEXT)\n )`,\n ).all(table) as Record<string, unknown>[];\n\n if (rows.length === 0) continue;\n\n const insertBatch = db.transaction((batchRows: Record<string, unknown>[]) => {\n const stmt = db.prepare(\n `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)\n VALUES (?, ?, 'upsert', ?, ?, ?)`,\n );\n for (const row of batchRows) {\n stmt.run(table, String(row.id), JSON.stringify(row), machineId, now);\n }\n });\n\n insertBatch(rows);\n total += rows.length;\n }\n\n return total;\n}\n\n","/**\n * Session CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of sessions returned by listSessions when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Session status value when a session is closed normally. */\nconst STATUS_COMPLETED = 'completed';\n\n/** Default session status for new sessions. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default prompt count for new sessions. */\nconst DEFAULT_PROMPT_COUNT = 0;\n\n/** Default tool count for new sessions. */\nconst DEFAULT_TOOL_COUNT = 0;\n\n/** Default processed flag for new sessions. */\nconst DEFAULT_PROCESSED = 0;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting/upserting a session. */\nexport interface SessionInsert {\n id: string;\n agent: string;\n started_at: number;\n created_at: number;\n user?: string | null;\n project_root?: string | null;\n branch?: string | null;\n ended_at?: number | null;\n status?: string;\n prompt_count?: number;\n tool_count?: number;\n title?: string | null;\n summary?: string | null;\n transcript_path?: string | null;\n parent_session_id?: string | null;\n parent_session_reason?: string | null;\n processed?: number;\n content_hash?: string | null;\n machine_id?: string;\n}\n\n/** Row shape returned from session queries (all columns). 
*/\nexport interface SessionRow {\n id: string;\n agent: string;\n user: string | null;\n project_root: string | null;\n branch: string | null;\n started_at: number;\n ended_at: number | null;\n status: string;\n prompt_count: number;\n tool_count: number;\n title: string | null;\n summary: string | null;\n transcript_path: string | null;\n parent_session_id: string | null;\n parent_session_reason: string | null;\n processed: number;\n content_hash: string | null;\n embedded: number;\n created_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Updatable fields for `updateSession`. */\nexport interface SessionUpdate {\n agent?: string;\n user?: string | null;\n project_root?: string | null;\n branch?: string | null;\n ended_at?: number | null;\n status?: string;\n prompt_count?: number;\n tool_count?: number;\n title?: string | null;\n summary?: string | null;\n transcript_path?: string | null;\n parent_session_id?: string | null;\n parent_session_reason?: string | null;\n processed?: number;\n content_hash?: string | null;\n}\n\n/** Filter options for `listSessions`. */\nexport interface ListSessionsOptions {\n limit?: number;\n offset?: number;\n status?: string;\n agent?: string;\n search?: string;\n /** Filter to sessions that ran on this git branch. */\n branch?: string;\n /** Filter to sessions authored by this user. */\n user?: string;\n /** Filter to this exact session id (used for plan→session resolution). */\n id?: string;\n /** Only return sessions created after this epoch-seconds timestamp. */\n since?: number;\n /**\n * When explicitly `false` and no `status` filter is set, exclude sessions\n * still in `status = 'active'` — intelligence-task reads opt in to this.\n * Defaults permissive so UI listings keep showing in-flight sessions.\n */\n includeActive?: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst SESSION_COLUMNS = [\n 'id',\n 'agent',\n '\"user\"',\n 'project_root',\n 'branch',\n 'started_at',\n 'ended_at',\n 'status',\n 'prompt_count',\n 'tool_count',\n 'title',\n 'summary',\n 'transcript_path',\n 'parent_session_id',\n 'parent_session_reason',\n 'processed',\n 'content_hash',\n 'embedded',\n 'created_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = SESSION_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Normalize a SQLite result row into a typed SessionRow.\n *\n * The quoted \"user\" column comes back as `user` in the result object.\n */\nfunction toSessionRow(row: Record<string, unknown>): SessionRow {\n return {\n id: row.id as string,\n agent: row.agent as string,\n user: (row.user as string) ?? null,\n project_root: (row.project_root as string) ?? null,\n branch: (row.branch as string) ?? null,\n started_at: row.started_at as number,\n ended_at: (row.ended_at as number) ?? null,\n status: row.status as string,\n prompt_count: row.prompt_count as number,\n tool_count: row.tool_count as number,\n title: (row.title as string) ?? null,\n summary: (row.summary as string) ?? null,\n transcript_path: (row.transcript_path as string) ?? null,\n parent_session_id: (row.parent_session_id as string) ?? null,\n parent_session_reason: (row.parent_session_reason as string) ?? 
null,\n processed: row.processed as number,\n content_hash: (row.content_hash as string) ?? null,\n embedded: (row.embedded as number) ?? 0,\n created_at: row.created_at as number,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a session or update it if the id already exists.\n *\n * On conflict the row is updated with the values from `data`, preserving\n * any columns not supplied via COALESCE with EXCLUDED values.\n */\nexport function upsertSession(data: SessionInsert): SessionRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO sessions (\n id, agent, \"user\", project_root, branch,\n started_at, ended_at, status, prompt_count, tool_count,\n title, summary, transcript_path,\n parent_session_id, parent_session_reason,\n processed, content_hash, created_at, machine_id\n ) VALUES (\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?, ?,\n ?, ?, ?,\n ?, ?,\n ?, ?, ?, ?\n )\n ON CONFLICT (id) DO UPDATE SET\n agent = EXCLUDED.agent,\n \"user\" = EXCLUDED.\"user\",\n project_root = EXCLUDED.project_root,\n branch = EXCLUDED.branch,\n started_at = EXCLUDED.started_at,\n ended_at = COALESCE(EXCLUDED.ended_at, sessions.ended_at),\n status = COALESCE(EXCLUDED.status, sessions.status),\n prompt_count = CASE WHEN ? THEN EXCLUDED.prompt_count ELSE sessions.prompt_count END,\n tool_count = CASE WHEN ? THEN EXCLUDED.tool_count ELSE sessions.tool_count END,\n title = COALESCE(EXCLUDED.title, sessions.title),\n summary = COALESCE(EXCLUDED.summary, sessions.summary),\n transcript_path = COALESCE(EXCLUDED.transcript_path, sessions.transcript_path),\n parent_session_id = EXCLUDED.parent_session_id,\n parent_session_reason = EXCLUDED.parent_session_reason,\n processed = COALESCE(EXCLUDED.processed, sessions.processed),\n content_hash = EXCLUDED.content_hash`,\n ).run(\n data.id,\n data.agent,\n data.user ?? null,\n data.project_root ?? null,\n data.branch ?? null,\n data.started_at,\n data.ended_at ?? null,\n data.status ?? DEFAULT_STATUS,\n data.prompt_count ?? DEFAULT_PROMPT_COUNT,\n data.tool_count ?? DEFAULT_TOOL_COUNT,\n data.title ?? null,\n data.summary ?? null,\n data.transcript_path ?? null,\n data.parent_session_id ?? null,\n data.parent_session_reason ?? null,\n data.processed ?? DEFAULT_PROCESSED,\n data.content_hash ?? null,\n data.created_at,\n data.machine_id ?? getTeamMachineId(),\n data.prompt_count !== undefined ? 1 : 0,\n data.tool_count !== undefined ? 1 : 0,\n );\n\n const row = toSessionRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n syncRow('sessions', row);\n\n return row;\n}\n\n/**\n * Retrieve a single session by id.\n *\n * @returns the session row, or null if not found.\n */\nexport function getSession(id: string): SessionRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toSessionRow(row);\n}\n\n/** Build WHERE clause and bound params from session filter options. 
*/\nfunction buildSessionsWhere(\n options: Omit<ListSessionsOptions, 'limit' | 'offset'>,\n): { where: string; params: unknown[] } {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n\n if (options.agent !== undefined) {\n conditions.push(`agent = ?`);\n params.push(options.agent);\n }\n\n if (options.branch !== undefined) {\n conditions.push(`branch = ?`);\n params.push(options.branch);\n }\n\n if (options.user !== undefined) {\n conditions.push(`\"user\" = ?`);\n params.push(options.user);\n }\n\n if (options.id !== undefined) {\n conditions.push(`id = ?`);\n params.push(options.id);\n }\n\n if (options.search !== undefined && options.search.length > 0) {\n conditions.push(`(title LIKE ? OR id LIKE ?)`);\n const pattern = `%${options.search}%`;\n params.push(pattern, pattern);\n }\n if (options.since !== undefined) {\n conditions.push('created_at > ?');\n params.push(options.since);\n }\n\n // Exclude active sessions only when the caller explicitly opts in and\n // hasn't already constrained `status`. Intelligence-task reads set this\n // to avoid picking up in-flight work; UI/CLI leave it unset.\n if (options.includeActive === false && options.status === undefined) {\n conditions.push(`status != 'active'`);\n }\n\n return {\n where: conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '',\n params,\n };\n}\n\n/**\n * List sessions with optional filters, ordered by created_at DESC.\n */\nexport function listSessions(\n options: ListSessionsOptions = {},\n): SessionRow[] {\n const db = getDatabase();\n const { where, params } = buildSessionsWhere(options);\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n const offset = options.offset ?? 0;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM sessions\n ${where}\n ORDER BY created_at DESC\n LIMIT ?\n OFFSET ?`,\n ).all(...params, limit, offset) as Record<string, unknown>[];\n\n return rows.map(toSessionRow);\n}\n\n/**\n * Count sessions matching optional filters (for pagination totals).\n */\nexport function countSessions(\n options: Omit<ListSessionsOptions, 'limit' | 'offset'> = {},\n): number {\n const db = getDatabase();\n const { where, params } = buildSessionsWhere(options);\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM sessions ${where}`,\n ).get(...params) as { count: number };\n\n return row.count;\n}\n\n/**\n * Return the set of session IDs currently in `status = 'active'`.\n *\n * Used by the semantic-search path, which can't apply a SQL join against\n * session status (the vector store is a separate concern), so it filters\n * results in-memory against this set instead. Bounded by the number of\n * concurrent in-flight sessions — typically small.\n */\nexport function getActiveSessionIds(): Set<string> {\n const db = getDatabase();\n const rows = db.prepare(\n `SELECT id FROM sessions WHERE status = 'active'`,\n ).all() as Array<{ id: string }>;\n return new Set(rows.map((r) => r.id));\n}\n\n/**\n * Flip a session back to `status = 'active'` if it's currently `'completed'`.\n *\n * Called on live user activity (`user_prompt` events) so a session that was\n * auto-completed by the stale sweep or manually completed via the API snaps\n * back to active transparently when the user resumes. 
No-op for sessions\n * that are already active or don't exist.\n *\n * The `ended_at` column is intentionally preserved — it records the most\n * recent completion time, and the next completion will overwrite it.\n *\n * @returns true if a row was updated (session was completed and is now active)\n */\nexport function reactivateSessionIfCompleted(id: string): boolean {\n const db = getDatabase();\n const info = db.prepare(\n `UPDATE sessions SET status = 'active' WHERE id = ? AND status = 'completed'`,\n ).run(id);\n if (info.changes === 0) return false;\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n if (row) syncRow('sessions', toSessionRow(row));\n\n return true;\n}\n\n/**\n * Update specific fields on an existing session.\n *\n * @returns the updated row, or null if the session does not exist.\n */\nexport function updateSession(\n id: string,\n updates: SessionUpdate,\n): SessionRow | null {\n const db = getDatabase();\n\n const setClauses: string[] = [];\n const params: unknown[] = [];\n\n const fieldMap: Record<string, string> = {\n agent: 'agent',\n user: '\"user\"',\n project_root: 'project_root',\n branch: 'branch',\n ended_at: 'ended_at',\n status: 'status',\n prompt_count: 'prompt_count',\n tool_count: 'tool_count',\n title: 'title',\n summary: 'summary',\n transcript_path: 'transcript_path',\n parent_session_id: 'parent_session_id',\n parent_session_reason: 'parent_session_reason',\n processed: 'processed',\n content_hash: 'content_hash',\n };\n\n for (const [key, column] of Object.entries(fieldMap)) {\n if (key in updates) {\n setClauses.push(`${column} = ?`);\n params.push((updates as Record<string, unknown>)[key] ?? null);\n }\n }\n\n if (setClauses.length === 0) return getSession(id);\n\n params.push(id);\n\n db.prepare(\n `UPDATE sessions\n SET ${setClauses.join(', ')}\n WHERE id = ?`,\n ).run(...params);\n\n const updated = getSession(id);\n\n if (updated) syncRow('sessions', updated);\n\n return updated;\n}\n\n/**\n * Atomically increment tool_count for a session.\n *\n * Uses SQL `tool_count + 1` to avoid read-modify-write races.\n */\nexport function incrementSessionToolCount(id: string): void {\n const db = getDatabase();\n db.prepare(\n `UPDATE sessions SET tool_count = COALESCE(tool_count, 0) + 1 WHERE id = ?`,\n ).run(id);\n}\n\n/**\n * Close a session — set status to 'completed' and record the end time.\n *\n * @returns the updated row, or null if the session does not exist.\n */\nexport function closeSession(\n id: string,\n endedAt: number,\n): SessionRow | null {\n const db = getDatabase();\n\n db.prepare(\n `UPDATE sessions\n SET status = ?, ended_at = ?\n WHERE id = ?`,\n ).run(STATUS_COMPLETED, endedAt, id);\n\n const closed = getSession(id);\n\n if (closed) syncRow('sessions', closed);\n\n return closed;\n}\n\n/**\n * Delete a session and all its child rows (batches, activities, attachments).\n *\n * No ON DELETE CASCADE in the schema, so we delete children first.\n * Returns true if the session existed and was deleted.\n */\nexport function deleteSession(id: string): boolean {\n const db = getDatabase();\n\n db.prepare(`DELETE FROM activities WHERE session_id = ?`).run(id);\n db.prepare(`DELETE FROM attachments WHERE session_id = ?`).run(id);\n db.prepare(`DELETE FROM prompt_batches WHERE session_id = ?`).run(id);\n const info = db.prepare(`DELETE FROM sessions WHERE id = ?`).run(id);\n\n return info.changes > 0;\n}\n\n// 
---------------------------------------------------------------------------\n// Cascade delete + impact query\n// ---------------------------------------------------------------------------\n\n/** Counts of related data that would be affected by a session delete. */\nexport interface SessionImpact {\n promptCount: number;\n sporeCount: number;\n attachmentCount: number;\n graphEdgeCount: number;\n}\n\n/** Result of a cascade delete operation. */\nexport interface DeleteCascadeResult {\n deleted: boolean;\n counts: {\n prompts: number;\n spores: number;\n attachments: number;\n graphEdges: number;\n resolutionEvents: number;\n };\n /** Spore IDs that were deleted (needed for vault file + vector cleanup). */\n deletedSporeIds: string[];\n /** Attachment file paths that were deleted from DB (needed for disk cleanup). */\n deletedAttachmentPaths: string[];\n}\n\n/**\n * Get counts of all data related to a session, for pre-delete impact display.\n */\nexport function getSessionImpact(sessionId: string): SessionImpact {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT\n (SELECT COUNT(*) FROM prompt_batches WHERE session_id = ?) AS promptCount,\n (SELECT COUNT(*) FROM spores WHERE session_id = ?) AS sporeCount,\n (SELECT COUNT(*) FROM attachments WHERE session_id = ?) AS attachmentCount,\n (SELECT COUNT(*) FROM graph_edges WHERE session_id = ?) AS graphEdgeCount`,\n ).get(sessionId, sessionId, sessionId, sessionId) as SessionImpact;\n\n return row;\n}\n\n/**\n * Delete a session and ALL related data in a single transaction.\n *\n * Returns counts of deleted rows and IDs needed for post-transaction\n * cleanup (vault files, embedding vectors).\n */\nexport function deleteSessionCascade(sessionId: string): DeleteCascadeResult {\n const db = getDatabase();\n\n const zeroCounts: DeleteCascadeResult = {\n deleted: false,\n counts: { prompts: 0, spores: 0, attachments: 0, graphEdges: 0, resolutionEvents: 0 },\n deletedSporeIds: [],\n deletedAttachmentPaths: [],\n };\n\n // Check session exists first\n const exists = db.prepare(`SELECT id FROM sessions WHERE id = ?`).get(sessionId);\n if (!exists) return zeroCounts;\n\n // Collect IDs/paths needed for post-transaction cleanup before deleting.\n // Spores can reference prompt_batches from a different session (cross-session\n // spore linkage), so we must also collect spores linked via prompt_batch_id.\n const sporeIds = (db.prepare(\n `SELECT id FROM spores\n WHERE session_id = ?\n OR prompt_batch_id IN (SELECT id FROM prompt_batches WHERE session_id = ?)`,\n ).all(sessionId, sessionId) as { id: string }[]).map((r) => r.id);\n\n const attachmentPaths = (db.prepare(\n `SELECT file_path FROM attachments WHERE session_id = ?`,\n ).all(sessionId) as { file_path: string }[]).map((r) => r.file_path);\n\n // Run all deletes in a single transaction.\n //\n // Order matters — foreign_keys = ON is set in client.ts, so every DELETE\n // is checked immediately. Child rows must be removed before their parents:\n // - spores.prompt_batch_id → prompt_batches(id) [spores BEFORE prompt_batches]\n // - plans.prompt_batch_id → prompt_batches(id) [plans BEFORE prompt_batches]\n // - resolution_events.spore_id → spores(id) [resolution_events BEFORE spores]\n // - skill_usage.session_id → sessions(id) NOT NULL\n // - plans.session_id → sessions(id)\n // resolution_events can reference spores across sessions (e.g. 
a later\n // session supersedes an earlier session's spore), so we match by either\n // session_id OR spore_id-in-this-session to catch cross-session references.\n //\n // Spores can also reference prompt_batches from a different session\n // (cross-session prompt_batch_id linkage). We must delete those spores\n // BEFORE deleting prompt_batches to avoid FK violations.\n const result = db.transaction(() => {\n db.prepare(`DELETE FROM activities WHERE session_id = ?`).run(sessionId);\n const attachments = db.prepare(`DELETE FROM attachments WHERE session_id = ?`).run(sessionId);\n db.prepare(`DELETE FROM plans WHERE session_id = ?`).run(sessionId);\n db.prepare(`DELETE FROM skill_usage WHERE session_id = ?`).run(sessionId);\n const resEvents = db.prepare(\n `DELETE FROM resolution_events\n WHERE session_id = ?\n OR spore_id IN (\n SELECT id FROM spores\n WHERE session_id = ?\n OR prompt_batch_id IN (SELECT id FROM prompt_batches WHERE session_id = ?)\n )`,\n ).run(sessionId, sessionId, sessionId);\n const edges = db.prepare(`DELETE FROM graph_edges WHERE session_id = ?`).run(sessionId);\n const spores = db.prepare(\n `DELETE FROM spores\n WHERE session_id = ?\n OR prompt_batch_id IN (SELECT id FROM prompt_batches WHERE session_id = ?)`,\n ).run(sessionId, sessionId);\n const prompts = db.prepare(`DELETE FROM prompt_batches WHERE session_id = ?`).run(sessionId);\n const session = db.prepare(`DELETE FROM sessions WHERE id = ?`).run(sessionId);\n\n return {\n deleted: session.changes > 0,\n counts: {\n prompts: prompts.changes,\n spores: spores.changes,\n attachments: attachments.changes,\n graphEdges: edges.changes,\n resolutionEvents: resEvents.changes,\n },\n };\n })();\n\n return {\n ...result,\n deletedSporeIds: sporeIds,\n deletedAttachmentPaths: attachmentPaths,\n 
};\n}\n"],"mappings":";;;;;;;;;AAcA,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AASb,SAAS,gBAAgB,SAAkB,WAAyB;AACzE,oBAAkB;AAClB,kBAAgB;AAClB;AAOO,SAAS,oBAA6B;AAC3C,SAAO;AACT;AAKO,SAAS,mBAA2B;AACzC,SAAO;AACT;;;ACzBA,IAAM,mBAAmB;AAGzB,IAAM,yBAAyB;AAGxB,IAAM,qBAAqB;AAGlC,IAAM,gBAAgB;AAYf,IAAM,2BAA2B,oBAAI,IAAY;AAAA;AAAA;AAAA;AAAA,EAItD;AACF,CAAC;AAkCD,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,eAAe,KAAK,IAAI;AAO/C,SAAS,YAAY,KAAyC;AAC5D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,QAAQ,IAAI;AAAA,IACZ,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,YAAY,IAAI;AAAA,IAChB,YAAY,IAAI;AAAA,IAChB,SAAU,IAAI,WAAsB;AAAA,IACpC,aAAc,IAAI,eAA0B;AAAA,IAC5C,iBAAkB,IAAI,mBAA8B;AAAA,EACtD;AACF;AAYO,SAAS,QAAQ,WAAmB,KAAyD;AAClG,MAAI,CAAC,kBAAkB,EAAG;AAC1B,gBAAc;AAAA,IACZ,YAAY;AAAA,IACZ,QAAQ,OAAO,IAAI,EAAE;AAAA,IACrB,SAAS,KAAK,UAAU,GAAG;AAAA,IAC3B,YAAY,iBAAiB;AAAA,IAC7B,YAAY,IAAI,cAAc,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAAA,EAC5D,CAAC;AACH;AAaO,SAAS,cAAc,MAA+B;AAC3D,MAAI,yBAAyB,IAAI,KAAK,UAAU,GAAG;AACjD,UAAM,IAAI;AAAA,MACR,yBAAyB,KAAK,UAAU;AAAA,IAC1C;AAAA,EACF;AAEA,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,aAAa;AAAA,IAClB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAEA,QAAM,KAAK,OAAO,KAAK,eAAe;AAEtC,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AACF;AASO,SAAS,YAAY,OAA6B;AACvD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA,EAK1B,EAAE,IAAI,oBAAoB,SAAS,gBAAgB;AAEnD,SAAO,KAAK,IAAI,WAAW;AAC7B;AAKO,SAAS,SAAS,KAAe,QAAsB;AAC5D,MAAI,IAAI,WAAW,EAAG;AAEtB,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,QAAQ,GAAG,GAAG;AACtB;AAyBO,SAAS,oBAAoB,KAAe,WAA6B;AAC9E,MAAI,IAAI,WAAW,EAAG,QAAO,CAAC;AAE9B,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,WAAW,GAAG,GAAG;AAGvB,QAAM,eAAe,GAAG;AAAA,IACtB;AAAA,oBACgB,YAAY;AAAA,EAC9B,EAAE,IAAI,GAAG,KAAK,kBAAkB;AAEhC,SAAO,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE;AACrC;AAOO,SAAS,oBAA4B;AAC1C,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB;AAExB,SAAO,KAAK;AACd;AAKO,SAAS,oBAA4B;AAC1C,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,kBAAkB;AAExB,SAAO,IAAI;AACb;AASO,SAAS,WAAmB;AACjC,QAAM,KAAK,YAAY;AACvB,QAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa,IAAI;AAExD,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,EAEF,EAAE,IAAI,MAAM;AAEZ,SAAO,KAAK;AACd;AAKO,SAAS,eAAuB;AACrC,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,kBAAkB;AAExB,SAAO,IAAI;AACb;AAOA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAIA,IAAM,qBAAqB,IAAI,IAAY,eAAe;AAUnD,SAAS,qBAAqB,SAAsB,UAAwB;AACjF,QAAM,KAAK,YAAY;AAGvB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,mBAAmB,IAAI,IAAI,UAAU,EAAG;AAC7C,UAAM,MAAM,QAAQ,IAAI,IAAI,UAAU,KAAK,CAAC;AAC5C,QAAI,KAAK,IAAI,MAAM;AACnB,YAAQ,IAAI,IAAI,YAAY,GAAG;AAAA,EACjC;AAEA,aAAW,CAAC,OAAO,GAAG,KAAK,SAAS;AAClC,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACjD,OAAG;AAAA,MACD,UAAU,KAAK,mCAAmC,YAAY;AAAA,IAChE,EAAE,IAAI,UAAU,GAAG,GAAG;AAAA,EACxB;AACF;AAeO,SAAS,iBAAiB,WAA2B;AAC1D,QAAM,KAAK,YAAY;AACvB,MAAI,QAAQ;AAEZ,QAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa;AAGjD,aAAW,SAAS,iBAAiB;AACnC,UAAM,OAAO,GAAG;AAAA,MACd,iBAAiB,KAAK;AAAA;AAAA;AAAA;AAAA,0EAI8C,KAAK;AAAA;AAAA,IAE3E,EAAE,IAAI,KAAK;AAEX,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,
cAAc,GAAG,YAAY,CAAC,cAAyC;AAC3E,YAAM,OAAO,GAAG;AAAA,QACd;AAAA;AAAA,MAEF;AACA,iBAAW,OAAO,WAAW;AAC3B,aAAK,IAAI,OAAO,OAAO,IAAI,EAAE,GAAG,KAAK,UAAU,GAAG,GAAG,WAAW,GAAG;AAAA,MACrE;AAAA,IACF,CAAC;AAED,gBAAY,IAAI;AAChB,aAAS,KAAK;AAAA,EAChB;AAEA,SAAO;AACT;;;AC/YA,IAAM,qBAAqB;AAG3B,IAAM,mBAAmB;AAGzB,IAAM,iBAAiB;AAGvB,IAAM,uBAAuB;AAG7B,IAAM,qBAAqB;AAG3B,IAAM,oBAAoB;AAoG1B,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMA,kBAAiB,gBAAgB,KAAK,IAAI;AAWhD,SAAS,aAAa,KAA0C;AAC9D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAO,IAAI,QAAmB;AAAA,IAC9B,cAAe,IAAI,gBAA2B;AAAA,IAC9C,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAY,IAAI;AAAA,IAChB,UAAW,IAAI,YAAuB;AAAA,IACtC,QAAQ,IAAI;AAAA,IACZ,cAAc,IAAI;AAAA,IAClB,YAAY,IAAI;AAAA,IAChB,OAAQ,IAAI,SAAoB;AAAA,IAChC,SAAU,IAAI,WAAsB;AAAA,IACpC,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,mBAAoB,IAAI,qBAAgC;AAAA,IACxD,uBAAwB,IAAI,yBAAoC;AAAA,IAChE,WAAW,IAAI;AAAA,IACf,cAAe,IAAI,gBAA2B;AAAA,IAC9C,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAYO,SAAS,cAAc,MAAiC;AAC7D,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,QAAQ;AAAA,IACb,KAAK,gBAAgB;AAAA,IACrB,KAAK,UAAU;AAAA,IACf,KAAK;AAAA,IACL,KAAK,YAAY;AAAA,IACjB,KAAK,UAAU;AAAA,IACf,KAAK,gBAAgB;AAAA,IACrB,KAAK,cAAc;AAAA,IACnB,KAAK,SAAS;AAAA,IACd,KAAK,WAAW;AAAA,IAChB,KAAK,mBAAmB;AAAA,IACxB,KAAK,qBAAqB;AAAA,IAC1B,KAAK,yBAAyB;AAAA,IAC9B,KAAK,aAAa;AAAA,IAClB,KAAK,gBAAgB;AAAA,IACrB,KAAK;AAAA,IACL,KAAK,cAAc,iBAAiB;AAAA,IACpC,KAAK,iBAAiB,SAAY,IAAI;AAAA,IACtC,KAAK,eAAe,SAAY,IAAI;AAAA,EACtC;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUA,eAAc,6BAA6B,EAAE,IAAI,KAAK,EAAE;AAAA,EAC/E;AAEA,UAAQ,YAAY,GAAG;AAEvB,SAAO;AACT;AAOO,SAAS,WAAW,IAA+B;AACxD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,aAAa,GAAG;AACzB;AAGA,SAAS,mBACP,SACsC;AACtC,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,MAAI,QAAQ,UAAU,QAAW;AAC/B,eAAW,KAAK,WAAW;AAC3B,WAAO,KAAK,QAAQ,KAAK;AAAA,EAC3B;AAEA,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,MAAI,QAAQ,SAAS,QAAW;AAC9B,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,IAAI;AAAA,EAC1B;AAEA,MAAI,QAAQ,OAAO,QAAW;AAC5B,eAAW,KAAK,QAAQ;AACxB,WAAO,KAAK,QAAQ,EAAE;AAAA,EACxB;AAEA,MAAI,QAAQ,WAAW,UAAa,QAAQ,OAAO,SAAS,GAAG;AAC7D,eAAW,KAAK,6BAA6B;AAC7C,UAAM,UAAU,IAAI,QAAQ,MAAM;AAClC,WAAO,KAAK,SAAS,OAAO;AAAA,EAC9B;AACA,MAAI,QAAQ,UAAU,QAAW;AAC/B,eAAW,KAAK,gBAAgB;AAChC,WAAO,KAAK,QAAQ,KAAK;AAAA,EAC3B;AAKA,MAAI,QAAQ,kBAAkB,SAAS,QAAQ,WAAW,QAAW;AACnE,eAAW,KAAK,oBAAoB;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,OAAO,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAAA,IACrE;AAAA,EACF;AACF;AAKO,SAAS,aACd,UAA+B,CAAC,GAClB;AACd,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,mBAAmB,OAAO;AACpD,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,QAAQ,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,YAAY;AAC9B;AAKO,SAAS,cACd,UAAyD,CAAC,GAClD;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,mBAAmB,OAAO;AAEpD,QAAM,MAAM,GAAG;AAAA,IACb,0CAA0C,KAAK;AAAA,EACjD,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,IAAI;AACb;AAUO,SAAS,sBAAmC;AACjD
,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI;AACN,SAAO,IAAI,IAAI,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AACtC;AAeO,SAAS,6BAA6B,IAAqB;AAChE,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI,EAAE;AACR,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AACR,MAAI,IAAK,SAAQ,YAAY,aAAa,GAAG,CAAC;AAE9C,SAAO;AACT;AAOO,SAAS,cACd,IACA,SACmB;AACnB,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,QAAM,WAAmC;AAAA,IACvC,OAAO;AAAA,IACP,MAAM;AAAA,IACN,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,OAAO;AAAA,IACP,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,uBAAuB;AAAA,IACvB,WAAW;AAAA,IACX,cAAc;AAAA,EAChB;AAEA,aAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACpD,QAAI,OAAO,SAAS;AAClB,iBAAW,KAAK,GAAG,MAAM,MAAM;AAC/B,aAAO,KAAM,QAAoC,GAAG,KAAK,IAAI;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,WAAW,EAAE;AAEjD,SAAO,KAAK,EAAE;AAEd,KAAG;AAAA,IACD;AAAA,WACO,WAAW,KAAK,IAAI,CAAC;AAAA;AAAA,EAE9B,EAAE,IAAI,GAAG,MAAM;AAEf,QAAM,UAAU,WAAW,EAAE;AAE7B,MAAI,QAAS,SAAQ,YAAY,OAAO;AAExC,SAAO;AACT;AAOO,SAAS,0BAA0B,IAAkB;AAC1D,QAAM,KAAK,YAAY;AACvB,KAAG;AAAA,IACD;AAAA,EACF,EAAE,IAAI,EAAE;AACV;AAOO,SAAS,aACd,IACA,SACmB;AACnB,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB,SAAS,EAAE;AAEnC,QAAM,SAAS,WAAW,EAAE;AAE5B,MAAI,OAAQ,SAAQ,YAAY,MAAM;AAEtC,SAAO;AACT;AAkDO,SAAS,iBAAiB,WAAkC;AACjE,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,WAAW,WAAW,WAAW,SAAS;AAEhD,SAAO;AACT;AAQO,SAAS,qBAAqB,WAAwC;AAC3E,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAkC;AAAA,IACtC,SAAS;AAAA,IACT,QAAQ,EAAE,SAAS,GAAG,QAAQ,GAAG,aAAa,GAAG,YAAY,GAAG,kBAAkB,EAAE;AAAA,IACpF,iBAAiB,CAAC;AAAA,IAClB,wBAAwB,CAAC;AAAA,EAC3B;AAGA,QAAM,SAAS,GAAG,QAAQ,sCAAsC,EAAE,IAAI,SAAS;AAC/E,MAAI,CAAC,OAAQ,QAAO;AAKpB,QAAM,WAAY,GAAG;AAAA,IACnB;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,WAAW,SAAS,EAAuB,IAAI,CAAC,MAAM,EAAE,EAAE;AAEhE,QAAM,kBAAmB,GAAG;AAAA,IAC1B;AAAA,EACF,EAAE,IAAI,SAAS,EAA8B,IAAI,CAAC,MAAM,EAAE,SAAS;AAkBnE,QAAM,SAAS,GAAG,YAAY,MAAM;AAClC,OAAG,QAAQ,6CAA6C,EAAE,IAAI,SAAS;AACvE,UAAM,cAAc,GAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AAC5F,OAAG,QAAQ,wCAAwC,EAAE,IAAI,SAAS;AAClE,OAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AACxE,UAAM,YAAY,GAAG;AAAA,MACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOF,EAAE,IAAI,WAAW,WAAW,SAAS;AACrC,UAAM,QAAQ,GAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AACtF,UAAM,SAAS,GAAG;AAAA,MAChB;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,WAAW,SAAS;AAC1B,UAAM,UAAU,GAAG,QAAQ,iDAAiD,EAAE,IAAI,SAAS;AAC3F,UAAM,UAAU,GAAG,QAAQ,mCAAmC,EAAE,IAAI,SAAS;AAE7E,WAAO;AAAA,MACL,SAAS,QAAQ,UAAU;AAAA,MAC3B,QAAQ;AAAA,QACN,SAAS,QAAQ;AAAA,QACjB,QAAQ,OAAO;AAAA,QACf,aAAa,YAAY;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,kBAAkB,UAAU;AAAA,MAC9B;AAAA,IACF;AAAA,EACF,CAAC,EAAE;AAEH,SAAO;AAAA,IACL,GAAG;AAAA,IACH,iBAAiB;AAAA,IACjB,wBAAwB;AAAA,EAC1B;AACF;","names":["SELECT_COLUMNS"]}
@@ -2,10 +2,10 @@ import { createRequire as __cr } from 'node:module'; const require = __cr(import
2
2
  import {
3
3
  LmStudioBackend,
4
4
  OllamaBackend
5
- } from "./chunk-FMRZ26U5.js";
5
+ } from "./chunk-X3IGT5RV.js";
6
6
  import {
7
7
  PROVIDER_DETECT_TIMEOUT_MS
8
- } from "./chunk-FLLBJLHM.js";
8
+ } from "./chunk-6C6QZ4PM.js";
9
9
 
10
10
  // src/intelligence/provider-check.ts
11
11
  async function checkLocalProvider(type, baseUrl) {
@@ -19,4 +19,4 @@ async function checkLocalProvider(type, baseUrl) {
19
19
  export {
20
20
  checkLocalProvider
21
21
  };
22
- //# sourceMappingURL=chunk-BPRIYNLE.js.map
22
+ //# sourceMappingURL=chunk-TKAJ3JVF.js.map
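The hunk above exposes only the checkLocalProvider(type, baseUrl) signature and the fact that it sits on top of the LmStudioBackend/OllamaBackend chunk and PROVIDER_DETECT_TIMEOUT_MS. As a rough illustration of that kind of reachability probe only — the endpoint paths, timeout value, and function name below are assumptions, not the package's implementation:

// Stand-alone sketch of a local-provider reachability probe; NOT the package's code.
// Only checkLocalProvider(type, baseUrl) and the timeout constant are visible in this diff.
const PROBE_TIMEOUT_MS = 2_000; // stand-in for PROVIDER_DETECT_TIMEOUT_MS

async function probeLocalProvider(type: 'ollama' | 'lmstudio', baseUrl: string): Promise<boolean> {
  // Assumed endpoints: Ollama lists models at /api/tags, LM Studio's OpenAI-compatible
  // server at /v1/models. Either responding with 2xx counts as "provider is up".
  const path = type === 'ollama' ? '/api/tags' : '/v1/models';
  try {
    const res = await fetch(new URL(path, baseUrl), {
      signal: AbortSignal.timeout(PROBE_TIMEOUT_MS),
    });
    return res.ok;
  } catch {
    return false; // unreachable or timed out
  }
}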
@@ -0,0 +1,25 @@
1
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
+
3
+ // src/vault/resolve.ts
4
+ import path from "path";
5
+ import { execFileSync } from "child_process";
6
+ function resolveVaultDir(cwd = process.cwd()) {
7
+ return path.join(resolveRepoRoot(cwd), ".myco");
8
+ }
9
+ function resolveRepoRoot(cwd) {
10
+ try {
11
+ const gitCommon = execFileSync(
12
+ "git",
13
+ ["rev-parse", "--git-common-dir"],
14
+ { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
15
+ ).trim();
16
+ return path.resolve(cwd, gitCommon, "..");
17
+ } catch {
18
+ return cwd;
19
+ }
20
+ }
21
+
22
+ export {
23
+ resolveVaultDir
24
+ };
25
+ //# sourceMappingURL=chunk-TSM6VESW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/vault/resolve.ts"],"sourcesContent":["import path from 'node:path';\nimport { execFileSync } from 'node:child_process';\n\n/**\n * Resolve the vault directory.\n *\n * Always `.myco/` in the project root. The vault is a SQLite database\n * that lives with the project — there is no escape hatch.\n *\n * Uses git to find the repo root so this works correctly in\n * git worktrees — worktree agents resolve to the same vault\n * as the main working tree.\n */\nexport function resolveVaultDir(cwd: string = process.cwd()): string {\n return path.join(resolveRepoRoot(cwd), '.myco');\n}\n\n/**\n * Find the main repo root, even from a git worktree.\n *\n * `git rev-parse --git-common-dir` returns the shared .git directory:\n * - In a normal repo: \".git\" (relative)\n * - In a worktree: \"/abs/path/to/main-repo/.git\" (absolute)\n *\n * The repo root is the parent of that path.\n * Falls back to cwd if not in a git repo.\n */\nfunction resolveRepoRoot(cwd: string): string {\n try {\n const gitCommon = execFileSync(\n 'git', ['rev-parse', '--git-common-dir'],\n { cwd, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] },\n ).trim();\n return path.resolve(cwd, gitCommon, '..');\n } catch {\n return cwd;\n }\n}\n"],"mappings":";;;AAAA,OAAO,UAAU;AACjB,SAAS,oBAAoB;AAYtB,SAAS,gBAAgB,MAAc,QAAQ,IAAI,GAAW;AACnE,SAAO,KAAK,KAAK,gBAAgB,GAAG,GAAG,OAAO;AAChD;AAYA,SAAS,gBAAgB,KAAqB;AAC5C,MAAI;AACF,UAAM,YAAY;AAAA,MAChB;AAAA,MAAO,CAAC,aAAa,kBAAkB;AAAA,MACvC,EAAE,KAAK,UAAU,SAAS,OAAO,CAAC,QAAQ,QAAQ,MAAM,EAAE;AAAA,IAC5D,EAAE,KAAK;AACP,WAAO,KAAK,QAAQ,KAAK,WAAW,IAAI;AAAA,EAC1C,QAAQ;AACN,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -1,16 +1,18 @@
1
1
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
2
  import {
3
- AgentTaskSchema,
4
3
  loadAgentTasks,
5
4
  taskFromParsed
6
- } from "./chunk-CCRGY3QW.js";
5
+ } from "./chunk-AUIXX33A.js";
6
+ import {
7
+ AgentTaskSchema
8
+ } from "./chunk-OUJSQSKE.js";
7
9
  import {
8
10
  BUILT_IN_SOURCE,
9
11
  MAX_TASK_NAME_LENGTH,
10
12
  TASK_NAME_PATTERN,
11
13
  USER_TASKS_DIR,
12
14
  USER_TASK_SOURCE
13
- } from "./chunk-FLLBJLHM.js";
15
+ } from "./chunk-6C6QZ4PM.js";
14
16
  import {
15
17
  require_dist
16
18
  } from "./chunk-6LQIMRTC.js";
@@ -100,4 +102,4 @@ export {
100
102
  deleteUserTask,
101
103
  copyTaskToUser
102
104
  };
103
- //# sourceMappingURL=chunk-6X2ERTQV.js.map
105
+ //# sourceMappingURL=chunk-USVFEWYL.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/agent/registry.ts"],"sourcesContent":["/**\n * User task registry.\n *\n * Loads built-in tasks from the definitions directory and user-created tasks\n * from the vault's tasks/ subdirectory. User tasks with the same name as a\n * built-in task override the built-in.\n *\n * No module-level cache — always reads from disk.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { USER_TASKS_DIR, USER_TASK_SOURCE, BUILT_IN_SOURCE, TASK_NAME_PATTERN, MAX_TASK_NAME_LENGTH } from '@myco/constants.js';\nimport { loadAgentTasks, taskFromParsed } from './loader.js';\nimport { AgentTaskSchema } from './schemas.js';\nimport type { AgentTask } from './types.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n// BUILT_IN_SOURCE imported from @myco/constants.js\n\n/** Suffix appended to the task name when copying a built-in task for the user. */\nconst COPY_SUFFIX = '-custom';\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Load all tasks: built-in definitions merged with user-created overrides.\n *\n * Built-in tasks are loaded from `definitionsDir/tasks/*.yaml`.\n * User tasks are loaded from `vaultDir/tasks/*.yaml`.\n * A user task with the same name as a built-in task replaces it entirely.\n *\n * Always reads from disk — no caching.\n *\n * @param definitionsDir — path to `src/agent/definitions/` (or dist equivalent).\n * @param vaultDir — optional vault directory; user tasks skipped if not provided.\n * @returns map from task name → AgentTask.\n */\nexport function loadAllTasks(definitionsDir: string, vaultDir?: string): Map<string, AgentTask> {\n const result = new Map<string, AgentTask>();\n\n // Load built-in tasks first\n const builtIn = loadAgentTasks(definitionsDir);\n for (const task of builtIn) {\n result.set(task.name, { ...task, isBuiltin: true, source: BUILT_IN_SOURCE });\n }\n\n // Overlay user tasks (override built-in if same name)\n if (vaultDir) {\n const userTasksDir = path.join(vaultDir, USER_TASKS_DIR);\n if (fs.existsSync(userTasksDir)) {\n const files = fs.readdirSync(userTasksDir).filter((f) => f.endsWith('.yaml'));\n for (const file of files) {\n const filePath = path.join(userTasksDir, file);\n try {\n const raw = fs.readFileSync(filePath, 'utf-8');\n const parsed = AgentTaskSchema.parse(parseYaml(raw));\n const task: AgentTask = {\n ...taskFromParsed(parsed),\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n result.set(task.name, task);\n } catch (err) {\n console.warn(`[registry] Skipping malformed user task file: ${filePath}`, err);\n }\n }\n }\n }\n\n return result;\n}\n\n/**\n * Validate a task name against the allowed pattern and length limit.\n *\n * Valid names: lowercase letters, digits, and hyphens. Must start and end\n * with a letter or digit. 
Single character names (a–z, 0–9) are allowed.\n *\n * @param name — candidate task name.\n * @returns true if valid.\n */\nexport function validateTaskName(name: string): boolean {\n if (name.length > MAX_TASK_NAME_LENGTH) return false;\n return TASK_NAME_PATTERN.test(name);\n}\n\n/**\n * Serialize an AgentTask to YAML and write it to `vaultDir/tasks/<name>.yaml`.\n *\n * Validates the task through AgentTaskSchema before writing.\n * Creates the tasks directory if it does not exist (idempotent).\n * Strips the internal `source` and `isBuiltin` fields from the serialized output.\n *\n * @param vaultDir — path to the vault root directory.\n * @param task — task to write.\n * @returns absolute path to the written file.\n * @throws if schema validation fails.\n */\nexport function writeUserTask(vaultDir: string, task: AgentTask): string {\n // Validate before touching the filesystem\n AgentTaskSchema.parse(task);\n\n const tasksDir = path.join(vaultDir, USER_TASKS_DIR);\n fs.mkdirSync(tasksDir, { recursive: true });\n\n // Strip internal-only fields before serializing\n const { isBuiltin: _isBuiltin, source: _source, ...serializable } = task;\n const yaml = stringifyYaml(serializable);\n\n const filePath = path.join(tasksDir, `${task.name}.yaml`);\n fs.writeFileSync(filePath, yaml, 'utf-8');\n return filePath;\n}\n\n/**\n * Delete a user task YAML file from `vaultDir/tasks/<taskName>.yaml`.\n *\n * @param vaultDir — path to the vault root directory.\n * @param taskName — name of the task to delete.\n * @returns true if the file existed and was removed, false if it did not exist.\n */\nexport function deleteUserTask(vaultDir: string, taskName: string): boolean {\n const filePath = path.join(vaultDir, USER_TASKS_DIR, `${taskName}.yaml`);\n if (!fs.existsSync(filePath)) return false;\n fs.rmSync(filePath);\n return true;\n}\n\n/**\n * Create a user copy of an existing task.\n *\n * Loads all tasks (built-in + user), locates `sourceName`, then writes a new\n * user task with the given name (or `sourceName + COPY_SUFFIX` if omitted),\n * `isDefault: false`, `isBuiltin: false`, and `source: 'user'`.\n *\n * @param definitionsDir — path to built-in definitions directory.\n * @param vaultDir — path to vault root directory.\n * @param sourceName — name of the task to copy.\n * @param newName — optional name for the copy; defaults to `sourceName + '-custom'`.\n * @returns the newly written AgentTask.\n * @throws if the source task is not found.\n * @throws if the new name is invalid.\n */\nexport function copyTaskToUser(\n definitionsDir: string,\n vaultDir: string,\n sourceName: string,\n newName?: string,\n): AgentTask {\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n\n const source = allTasks.get(sourceName);\n if (!source) {\n throw new Error(`Task not found: ${sourceName}`);\n }\n\n const targetName = newName ?? 
`${sourceName}${COPY_SUFFIX}`;\n if (!validateTaskName(targetName)) {\n throw new Error(`Invalid task name: ${targetName}`);\n }\n\n const copy: AgentTask = {\n ...source,\n name: targetName,\n isDefault: false,\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n\n writeUserTask(vaultDir, copy);\n return copy;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAYA,kBAA+D;AAF/D,OAAO,QAAQ;AACf,OAAO,UAAU;AAcjB,IAAM,cAAc;AAmBb,SAAS,aAAa,gBAAwB,UAA2C;AAC9F,QAAM,SAAS,oBAAI,IAAuB;AAG1C,QAAM,UAAU,eAAe,cAAc;AAC7C,aAAW,QAAQ,SAAS;AAC1B,WAAO,IAAI,KAAK,MAAM,EAAE,GAAG,MAAM,WAAW,MAAM,QAAQ,gBAAgB,CAAC;AAAA,EAC7E;AAGA,MAAI,UAAU;AACZ,UAAM,eAAe,KAAK,KAAK,UAAU,cAAc;AACvD,QAAI,GAAG,WAAW,YAAY,GAAG;AAC/B,YAAM,QAAQ,GAAG,YAAY,YAAY,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAC5E,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,YAAI;AACF,gBAAM,MAAM,GAAG,aAAa,UAAU,OAAO;AAC7C,gBAAM,SAAS,gBAAgB,UAAM,YAAAA,OAAU,GAAG,CAAC;AACnD,gBAAM,OAAkB;AAAA,YACtB,GAAG,eAAe,MAAM;AAAA,YACxB,WAAW;AAAA,YACX,QAAQ;AAAA,UACV;AACA,iBAAO,IAAI,KAAK,MAAM,IAAI;AAAA,QAC5B,SAAS,KAAK;AACZ,kBAAQ,KAAK,iDAAiD,QAAQ,IAAI,GAAG;AAAA,QAC/E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,iBAAiB,MAAuB;AACtD,MAAI,KAAK,SAAS,qBAAsB,QAAO;AAC/C,SAAO,kBAAkB,KAAK,IAAI;AACpC;AAcO,SAAS,cAAc,UAAkB,MAAyB;AAEvE,kBAAgB,MAAM,IAAI;AAE1B,QAAM,WAAW,KAAK,KAAK,UAAU,cAAc;AACnD,KAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAG1C,QAAM,EAAE,WAAW,YAAY,QAAQ,SAAS,GAAG,aAAa,IAAI;AACpE,QAAM,WAAO,YAAAC,WAAc,YAAY;AAEvC,QAAM,WAAW,KAAK,KAAK,UAAU,GAAG,KAAK,IAAI,OAAO;AACxD,KAAG,cAAc,UAAU,MAAM,OAAO;AACxC,SAAO;AACT;AASO,SAAS,eAAe,UAAkB,UAA2B;AAC1E,QAAM,WAAW,KAAK,KAAK,UAAU,gBAAgB,GAAG,QAAQ,OAAO;AACvE,MAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,QAAO;AACrC,KAAG,OAAO,QAAQ;AAClB,SAAO;AACT;AAiBO,SAAS,eACd,gBACA,UACA,YACA,SACW;AACX,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AAEtD,QAAM,SAAS,SAAS,IAAI,UAAU;AACtC,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,mBAAmB,UAAU,EAAE;AAAA,EACjD;AAEA,QAAM,aAAa,WAAW,GAAG,UAAU,GAAG,WAAW;AACzD,MAAI,CAAC,iBAAiB,UAAU,GAAG;AACjC,UAAM,IAAI,MAAM,sBAAsB,UAAU,EAAE;AAAA,EACpD;AAEA,QAAM,OAAkB;AAAA,IACtB,GAAG;AAAA,IACH,MAAM;AAAA,IACN,WAAW;AAAA,IACX,WAAW;AAAA,IACX,QAAQ;AAAA,EACV;AAEA,gBAAc,UAAU,IAAI;AAC5B,SAAO;AACT;","names":["parseYaml","stringifyYaml"]}
1
+ {"version":3,"sources":["../src/agent/registry.ts"],"sourcesContent":["/**\n * User task registry.\n *\n * Loads built-in tasks from the definitions directory and user-created tasks\n * from the vault's tasks/ subdirectory. User tasks with the same name as a\n * built-in task override the built-in.\n *\n * No module-level cache — always reads from disk.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { USER_TASKS_DIR, USER_TASK_SOURCE, BUILT_IN_SOURCE, TASK_NAME_PATTERN, MAX_TASK_NAME_LENGTH } from '@myco/constants.js';\nimport { loadAgentTasks, taskFromParsed } from './loader.js';\nimport { AgentTaskSchema } from './schemas.js';\nimport type { AgentTask } from './types.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n// BUILT_IN_SOURCE imported from @myco/constants.js\n\n/** Suffix appended to the task name when copying a built-in task for the user. */\nconst COPY_SUFFIX = '-custom';\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Load all tasks: built-in definitions merged with user-created overrides.\n *\n * Built-in tasks are loaded from `definitionsDir/tasks/*.yaml`.\n * User tasks are loaded from `vaultDir/tasks/*.yaml`.\n * A user task with the same name as a built-in task replaces it entirely.\n *\n * Always reads from disk — no caching.\n *\n * @param definitionsDir — path to `src/agent/definitions/` (or dist equivalent).\n * @param vaultDir — optional vault directory; user tasks skipped if not provided.\n * @returns map from task name → AgentTask.\n */\nexport function loadAllTasks(definitionsDir: string, vaultDir?: string): Map<string, AgentTask> {\n const result = new Map<string, AgentTask>();\n\n // Load built-in tasks first\n const builtIn = loadAgentTasks(definitionsDir);\n for (const task of builtIn) {\n result.set(task.name, { ...task, isBuiltin: true, source: BUILT_IN_SOURCE });\n }\n\n // Overlay user tasks (override built-in if same name)\n if (vaultDir) {\n const userTasksDir = path.join(vaultDir, USER_TASKS_DIR);\n if (fs.existsSync(userTasksDir)) {\n const files = fs.readdirSync(userTasksDir).filter((f) => f.endsWith('.yaml'));\n for (const file of files) {\n const filePath = path.join(userTasksDir, file);\n try {\n const raw = fs.readFileSync(filePath, 'utf-8');\n const parsed = AgentTaskSchema.parse(parseYaml(raw));\n const task: AgentTask = {\n ...taskFromParsed(parsed),\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n result.set(task.name, task);\n } catch (err) {\n console.warn(`[registry] Skipping malformed user task file: ${filePath}`, err);\n }\n }\n }\n }\n\n return result;\n}\n\n/**\n * Validate a task name against the allowed pattern and length limit.\n *\n * Valid names: lowercase letters, digits, and hyphens. Must start and end\n * with a letter or digit. 
Single character names (a–z, 0–9) are allowed.\n *\n * @param name — candidate task name.\n * @returns true if valid.\n */\nexport function validateTaskName(name: string): boolean {\n if (name.length > MAX_TASK_NAME_LENGTH) return false;\n return TASK_NAME_PATTERN.test(name);\n}\n\n/**\n * Serialize an AgentTask to YAML and write it to `vaultDir/tasks/<name>.yaml`.\n *\n * Validates the task through AgentTaskSchema before writing.\n * Creates the tasks directory if it does not exist (idempotent).\n * Strips the internal `source` and `isBuiltin` fields from the serialized output.\n *\n * @param vaultDir — path to the vault root directory.\n * @param task — task to write.\n * @returns absolute path to the written file.\n * @throws if schema validation fails.\n */\nexport function writeUserTask(vaultDir: string, task: AgentTask): string {\n // Validate before touching the filesystem\n AgentTaskSchema.parse(task);\n\n const tasksDir = path.join(vaultDir, USER_TASKS_DIR);\n fs.mkdirSync(tasksDir, { recursive: true });\n\n // Strip internal-only fields before serializing\n const { isBuiltin: _isBuiltin, source: _source, ...serializable } = task;\n const yaml = stringifyYaml(serializable);\n\n const filePath = path.join(tasksDir, `${task.name}.yaml`);\n fs.writeFileSync(filePath, yaml, 'utf-8');\n return filePath;\n}\n\n/**\n * Delete a user task YAML file from `vaultDir/tasks/<taskName>.yaml`.\n *\n * @param vaultDir — path to the vault root directory.\n * @param taskName — name of the task to delete.\n * @returns true if the file existed and was removed, false if it did not exist.\n */\nexport function deleteUserTask(vaultDir: string, taskName: string): boolean {\n const filePath = path.join(vaultDir, USER_TASKS_DIR, `${taskName}.yaml`);\n if (!fs.existsSync(filePath)) return false;\n fs.rmSync(filePath);\n return true;\n}\n\n/**\n * Create a user copy of an existing task.\n *\n * Loads all tasks (built-in + user), locates `sourceName`, then writes a new\n * user task with the given name (or `sourceName + COPY_SUFFIX` if omitted),\n * `isDefault: false`, `isBuiltin: false`, and `source: 'user'`.\n *\n * @param definitionsDir — path to built-in definitions directory.\n * @param vaultDir — path to vault root directory.\n * @param sourceName — name of the task to copy.\n * @param newName — optional name for the copy; defaults to `sourceName + '-custom'`.\n * @returns the newly written AgentTask.\n * @throws if the source task is not found.\n * @throws if the new name is invalid.\n */\nexport function copyTaskToUser(\n definitionsDir: string,\n vaultDir: string,\n sourceName: string,\n newName?: string,\n): AgentTask {\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n\n const source = allTasks.get(sourceName);\n if (!source) {\n throw new Error(`Task not found: ${sourceName}`);\n }\n\n const targetName = newName ?? 
`${sourceName}${COPY_SUFFIX}`;\n if (!validateTaskName(targetName)) {\n throw new Error(`Invalid task name: ${targetName}`);\n }\n\n const copy: AgentTask = {\n ...source,\n name: targetName,\n isDefault: false,\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n\n writeUserTask(vaultDir, copy);\n return copy;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAYA,kBAA+D;AAF/D,OAAO,QAAQ;AACf,OAAO,UAAU;AAcjB,IAAM,cAAc;AAmBb,SAAS,aAAa,gBAAwB,UAA2C;AAC9F,QAAM,SAAS,oBAAI,IAAuB;AAG1C,QAAM,UAAU,eAAe,cAAc;AAC7C,aAAW,QAAQ,SAAS;AAC1B,WAAO,IAAI,KAAK,MAAM,EAAE,GAAG,MAAM,WAAW,MAAM,QAAQ,gBAAgB,CAAC;AAAA,EAC7E;AAGA,MAAI,UAAU;AACZ,UAAM,eAAe,KAAK,KAAK,UAAU,cAAc;AACvD,QAAI,GAAG,WAAW,YAAY,GAAG;AAC/B,YAAM,QAAQ,GAAG,YAAY,YAAY,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAC5E,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,YAAI;AACF,gBAAM,MAAM,GAAG,aAAa,UAAU,OAAO;AAC7C,gBAAM,SAAS,gBAAgB,UAAM,YAAAA,OAAU,GAAG,CAAC;AACnD,gBAAM,OAAkB;AAAA,YACtB,GAAG,eAAe,MAAM;AAAA,YACxB,WAAW;AAAA,YACX,QAAQ;AAAA,UACV;AACA,iBAAO,IAAI,KAAK,MAAM,IAAI;AAAA,QAC5B,SAAS,KAAK;AACZ,kBAAQ,KAAK,iDAAiD,QAAQ,IAAI,GAAG;AAAA,QAC/E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,iBAAiB,MAAuB;AACtD,MAAI,KAAK,SAAS,qBAAsB,QAAO;AAC/C,SAAO,kBAAkB,KAAK,IAAI;AACpC;AAcO,SAAS,cAAc,UAAkB,MAAyB;AAEvE,kBAAgB,MAAM,IAAI;AAE1B,QAAM,WAAW,KAAK,KAAK,UAAU,cAAc;AACnD,KAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAG1C,QAAM,EAAE,WAAW,YAAY,QAAQ,SAAS,GAAG,aAAa,IAAI;AACpE,QAAM,WAAO,YAAAC,WAAc,YAAY;AAEvC,QAAM,WAAW,KAAK,KAAK,UAAU,GAAG,KAAK,IAAI,OAAO;AACxD,KAAG,cAAc,UAAU,MAAM,OAAO;AACxC,SAAO;AACT;AASO,SAAS,eAAe,UAAkB,UAA2B;AAC1E,QAAM,WAAW,KAAK,KAAK,UAAU,gBAAgB,GAAG,QAAQ,OAAO;AACvE,MAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,QAAO;AACrC,KAAG,OAAO,QAAQ;AAClB,SAAO;AACT;AAiBO,SAAS,eACd,gBACA,UACA,YACA,SACW;AACX,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AAEtD,QAAM,SAAS,SAAS,IAAI,UAAU;AACtC,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,mBAAmB,UAAU,EAAE;AAAA,EACjD;AAEA,QAAM,aAAa,WAAW,GAAG,UAAU,GAAG,WAAW;AACzD,MAAI,CAAC,iBAAiB,UAAU,GAAG;AACjC,UAAM,IAAI,MAAM,sBAAsB,UAAU,EAAE;AAAA,EACpD;AAEA,QAAM,OAAkB;AAAA,IACtB,GAAG;AAAA,IACH,MAAM;AAAA,IACN,WAAW;AAAA,IACX,WAAW;AAAA,IACX,QAAQ;AAAA,EACV;AAEA,gBAAc,UAAU,IAAI;AAC5B,SAAO;AACT;","names":["parseYaml","stringifyYaml"]}
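The registry.ts source embedded in this map documents the task-registry API: loadAllTasks merges built-in definitions with user YAML files from the vault's tasks/ directory (a same-named user task replaces the built-in), and copyTaskToUser writes a user copy whose name defaults to the source name plus '-custom'. A short sketch of that flow, assuming these functions are reachable from the package entry point; the directory paths and the 'review' task name are placeholders:

// Illustrative sketch of the documented behaviour, not package code.
import { loadAllTasks, copyTaskToUser } from '@goondocks/myco';

const definitionsDir = 'dist/agent/definitions'; // placeholder: built-in task YAMLs
const vaultDir = '.myco';                        // vault root (see resolveVaultDir above)

// Built-ins are loaded first; a user task with the same name replaces it entirely.
const tasks = loadAllTasks(definitionsDir, vaultDir);

// Copy a task into the vault; omitting the new name appends '-custom'.
if (tasks.has('review')) {
  const copy = copyTaskToUser(definitionsDir, vaultDir, 'review');
  console.log(copy.name, copy.source); // "review-custom" "user"
}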
@@ -1,16 +1,16 @@
1
1
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
2
  import {
3
3
  isProcessAlive
4
- } from "./chunk-KESLPBKV.js";
4
+ } from "./chunk-QDLVIW2O.js";
5
5
  import {
6
6
  loadMergedConfig
7
- } from "./chunk-OD4AA7PV.js";
7
+ } from "./chunk-53RPGOEN.js";
8
8
  import {
9
9
  getDatabase
10
10
  } from "./chunk-MYX5NCRH.js";
11
11
  import {
12
12
  DIGEST_TIERS
13
- } from "./chunk-FLLBJLHM.js";
13
+ } from "./chunk-6C6QZ4PM.js";
14
14
 
15
15
  // src/db/queries/embeddings.ts
16
16
  var EMBEDDABLE_TABLES = ["sessions", "spores", "plans", "artifacts", "skill_records"];
@@ -187,4 +187,4 @@ export {
187
187
  getEmbeddingQueueDepth,
188
188
  gatherStats
189
189
  };
190
- //# sourceMappingURL=chunk-JZGN33AY.js.map
190
+ //# sourceMappingURL=chunk-VRI56337.js.map