@goondocks/myco 0.20.2 → 0.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. package/dist/agent-eval-RJSQI5S2.js +355 -0
  2. package/dist/agent-eval-RJSQI5S2.js.map +1 -0
  3. package/dist/{agent-run-X25Q2A6T.js → agent-run-2JSYFOKU.js} +10 -8
  4. package/dist/{agent-run-X25Q2A6T.js.map → agent-run-2JSYFOKU.js.map} +1 -1
  5. package/dist/{agent-tasks-7B6OFERB.js → agent-tasks-APFJIM2T.js} +10 -8
  6. package/dist/{agent-tasks-7B6OFERB.js.map → agent-tasks-APFJIM2T.js.map} +1 -1
  7. package/dist/{chunk-OD4AA7PV.js → chunk-53RPGOEN.js} +56 -8
  8. package/dist/chunk-53RPGOEN.js.map +1 -0
  9. package/dist/chunk-54SXG5HF.js +26 -0
  10. package/dist/chunk-54SXG5HF.js.map +1 -0
  11. package/dist/{chunk-DCSGJ7W4.js → chunk-5ZG4RMUH.js} +2 -2
  12. package/dist/{chunk-FLLBJLHM.js → chunk-6C6QZ4PM.js} +9 -5
  13. package/dist/chunk-6C6QZ4PM.js.map +1 -0
  14. package/dist/chunk-6LB7XELY.js +406 -0
  15. package/dist/chunk-6LB7XELY.js.map +1 -0
  16. package/dist/{chunk-JZGN33AY.js → chunk-75Z7UKDY.js} +4 -4
  17. package/dist/{chunk-XG5RRUYF.js → chunk-BUTL6IFS.js} +2 -2
  18. package/dist/chunk-CESKJD44.js +586 -0
  19. package/dist/chunk-CESKJD44.js.map +1 -0
  20. package/dist/chunk-CISWUP5W.js +101 -0
  21. package/dist/chunk-CISWUP5W.js.map +1 -0
  22. package/dist/chunk-DJ3IHNYO.js +50 -0
  23. package/dist/chunk-DJ3IHNYO.js.map +1 -0
  24. package/dist/chunk-F3OEQYLS.js +847 -0
  25. package/dist/chunk-F3OEQYLS.js.map +1 -0
  26. package/dist/{chunk-6RFZWV4R.js → chunk-FCJ5JV54.js} +1 -1
  27. package/dist/{chunk-6RFZWV4R.js.map → chunk-FCJ5JV54.js.map} +1 -1
  28. package/dist/{chunk-2PDWCDKY.js → chunk-G6QIBNZM.js} +9 -6
  29. package/dist/{chunk-2PDWCDKY.js.map → chunk-G6QIBNZM.js.map} +1 -1
  30. package/dist/{chunk-6X2ERTQV.js → chunk-ILJPRYES.js} +6 -4
  31. package/dist/{chunk-6X2ERTQV.js.map → chunk-ILJPRYES.js.map} +1 -1
  32. package/dist/{chunk-US4LNCAT.js → chunk-IPPMYQ2Y.js} +5 -1
  33. package/dist/chunk-IPPMYQ2Y.js.map +1 -0
  34. package/dist/{chunk-KESLPBKV.js → chunk-JR54LTPP.js} +4 -4
  35. package/dist/{chunk-CCRGY3QW.js → chunk-JZS6GZ6T.js} +16 -94
  36. package/dist/chunk-JZS6GZ6T.js.map +1 -0
  37. package/dist/{chunk-5XIVBO25.js → chunk-LVIY7P35.js} +2 -2
  38. package/dist/chunk-NGH7U6A3.js +13844 -0
  39. package/dist/chunk-NGH7U6A3.js.map +1 -0
  40. package/dist/chunk-OUJSQSKE.js +113 -0
  41. package/dist/chunk-OUJSQSKE.js.map +1 -0
  42. package/dist/{chunk-VVNL26WX.js → chunk-P66DLD6G.js} +22 -10
  43. package/dist/chunk-P66DLD6G.js.map +1 -0
  44. package/dist/{chunk-XATDZX7U.js → chunk-R2JIJBCL.js} +18 -4
  45. package/dist/{chunk-XATDZX7U.js.map → chunk-R2JIJBCL.js.map} +1 -1
  46. package/dist/{chunk-MYOZLMB2.js → chunk-RL5R4CQU.js} +538 -19
  47. package/dist/chunk-RL5R4CQU.js.map +1 -0
  48. package/dist/{chunk-EVDQKYCG.js → chunk-RQSJLWP4.js} +13 -2
  49. package/dist/chunk-RQSJLWP4.js.map +1 -0
  50. package/dist/{chunk-BPRIYNLE.js → chunk-TKAJ3JVF.js} +3 -3
  51. package/dist/{chunk-Q36VMZST.js → chunk-VHNRMM4O.js} +3 -2
  52. package/dist/{chunk-FMRZ26U5.js → chunk-X3IGT5RV.js} +5 -2
  53. package/dist/{chunk-FMRZ26U5.js.map → chunk-X3IGT5RV.js.map} +1 -1
  54. package/dist/{chunk-KHT24OWC.js → chunk-YDUOSRGD.js} +8 -94
  55. package/dist/{chunk-KHT24OWC.js.map → chunk-YDUOSRGD.js.map} +1 -1
  56. package/dist/{cli-GGPWH4UO.js → cli-LNYSTDQM.js} +49 -42
  57. package/dist/cli-LNYSTDQM.js.map +1 -0
  58. package/dist/{client-YXQUTXVZ.js → client-NWE4TCNO.js} +4 -4
  59. package/dist/{config-OMCYHG2S.js → config-VC4ACP42.js} +6 -4
  60. package/dist/{config-OMCYHG2S.js.map → config-VC4ACP42.js.map} +1 -1
  61. package/dist/{detect-providers-5KOPZ7J2.js → detect-providers-ILLQZROY.js} +4 -4
  62. package/dist/{doctor-5JXJ36KA.js → doctor-TI7EZ3RW.js} +48 -15
  63. package/dist/doctor-TI7EZ3RW.js.map +1 -0
  64. package/dist/executor-F2YU7HXJ.js +44 -0
  65. package/dist/{init-LMYOVZAV.js → init-KG3TYVGE.js} +14 -12
  66. package/dist/{init-LMYOVZAV.js.map → init-KG3TYVGE.js.map} +1 -1
  67. package/dist/{installer-FS257JRZ.js → installer-UMH7OJ5A.js} +6 -4
  68. package/dist/{llm-TH4NLIRM.js → llm-AGVEF5XD.js} +5 -4
  69. package/dist/{loader-CQYTFHEW.js → loader-LX7TFRM6.js} +5 -3
  70. package/dist/{loader-NOMBJUPW.js → loader-NAVVZK63.js} +4 -3
  71. package/dist/{main-YTBVRTBI.js → main-5PRQNEEE.js} +2453 -650
  72. package/dist/main-5PRQNEEE.js.map +1 -0
  73. package/dist/{open-HG2DX6RN.js → open-5A27BCSB.js} +10 -8
  74. package/dist/{open-HG2DX6RN.js.map → open-5A27BCSB.js.map} +1 -1
  75. package/dist/{post-compact-JSECI44W.js → post-compact-USAODKPQ.js} +6 -6
  76. package/dist/{post-tool-use-POGPTJBA.js → post-tool-use-GMMSYBII.js} +9 -7
  77. package/dist/post-tool-use-GMMSYBII.js.map +1 -0
  78. package/dist/{post-tool-use-failure-OT7BFWQW.js → post-tool-use-failure-NZVSL2PO.js} +6 -6
  79. package/dist/{pre-compact-OXVODKH4.js → pre-compact-LZ57DLUS.js} +6 -6
  80. package/dist/{provider-check-43LAMSMH.js → provider-check-ZEV5P4KM.js} +4 -4
  81. package/dist/{registry-U4CHXK6R.js → registry-M2Z5QBWH.js} +5 -4
  82. package/dist/{remove-N7ZPELFU.js → remove-T3KE6C5N.js} +10 -8
  83. package/dist/{remove-N7ZPELFU.js.map → remove-T3KE6C5N.js.map} +1 -1
  84. package/dist/{restart-ADG5GBTB.js → restart-YWDEVZUJ.js} +11 -9
  85. package/dist/{restart-ADG5GBTB.js.map → restart-YWDEVZUJ.js.map} +1 -1
  86. package/dist/{search-AHZEUNRR.js → search-GKFDGELR.js} +11 -9
  87. package/dist/{search-AHZEUNRR.js.map → search-GKFDGELR.js.map} +1 -1
  88. package/dist/{server-AGVYZVP5.js → server-AHUR6CWF.js} +368 -269
  89. package/dist/server-AHUR6CWF.js.map +1 -0
  90. package/dist/{session-6IU4AXYP.js → session-2ZEPLWW6.js} +11 -9
  91. package/dist/{session-6IU4AXYP.js.map → session-2ZEPLWW6.js.map} +1 -1
  92. package/dist/{session-end-FT27DWYZ.js → session-end-LWJYQAXX.js} +5 -5
  93. package/dist/session-start-WTA6GCOQ.js +134 -0
  94. package/dist/session-start-WTA6GCOQ.js.map +1 -0
  95. package/dist/{setup-llm-77MP4I2G.js → setup-llm-E7UU5IO7.js} +11 -9
  96. package/dist/{setup-llm-77MP4I2G.js.map → setup-llm-E7UU5IO7.js.map} +1 -1
  97. package/dist/src/agent/definitions/agent.yaml +9 -5
  98. package/dist/src/agent/definitions/tasks/cortex-instructions.yaml +93 -0
  99. package/dist/src/agent/definitions/tasks/cortex-prompt-builder.yaml +67 -0
  100. package/dist/src/agent/definitions/tasks/digest-only.yaml +1 -1
  101. package/dist/src/agent/definitions/tasks/extract-only.yaml +1 -1
  102. package/dist/src/agent/definitions/tasks/review-session.yaml +10 -39
  103. package/dist/src/agent/definitions/tasks/skill-evolve.yaml +4 -4
  104. package/dist/src/agent/definitions/tasks/skill-generate.yaml +1 -1
  105. package/dist/src/agent/definitions/tasks/skill-survey.yaml +2 -6
  106. package/dist/src/agent/definitions/tasks/supersession-sweep.yaml +1 -1
  107. package/dist/src/agent/definitions/tasks/title-summary.yaml +12 -19
  108. package/dist/src/agent/definitions/tasks/{full-intelligence.yaml → vault-evolve.yaml} +17 -82
  109. package/dist/src/agent/definitions/tasks/vault-seed.yaml +370 -0
  110. package/dist/src/agent/prompts/agent.md +12 -38
  111. package/dist/src/cli.js +1 -1
  112. package/dist/src/daemon/main.js +1 -1
  113. package/dist/src/hooks/post-tool-use.js +1 -1
  114. package/dist/src/hooks/session-end.js +1 -1
  115. package/dist/src/hooks/session-start.js +1 -1
  116. package/dist/src/hooks/stop.js +1 -1
  117. package/dist/src/hooks/user-prompt-submit.js +1 -1
  118. package/dist/src/mcp/server.js +1 -1
  119. package/dist/src/symbionts/manifests/claude-code.yaml +4 -0
  120. package/dist/src/symbionts/manifests/pi.yaml +22 -0
  121. package/dist/src/symbionts/templates/pi/package.json +6 -0
  122. package/dist/src/symbionts/templates/pi/plugin.ts +559 -0
  123. package/dist/{stats-NVPWOYTE.js → stats-DFG6S23S.js} +11 -9
  124. package/dist/{stats-NVPWOYTE.js.map → stats-DFG6S23S.js.map} +1 -1
  125. package/dist/{stop-ZPIKVLH4.js → stop-WRBTXEVT.js} +5 -5
  126. package/dist/{stop-failure-2PX67YJC.js → stop-failure-32MGIG2Q.js} +6 -6
  127. package/dist/{subagent-start-UUE6EHQD.js → subagent-start-VFGHQFVL.js} +6 -6
  128. package/dist/{subagent-stop-KQWWWPE6.js → subagent-stop-663FXG3P.js} +6 -6
  129. package/dist/{task-completed-WMHOFQ7B.js → task-completed-ZCQYEFMZ.js} +6 -6
  130. package/dist/{team-LRZ6GTQK.js → team-JTI5CDUO.js} +7 -5
  131. package/dist/{turns-YFNI5CQC.js → turns-HU2CTZAP.js} +2 -2
  132. package/dist/ui/assets/index-DGf1h-Ha.js +842 -0
  133. package/dist/ui/assets/index-_OP4ifzH.css +1 -0
  134. package/dist/ui/index.html +2 -2
  135. package/dist/{update-O6V4RC4W.js → update-3NBQTG32.js} +10 -8
  136. package/dist/{update-O6V4RC4W.js.map → update-3NBQTG32.js.map} +1 -1
  137. package/dist/{user-prompt-submit-N36KUPHI.js → user-prompt-submit-ME2TBKOS.js} +8 -7
  138. package/dist/{user-prompt-submit-N36KUPHI.js.map → user-prompt-submit-ME2TBKOS.js.map} +1 -1
  139. package/dist/{verify-LXPV7NYG.js → verify-R76ZFJSZ.js} +8 -5
  140. package/dist/{verify-LXPV7NYG.js.map → verify-R76ZFJSZ.js.map} +1 -1
  141. package/dist/{version-XMPPJQHR.js → version-GQAFBBPX.js} +2 -2
  142. package/dist/version-GQAFBBPX.js.map +1 -0
  143. package/package.json +3 -1
  144. package/skills/myco/SKILL.md +16 -1
  145. package/skills/myco/references/cli-usage.md +1 -1
  146. package/skills/myco-curate/SKILL.md +1 -1
  147. package/dist/chunk-4YFKBL3F.js +0 -195
  148. package/dist/chunk-4YFKBL3F.js.map +0 -1
  149. package/dist/chunk-CCRGY3QW.js.map +0 -1
  150. package/dist/chunk-EVDQKYCG.js.map +0 -1
  151. package/dist/chunk-FLLBJLHM.js.map +0 -1
  152. package/dist/chunk-MYOZLMB2.js.map +0 -1
  153. package/dist/chunk-OD4AA7PV.js.map +0 -1
  154. package/dist/chunk-US4LNCAT.js.map +0 -1
  155. package/dist/chunk-UYMFCYBF.js +0 -2326
  156. package/dist/chunk-UYMFCYBF.js.map +0 -1
  157. package/dist/chunk-VVNL26WX.js.map +0 -1
  158. package/dist/cli-GGPWH4UO.js.map +0 -1
  159. package/dist/doctor-5JXJ36KA.js.map +0 -1
  160. package/dist/executor-HWW2QNZQ.js +0 -2472
  161. package/dist/executor-HWW2QNZQ.js.map +0 -1
  162. package/dist/main-YTBVRTBI.js.map +0 -1
  163. package/dist/post-tool-use-POGPTJBA.js.map +0 -1
  164. package/dist/server-AGVYZVP5.js.map +0 -1
  165. package/dist/session-start-LAFICHII.js +0 -189
  166. package/dist/session-start-LAFICHII.js.map +0 -1
  167. package/dist/src/agent/definitions/tasks/graph-maintenance.yaml +0 -93
  168. package/dist/ui/assets/index-C2JuNtRB.css +0 -1
  169. package/dist/ui/assets/index-JLVaQKV2.js +0 -832
  170. /package/dist/{chunk-DCSGJ7W4.js.map → chunk-5ZG4RMUH.js.map} +0 -0
  171. /package/dist/{chunk-JZGN33AY.js.map → chunk-75Z7UKDY.js.map} +0 -0
  172. /package/dist/{chunk-XG5RRUYF.js.map → chunk-BUTL6IFS.js.map} +0 -0
  173. /package/dist/{chunk-KESLPBKV.js.map → chunk-JR54LTPP.js.map} +0 -0
  174. /package/dist/{chunk-5XIVBO25.js.map → chunk-LVIY7P35.js.map} +0 -0
  175. /package/dist/{chunk-BPRIYNLE.js.map → chunk-TKAJ3JVF.js.map} +0 -0
  176. /package/dist/{chunk-Q36VMZST.js.map → chunk-VHNRMM4O.js.map} +0 -0
  177. /package/dist/{client-YXQUTXVZ.js.map → client-NWE4TCNO.js.map} +0 -0
  178. /package/dist/{detect-providers-5KOPZ7J2.js.map → detect-providers-ILLQZROY.js.map} +0 -0
  179. /package/dist/{installer-FS257JRZ.js.map → executor-F2YU7HXJ.js.map} +0 -0
  180. /package/dist/{llm-TH4NLIRM.js.map → installer-UMH7OJ5A.js.map} +0 -0
  181. /package/dist/{loader-CQYTFHEW.js.map → llm-AGVEF5XD.js.map} +0 -0
  182. /package/dist/{loader-NOMBJUPW.js.map → loader-LX7TFRM6.js.map} +0 -0
  183. /package/dist/{provider-check-43LAMSMH.js.map → loader-NAVVZK63.js.map} +0 -0
  184. /package/dist/{post-compact-JSECI44W.js.map → post-compact-USAODKPQ.js.map} +0 -0
  185. /package/dist/{post-tool-use-failure-OT7BFWQW.js.map → post-tool-use-failure-NZVSL2PO.js.map} +0 -0
  186. /package/dist/{pre-compact-OXVODKH4.js.map → pre-compact-LZ57DLUS.js.map} +0 -0
  187. /package/dist/{registry-U4CHXK6R.js.map → provider-check-ZEV5P4KM.js.map} +0 -0
  188. /package/dist/{team-LRZ6GTQK.js.map → registry-M2Z5QBWH.js.map} +0 -0
  189. /package/dist/{session-end-FT27DWYZ.js.map → session-end-LWJYQAXX.js.map} +0 -0
  190. /package/dist/{stop-ZPIKVLH4.js.map → stop-WRBTXEVT.js.map} +0 -0
  191. /package/dist/{stop-failure-2PX67YJC.js.map → stop-failure-32MGIG2Q.js.map} +0 -0
  192. /package/dist/{subagent-start-UUE6EHQD.js.map → subagent-start-VFGHQFVL.js.map} +0 -0
  193. /package/dist/{subagent-stop-KQWWWPE6.js.map → subagent-stop-663FXG3P.js.map} +0 -0
  194. /package/dist/{task-completed-WMHOFQ7B.js.map → task-completed-ZCQYEFMZ.js.map} +0 -0
  195. /package/dist/{turns-YFNI5CQC.js.map → team-JTI5CDUO.js.map} +0 -0
  196. /package/dist/{version-XMPPJQHR.js.map → turns-HU2CTZAP.js.map} +0 -0
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/db/queries/cortex-instructions.ts","../src/db/queries/digest-extracts.ts","../src/db/queries/plans.ts","../src/db/queries/spores.ts","../src/context/cortex-brief.ts"],"sourcesContent":["import { getDatabase } from '@myco/db/client.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\n\nconst CORTEX_INSTRUCTION_COLUMNS = [\n 'id',\n 'agent_id',\n 'content',\n 'input_hash',\n 'source_run_id',\n 'generated_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = CORTEX_INSTRUCTION_COLUMNS.join(', ');\nconst DEFAULT_CORTEX_INSTRUCTIONS_ID = 'session-start';\n\nexport interface CortexInstructionsUpsert {\n agent_id: string;\n content: string;\n input_hash: string;\n generated_at: number;\n id?: string;\n machine_id?: string;\n source_run_id?: string | null;\n}\n\nexport interface CortexInstructionsRow {\n id: string;\n agent_id: string;\n content: string;\n input_hash: string;\n source_run_id: string | null;\n generated_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\nfunction toCortexInstructionsRow(row: Record<string, unknown>): CortexInstructionsRow {\n return {\n id: row.id as string,\n agent_id: row.agent_id as string,\n content: row.content as string,\n input_hash: row.input_hash as string,\n source_run_id: (row.source_run_id as string) ?? null,\n generated_at: row.generated_at as number,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\nexport function upsertCortexInstructions(input: CortexInstructionsUpsert): CortexInstructionsRow {\n const db = getDatabase();\n const id = input.id ?? `${input.agent_id}:${DEFAULT_CORTEX_INSTRUCTIONS_ID}`;\n\n const row = db.prepare(\n `INSERT INTO cortex_instructions (\n id, agent_id, content, input_hash, source_run_id, generated_at, machine_id\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT (id) DO UPDATE SET\n content = EXCLUDED.content,\n input_hash = EXCLUDED.input_hash,\n source_run_id = EXCLUDED.source_run_id,\n generated_at = EXCLUDED.generated_at,\n machine_id = EXCLUDED.machine_id\n RETURNING ${SELECT_COLUMNS}`,\n ).get(\n id,\n input.agent_id,\n input.content,\n input.input_hash,\n input.source_run_id ?? null,\n input.generated_at,\n input.machine_id ?? getTeamMachineId(),\n ) as Record<string, unknown>;\n\n return toCortexInstructionsRow(row);\n}\n\nexport function getCortexInstructions(agentId: string): CortexInstructionsRow | null {\n const db = getDatabase();\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM cortex_instructions\n WHERE agent_id = ?\n ORDER BY generated_at DESC\n LIMIT 1`,\n ).get(agentId) as Record<string, unknown> | undefined;\n return row ? toCortexInstructionsRow(row) : null;\n}\n","/**\n * Digest extract CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DIGEST_TIERS, epochSeconds } from '@myco/constants.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when upserting a digest extract. 
*/\nexport interface DigestExtractUpsert {\n agent_id: string;\n tier: number;\n content: string;\n generated_at: number;\n machine_id?: string;\n}\n\n/**\n * Options that control whether the upsert actually writes and how the\n * revision history is recorded. Added in schema v15.\n */\nexport interface DigestExtractUpsertOptions {\n /**\n * When true, the upsert is a no-op: nothing is written, no revision is\n * recorded, and `null` is returned. Used by dry-run tooling so we can\n * preview writes without touching persistent state.\n */\n dryRun?: boolean;\n /**\n * Id of the agent_run that produced this write. Recorded on the\n * revision row so operators can roll a specific run back.\n */\n runId?: string | null;\n /**\n * Optional JSON-encoded metadata to store with the revision.\n */\n metadata?: string | null;\n}\n\n/** Row shape for entries in digest_extract_revisions. */\nexport interface DigestExtractRevisionRow {\n id: number;\n agent_id: string;\n tier: number;\n content: string;\n metadata: string | null;\n run_id: string | null;\n parent_revision_id: number | null;\n created_at: number;\n}\n\n/** Options accepted by rollbackDigestExtract. */\nexport interface RollbackDigestExtractOptions {\n revisionId: number;\n /** Id of the run performing the rollback (recorded on the new revision). */\n runId?: string | null;\n}\n\n/** Row shape returned from digest_extracts queries (all columns). */\nexport interface DigestExtractRow {\n id: number;\n agent_id: string;\n tier: number;\n content: string;\n substrate_hash: string | null;\n generated_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst EXTRACT_COLUMNS = [\n 'id',\n 'agent_id',\n 'tier',\n 'content',\n 'substrate_hash',\n 'generated_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = EXTRACT_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed DigestExtractRow. */\nfunction toDigestExtractRow(row: Record<string, unknown>): DigestExtractRow {\n return {\n id: row.id as number,\n agent_id: row.agent_id as string,\n tier: row.tier as number,\n content: row.content as string,\n substrate_hash: (row.substrate_hash as string) ?? null,\n generated_at: row.generated_at as number,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Upsert a digest extract. Uses ON CONFLICT on (agent_id, tier).\n *\n * Schema v15 behaviour: when an existing row would be overwritten, the\n * prior content is copied into digest_extract_revisions (linked to the\n * previous revision if any) *before* the upsert runs. This makes the\n * revision log append-only and preserves the state the agent is replacing.\n *\n * When `options.dryRun === true`, the function is a no-op: nothing is\n * written to digest_extracts or digest_extract_revisions, and `null` is\n * returned. 
Call sites that care about the hydrated row should skip\n * follow-up reads when dry-running.\n */\nexport function upsertDigestExtract(\n data: DigestExtractUpsert,\n options: DigestExtractUpsertOptions = {},\n): DigestExtractRow | null {\n if (options.dryRun) return null;\n\n const db = getDatabase();\n\n // The revision snapshot and the live-row upsert MUST be atomic. Without\n // a transaction, a crash between the two writes would leave the revision\n // log out of sync with `digest_extracts` — the exact invariant this log\n // exists to guarantee. Matches the pattern used in sessions.ts /\n // skill-records.ts for multi-table writes.\n return db.transaction(() => {\n // Capture the row we're about to overwrite (if any) so we can copy it\n // into the revision history before mutating the live table.\n const existingRow = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts WHERE agent_id = ? AND tier = ?`,\n ).get(data.agent_id, data.tier) as Record<string, unknown> | undefined;\n\n if (existingRow) {\n const priorRevisionId = db.prepare(\n `SELECT id FROM digest_extract_revisions\n WHERE agent_id = ? AND tier = ?\n ORDER BY id DESC\n LIMIT 1`,\n ).get(data.agent_id, data.tier) as { id: number } | undefined;\n\n db.prepare(\n `INSERT INTO digest_extract_revisions\n (agent_id, tier, content, metadata, run_id, parent_revision_id, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)`,\n ).run(\n data.agent_id,\n data.tier,\n existingRow.content as string,\n options.metadata ?? null,\n options.runId ?? null,\n priorRevisionId?.id ?? null,\n epochSeconds(),\n );\n }\n\n db.prepare(\n `INSERT INTO digest_extracts (agent_id, tier, content, generated_at)\n VALUES (?, ?, ?, ?)\n ON CONFLICT (agent_id, tier) DO UPDATE SET\n content = EXCLUDED.content,\n generated_at = EXCLUDED.generated_at`,\n ).run(data.agent_id, data.tier, data.content, data.generated_at);\n\n // Always look up by composite unique key — works for both insert and update cases.\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts WHERE agent_id = ? AND tier = ?`,\n ).get(data.agent_id, data.tier);\n\n return toDigestExtractRow(row as Record<string, unknown>);\n })();\n}\n\n/**\n * Get a digest extract for a specific agent and tier.\n *\n * @returns the extract row, or null if not found.\n */\nexport function getDigestExtract(\n agentId: string,\n tier: number,\n): DigestExtractRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts\n WHERE agent_id = ? AND tier = ?`,\n ).get(agentId, tier) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toDigestExtractRow(row);\n}\n\n/**\n * List digest extracts for an agent, filtered to configured tiers, ordered by tier ASC.\n */\nexport function listDigestExtracts(\n agentId: string,\n): DigestExtractRow[] {\n const db = getDatabase();\n const tierPlaceholders = DIGEST_TIERS.map(() => '?').join(', ');\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM digest_extracts\n WHERE agent_id = ? 
AND tier IN (${tierPlaceholders})\n ORDER BY tier ASC`,\n ).all(agentId, ...DIGEST_TIERS) as Record<string, unknown>[];\n\n return rows.map(toDigestExtractRow);\n}\n\n// ---------------------------------------------------------------------------\n// Revision history (schema v15)\n// ---------------------------------------------------------------------------\n\nconst REVISION_COLUMNS = [\n 'id',\n 'agent_id',\n 'tier',\n 'content',\n 'metadata',\n 'run_id',\n 'parent_revision_id',\n 'created_at',\n] as const;\n\nconst REVISION_SELECT = REVISION_COLUMNS.join(', ');\n\nfunction toRevisionRow(row: Record<string, unknown>): DigestExtractRevisionRow {\n return {\n id: row.id as number,\n agent_id: row.agent_id as string,\n tier: row.tier as number,\n content: row.content as string,\n metadata: (row.metadata as string) ?? null,\n run_id: (row.run_id as string) ?? null,\n parent_revision_id: (row.parent_revision_id as number) ?? null,\n created_at: row.created_at as number,\n };\n}\n\n/**\n * List revisions for a specific (agent_id, tier) pair, newest first.\n * Used by operators who want to roll back a digest to an earlier state.\n */\nexport function listDigestRevisions(\n options: { agentId: string; tier: number; limit?: number },\n): DigestExtractRevisionRow[] {\n const db = getDatabase();\n const limit = options.limit ?? 50;\n const rows = db.prepare(\n `SELECT ${REVISION_SELECT}\n FROM digest_extract_revisions\n WHERE agent_id = ? AND tier = ?\n ORDER BY created_at DESC, id DESC\n LIMIT ?`,\n ).all(options.agentId, options.tier, limit) as Record<string, unknown>[];\n return rows.map(toRevisionRow);\n}\n\n/** Result of a successful rollback. */\nexport interface RollbackDigestExtractResult {\n /** The restored digest_extracts row (content now matches the target revision). */\n row: DigestExtractRow;\n /**\n * Id of the newly-appended revision that captures the pre-rollback live\n * content (so the rollback itself is reversible). `null` when no live row\n * existed before the rollback (nothing to preserve).\n */\n newRevisionId: number | null;\n}\n\n/**\n * Restore an earlier revision's content back into digest_extracts, and\n * append a *new* revision row so the revision history remains append-only.\n *\n * The newly-appended revision captures what was live before the rollback\n * (so the rollback itself is reversible), with its parent set to the last\n * revision for (agent_id, tier).\n *\n * Returns the restored digest_extracts row plus the newly-minted revision\n * id, or null if the revision id doesn't exist.\n */\nexport function rollbackDigestExtract(\n options: RollbackDigestExtractOptions,\n): RollbackDigestExtractResult | null {\n const db = getDatabase();\n\n const revision = db.prepare(\n `SELECT ${REVISION_SELECT}\n FROM digest_extract_revisions\n WHERE id = ?`,\n ).get(options.revisionId) as Record<string, unknown> | undefined;\n\n if (!revision) return null;\n\n const agentId = revision.agent_id as string;\n const tier = revision.tier as number;\n const targetContent = revision.content as string;\n const now = epochSeconds();\n\n // Preservation of the pre-rollback state and the live-row restore must\n // be atomic — same invariant as `upsertDigestExtract`.\n return db.transaction(() => {\n // 1) Append a new revision that preserves the *current* live content\n // (pre-rollback state) so the rollback itself is reversible.\n const currentRow = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts WHERE agent_id = ? 
AND tier = ?`,\n ).get(agentId, tier) as Record<string, unknown> | undefined;\n\n let newRevisionId: number | null = null;\n if (currentRow) {\n const priorRevisionId = db.prepare(\n `SELECT id FROM digest_extract_revisions\n WHERE agent_id = ? AND tier = ?\n ORDER BY id DESC\n LIMIT 1`,\n ).get(agentId, tier) as { id: number } | undefined;\n\n const info = db.prepare(\n `INSERT INTO digest_extract_revisions\n (agent_id, tier, content, metadata, run_id, parent_revision_id, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)`,\n ).run(\n agentId,\n tier,\n currentRow.content as string,\n JSON.stringify({ rollback_of: options.revisionId }),\n options.runId ?? null,\n priorRevisionId?.id ?? null,\n now,\n );\n newRevisionId = Number(info.lastInsertRowid);\n }\n\n // 2) Restore the target revision's content into the live row.\n db.prepare(\n `INSERT INTO digest_extracts (agent_id, tier, content, generated_at)\n VALUES (?, ?, ?, ?)\n ON CONFLICT (agent_id, tier) DO UPDATE SET\n content = EXCLUDED.content,\n generated_at = EXCLUDED.generated_at`,\n ).run(agentId, tier, targetContent, now);\n\n const restored = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts WHERE agent_id = ? AND tier = ?`,\n ).get(agentId, tier) as Record<string, unknown>;\n\n return {\n row: toDigestExtractRow(restored),\n newRevisionId,\n };\n })();\n}\n","/**\n * Plan CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { epochSeconds } from '@myco/constants.js';\nimport { getTeamMachineId, isTeamSyncEnabled } from '@myco/daemon/team-context.js';\nimport { enqueueOutbox } from '@myco/db/queries/team-outbox.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of plans returned by listPlans when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Default plan status for new plans. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default processed flag for new plans. */\nconst DEFAULT_PROCESSED = 0;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting/upserting a plan. */\nexport interface PlanInsert {\n id: string;\n logical_key: string;\n created_at: number;\n status?: string;\n author?: string | null;\n title?: string | null;\n content?: string | null;\n source_path?: string | null;\n tags?: string | null;\n session_id?: string | null;\n prompt_batch_id?: number | null;\n content_hash?: string | null;\n processed?: number;\n updated_at?: number | null;\n machine_id?: string;\n}\n\n/** Row shape returned from plan queries. */\nexport interface PlanRow {\n id: string;\n logical_key: string;\n status: string;\n author: string | null;\n title: string | null;\n content: string | null;\n source_path: string | null;\n tags: string | null;\n session_id: string | null;\n prompt_batch_id: number | null;\n content_hash: string | null;\n processed: number;\n embedded: number;\n created_at: number;\n updated_at: number | null;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Filter options for `listPlans`. 
*/\nexport interface ListPlansOptions {\n status?: string;\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst PLAN_COLUMNS = [\n 'id',\n 'logical_key',\n 'status',\n 'author',\n 'title',\n 'content',\n 'source_path',\n 'tags',\n 'session_id',\n 'prompt_batch_id',\n 'content_hash',\n 'processed',\n 'embedded',\n 'created_at',\n 'updated_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = PLAN_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed PlanRow. */\nfunction toPlanRow(row: Record<string, unknown>): PlanRow {\n return {\n id: row.id as string,\n logical_key: row.logical_key as string,\n status: row.status as string,\n author: (row.author as string) ?? null,\n title: (row.title as string) ?? null,\n content: (row.content as string) ?? null,\n source_path: (row.source_path as string) ?? null,\n tags: (row.tags as string) ?? null,\n session_id: (row.session_id as string) ?? null,\n prompt_batch_id: (row.prompt_batch_id as number) ?? null,\n content_hash: (row.content_hash as string) ?? null,\n processed: row.processed as number,\n embedded: (row.embedded as number) ?? 0,\n created_at: row.created_at as number,\n updated_at: (row.updated_at as number) ?? null,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a plan or update it if the id already exists.\n *\n * On conflict the row is updated with the values from `data`.\n */\nexport function upsertPlan(data: PlanInsert): PlanRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO plans (\n id, logical_key, status, author, title, content,\n source_path, tags, session_id, prompt_batch_id, content_hash,\n processed, created_at, updated_at, machine_id\n ) VALUES (\n ?, ?, ?, ?, ?, ?,\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?\n )\n ON CONFLICT (logical_key) DO UPDATE SET\n id = EXCLUDED.id,\n status = EXCLUDED.status,\n author = EXCLUDED.author,\n title = EXCLUDED.title,\n content = EXCLUDED.content,\n source_path = EXCLUDED.source_path,\n tags = EXCLUDED.tags,\n session_id = EXCLUDED.session_id,\n prompt_batch_id = EXCLUDED.prompt_batch_id,\n content_hash = EXCLUDED.content_hash,\n processed = EXCLUDED.processed,\n updated_at = EXCLUDED.updated_at,\n embedded = CASE\n WHEN EXCLUDED.content_hash != plans.content_hash THEN 0\n ELSE plans.embedded\n END`,\n ).run(\n data.id,\n data.logical_key,\n data.status ?? DEFAULT_STATUS,\n data.author ?? null,\n data.title ?? null,\n data.content ?? null,\n data.source_path ?? null,\n data.tags ?? null,\n data.session_id ?? null,\n data.prompt_batch_id ?? null,\n data.content_hash ?? null,\n data.processed ?? DEFAULT_PROCESSED,\n data.created_at,\n data.updated_at ?? null,\n data.machine_id ?? 
getTeamMachineId(),\n );\n\n const row = toPlanRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM plans WHERE logical_key = ?`).get(data.logical_key) as Record<string, unknown>,\n );\n\n syncRow('plans', row);\n\n return row;\n}\n\n/**\n * Retrieve a single plan by id.\n *\n * @returns the plan row, or null if not found.\n */\nexport function getPlan(id: string): PlanRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM plans WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toPlanRow(row);\n}\n\n/**\n * Retrieve a single plan by logical key.\n *\n * @returns the plan row, or null if not found.\n */\nexport function getPlanByLogicalKey(logicalKey: string): PlanRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM plans WHERE logical_key = ?`,\n ).get(logicalKey) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toPlanRow(row);\n}\n\n/**\n * Delete a single plan by id and enqueue a team-sync tombstone when enabled.\n *\n * @returns the deleted plan row, or null if not found.\n */\nexport function deletePlan(id: string): PlanRow | null {\n const db = getDatabase();\n const row = getPlan(id);\n if (!row) return null;\n\n const info = db.prepare(`DELETE FROM plans WHERE id = ?`).run(id);\n if (info.changes === 0) return null;\n\n if (isTeamSyncEnabled()) {\n enqueueOutbox({\n table_name: 'plans',\n row_id: row.id,\n operation: 'delete',\n payload: JSON.stringify({\n id: row.id,\n logical_key: row.logical_key,\n title: row.title,\n }),\n machine_id: getTeamMachineId(),\n created_at: epochSeconds(),\n });\n }\n\n return row;\n}\n\n/**\n * List plans with optional filters, ordered by created_at DESC.\n */\nexport function listPlans(\n options: ListPlansOptions = {},\n): PlanRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n\n params.push(limit);\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM plans\n ${where}\n ORDER BY created_at DESC\n LIMIT ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toPlanRow);\n}\n\n/**\n * List all plans associated with a specific session, ordered by created_at DESC.\n */\nexport function listPlansBySession(sessionId: string): PlanRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM plans\n WHERE session_id = ?\n ORDER BY created_at DESC`,\n ).all(sessionId) as Record<string, unknown>[];\n\n return rows.map(toPlanRow);\n}\n","/**\n * Spore CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of spores returned by listSpores when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Default spore status for new spores. 
*/\nconst DEFAULT_STATUS = 'active';\n\n/** Default importance score for new spores. */\nexport const DEFAULT_IMPORTANCE = 5;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a spore. */\nexport interface SporeInsert {\n id: string;\n agent_id: string;\n observation_type: string;\n content: string;\n created_at: number;\n session_id?: string | null;\n prompt_batch_id?: number | null;\n status?: string;\n context?: string | null;\n importance?: number;\n file_path?: string | null;\n tags?: string | null;\n content_hash?: string | null;\n properties?: string | null;\n updated_at?: number | null;\n machine_id?: string;\n}\n\n/** Row shape returned from spore queries (all columns). */\nexport interface SporeRow {\n id: string;\n agent_id: string;\n session_id: string | null;\n prompt_batch_id: number | null;\n observation_type: string;\n status: string;\n content: string;\n context: string | null;\n importance: number;\n file_path: string | null;\n tags: string | null;\n content_hash: string | null;\n properties: string | null;\n embedded: number;\n created_at: number;\n updated_at: number | null;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Filter options for `listSpores`. */\nexport interface ListSporesOptions {\n agent_id?: string;\n observation_type?: string;\n status?: string;\n session_id?: string;\n search?: string;\n /** Only return spores created after this epoch-seconds timestamp. */\n since?: number;\n limit?: number;\n offset?: number;\n /**\n * When explicitly `false`, exclude spores whose source session is still\n * `status = 'active'` — intelligence-task reads (agent tools, context\n * queries) should opt in to this. Defaults to permissive so UI listings\n * and prompt-time context injection keep seeing in-flight work. Explicit\n * `session_id` filters bypass this check: a direct lookup of one session's\n * spores is always permitted.\n */\n includeActive?: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst SPORE_COLUMNS = [\n 'id',\n 'agent_id',\n 'session_id',\n 'prompt_batch_id',\n 'observation_type',\n 'status',\n 'content',\n 'context',\n 'importance',\n 'file_path',\n 'tags',\n 'content_hash',\n 'properties',\n 'embedded',\n 'created_at',\n 'updated_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = SPORE_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed SporeRow. */\nfunction toSporeRow(row: Record<string, unknown>): SporeRow {\n return {\n id: row.id as string,\n agent_id: row.agent_id as string,\n session_id: (row.session_id as string) ?? null,\n prompt_batch_id: (row.prompt_batch_id as number) ?? null,\n observation_type: row.observation_type as string,\n status: row.status as string,\n content: row.content as string,\n context: (row.context as string) ?? null,\n importance: row.importance as number,\n file_path: (row.file_path as string) ?? null,\n tags: (row.tags as string) ?? null,\n content_hash: (row.content_hash as string) ?? null,\n properties: (row.properties as string) ?? 
null,\n embedded: (row.embedded as number) ?? 0,\n created_at: row.created_at as number,\n updated_at: (row.updated_at as number) ?? null,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new spore.\n *\n * Requires a valid `agent_id` (foreign key to agents table).\n */\nexport function insertSpore(data: SporeInsert): SporeRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO spores (\n id, agent_id, session_id, prompt_batch_id,\n observation_type, status, content, context,\n importance, file_path, tags, content_hash,\n properties, created_at, updated_at, machine_id\n ) VALUES (\n ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?, ?, ?\n )`,\n ).run(\n data.id,\n data.agent_id,\n data.session_id ?? null,\n data.prompt_batch_id ?? null,\n data.observation_type,\n data.status ?? DEFAULT_STATUS,\n data.content,\n data.context ?? null,\n data.importance ?? DEFAULT_IMPORTANCE,\n data.file_path ?? null,\n data.tags ?? null,\n data.content_hash ?? null,\n data.properties ?? null,\n data.created_at,\n data.updated_at ?? null,\n data.machine_id ?? getTeamMachineId(),\n );\n\n const row = toSporeRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM spores WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n syncRow('spores', row);\n\n return row;\n}\n\n/**\n * Retrieve a single spore by id.\n *\n * @returns the spore row, or null if not found.\n */\nexport function getSpore(id: string): SporeRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM spores WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toSporeRow(row);\n}\n\n/**\n * List spores with optional filters, ordered by created_at DESC.\n */\n/** Build WHERE clause and bound params from spore filter options. */\nfunction buildSporeWhere(\n options: Omit<ListSporesOptions, 'limit' | 'offset'>,\n): { where: string; params: unknown[] } {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.agent_id !== undefined) {\n conditions.push(`agent_id = ?`);\n params.push(options.agent_id);\n }\n if (options.observation_type !== undefined) {\n conditions.push(`observation_type = ?`);\n params.push(options.observation_type);\n }\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n if (options.session_id !== undefined) {\n conditions.push(`session_id = ?`);\n params.push(options.session_id);\n }\n if (options.search !== undefined && options.search.length > 0) {\n conditions.push(`(content LIKE ? OR observation_type LIKE ?)`);\n const pattern = `%${options.search}%`;\n params.push(pattern, pattern);\n }\n if (options.since !== undefined) {\n conditions.push('created_at > ?');\n params.push(options.since);\n }\n\n // Only exclude spores from in-flight sessions when the caller explicitly\n // asks for it (intelligence tasks). UI and hook-level context injection\n // leave this unset so they see everything. 
A direct session_id filter\n // bypasses the gate — that lookup is always permitted.\n if (options.includeActive === false && options.session_id === undefined) {\n conditions.push(\n `(session_id IS NULL OR EXISTS (SELECT 1 FROM sessions s WHERE s.id = spores.session_id AND s.status != 'active'))`,\n );\n }\n\n return {\n where: conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '',\n params,\n };\n}\n\n/**\n * List spores with optional filters, ordered by created_at DESC.\n */\nexport function listSpores(\n options: ListSporesOptions = {},\n): SporeRow[] {\n const db = getDatabase();\n const { where, params } = buildSporeWhere(options);\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n const offset = options.offset ?? 0;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM spores\n ${where}\n ORDER BY created_at DESC\n LIMIT ?\n OFFSET ?`,\n ).all(...params, limit, offset) as Record<string, unknown>[];\n\n return rows.map(toSporeRow);\n}\n\n/**\n * Count spores matching optional filters (for pagination totals).\n */\nexport function countSpores(\n options: Omit<ListSporesOptions, 'limit' | 'offset'> = {},\n): number {\n const db = getDatabase();\n const { where, params } = buildSporeWhere(options);\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM spores ${where}`,\n ).get(...params) as { count: number };\n\n return row.count;\n}\n\n/**\n * Count active spores created after a given timestamp.\n * Used by skill-evolve to detect new knowledge since last assessment.\n */\nexport function countSporesSince(sinceEpoch: number): number {\n const db = getDatabase();\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM spores WHERE created_at > ? AND status = 'active'`,\n ).get(sinceEpoch) as { count: number };\n return row.count;\n}\n\n/**\n * List active spore IDs created after a given timestamp, ordered newest first.\n */\nexport function listSporeIdsSince(sinceEpoch: number, limit = 20): string[] {\n const db = getDatabase();\n const rows = db.prepare(\n `SELECT id FROM spores WHERE created_at > ? 
AND status = 'active' ORDER BY created_at DESC LIMIT ?`,\n ).all(sinceEpoch, limit) as Array<{ id: string }>;\n return rows.map(r => r.id);\n}\n\n/**\n * Update the status and updated_at timestamp of a spore.\n *\n * @returns the updated row, or null if the spore does not exist.\n */\nexport function updateSporeStatus(\n id: string,\n status: string,\n updatedAt: number,\n): SporeRow | null {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE spores\n SET status = ?, updated_at = ?\n WHERE id = ?`,\n ).run(status, updatedAt, id);\n\n if (info.changes === 0) return null;\n\n const row = toSporeRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM spores WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n\n syncRow('spores', row);\n\n return row;\n}\n","/**\n * Cortex content assembly.\n *\n * Builds the material that the Cortex agent consumes and emits:\n * - Capability resolution (team/collective availability)\n * - Delivery-decision logic (inline vs session-start injection)\n * - Retrieval guidance derived from MCP tool definitions\n * - Instruction-input prompt for the `cortex-instructions` agent task\n *\n * Pure content layer — orchestration (agent run launch, snapshot reads,\n * prompt builder) lives in `@myco/daemon/cortex`.\n */\nimport { createHash } from 'node:crypto';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport {\n CONTENT_HASH_ALGORITHM,\n DEFAULT_AGENT_ID,\n DIGEST_FALLBACK_TIER,\n} from '@myco/constants.js';\nimport { getCortexInstructions } from '@myco/db/queries/cortex-instructions.js';\nimport { getDigestExtract } from '@myco/db/queries/digest-extracts.js';\nimport { listPlans } from '@myco/db/queries/plans.js';\nimport { listSessions } from '@myco/db/queries/sessions.js';\nimport { listSpores } from '@myco/db/queries/spores.js';\nimport type { TeamSyncClient } from '../daemon/team-sync.js';\nimport {\n TOOL_DEFINITIONS,\n COLLECTIVE_TOOL_DEFINITIONS,\n getToolCortexPriority,\n type ToolDefinition,\n} from '../mcp/tool-definitions.js';\n\nconst MAX_COLLECTIVE_CAPABILITY_LABELS = 4;\nconst ALL_CORTEX_TOOL_DEFINITIONS = [...TOOL_DEFINITIONS, ...COLLECTIVE_TOOL_DEFINITIONS];\n\nconst RECENT_SESSION_LIMIT = 3;\nconst RECENT_SPORE_LIMIT = 4;\nconst RECENT_PLAN_LIMIT = 3;\nconst CONTENT_PREVIEW_MAX_CHARS = 240;\nconst DIGEST_EXCERPT_MAX_CHARS = 900;\nconst JSON_INDENT = 2;\n\nexport const CORTEX_SKILLS_NOTE = 'Project and Myco skills are already registered with the agent separately. 
Tell the agent to use those skills directly when relevant, and do not instruct it to call `myco_skills`.';\n\n// ---------------------------------------------------------------------------\n// Capability resolution\n// ---------------------------------------------------------------------------\n\nexport interface CortexCapabilities {\n teamEnabled: boolean;\n collectiveConnected: boolean;\n collectiveCapabilities: string[];\n}\n\nexport interface CortexToolGuidance {\n tool: string;\n guidance: string;\n requiresTeam?: boolean;\n requiresCollective?: boolean;\n priority: number;\n}\n\nexport interface DeliveryDecision {\n inlineInstructions: boolean;\n reason: 'missing-symbiont' | 'session-start-supported' | 'session-start-disabled' | 'no-session-start';\n}\n\nfunction toCortexToolGuidance(\n tool: Pick<ToolDefinition, 'name' | 'cortex'>,\n): CortexToolGuidance | null {\n const cortex = tool.cortex;\n if (!cortex) return null;\n return {\n tool: tool.name,\n guidance: cortex.guidance,\n requiresTeam: cortex.requiresTeam,\n requiresCollective: cortex.requiresCollective,\n priority: getToolCortexPriority(tool),\n };\n}\n\nexport const RETRIEVAL_GUIDANCE: CortexToolGuidance[] = ALL_CORTEX_TOOL_DEFINITIONS\n .map(toCortexToolGuidance)\n .filter((entry): entry is CortexToolGuidance => entry !== null)\n .sort((left, right) => left.priority - right.priority);\n\nexport async function resolveCortexCapabilities(\n config: Pick<MycoConfig, 'team'>,\n getTeamClient?: () => TeamSyncClient | null,\n): Promise<CortexCapabilities> {\n const teamClient = getTeamClient?.() ?? null;\n const teamEnabled = Boolean(config.team.enabled && teamClient);\n let collectiveConnected = false;\n let collectiveCapabilities: string[] = [];\n\n if (teamEnabled && teamClient) {\n try {\n const status = await teamClient.getCollectiveStatus();\n collectiveConnected = Boolean(status?.connected);\n collectiveCapabilities = status?.capabilities ?? [];\n } catch {\n collectiveConnected = false;\n collectiveCapabilities = [];\n }\n }\n\n return {\n teamEnabled,\n collectiveConnected,\n collectiveCapabilities,\n };\n}\n\nexport function shouldInjectCortex(\n config: MycoConfig['context'],\n): boolean {\n return config.cortex_enabled;\n}\n\nexport function resolveInstructionDelivery(\n config: MycoConfig['context'],\n symbiont: {\n supportsSessionStartInjection: boolean;\n } | null,\n): DeliveryDecision {\n if (!symbiont) {\n return { inlineInstructions: true, reason: 'missing-symbiont' };\n }\n if (!config.cortex_enabled) {\n return { inlineInstructions: true, reason: 'session-start-disabled' };\n }\n if (symbiont.supportsSessionStartInjection) {\n return { inlineInstructions: false, reason: 'session-start-supported' };\n }\n return { inlineInstructions: true, reason: 'no-session-start' };\n}\n\nexport function buildCapabilitySummary(capabilities: CortexCapabilities): string[] {\n const summary = [\n capabilities.collectiveConnected\n ? 'Myco can retrieve local, team, and collective knowledge in this project.'\n : capabilities.teamEnabled\n ? 
'Myco can retrieve local and shared team knowledge in this project.'\n : 'Myco can retrieve local project knowledge in this project.',\n 'Use only the currently available Myco MCP tools described below, and omit any surfaces that are offline.',\n ];\n\n if (capabilities.collectiveConnected && capabilities.collectiveCapabilities.length > 0) {\n const labels = capabilities.collectiveCapabilities.slice(0, MAX_COLLECTIVE_CAPABILITY_LABELS);\n const remaining = Math.max(\n 0,\n capabilities.collectiveCapabilities.length - MAX_COLLECTIVE_CAPABILITY_LABELS,\n );\n const suffix = remaining > 0 ? ` (+${remaining} more)` : '';\n summary.push(`Collective capabilities online: ${labels.join(', ')}${suffix}.`);\n }\n\n return summary;\n}\n\nexport function buildRetrievalGuidanceLines(capabilities: CortexCapabilities): string[] {\n const lines: string[] = [];\n\n for (const entry of RETRIEVAL_GUIDANCE) {\n if (entry.requiresTeam && !capabilities.teamEnabled) continue;\n if (entry.requiresCollective && !capabilities.collectiveConnected) continue;\n lines.push(`- \\`${entry.tool}\\`: ${entry.guidance}`);\n }\n\n return lines;\n}\n\n// ---------------------------------------------------------------------------\n// Instruction-input prompt assembly (for the `cortex-instructions` task)\n// ---------------------------------------------------------------------------\n\nfunction hashInput(value: unknown): string {\n return createHash(CONTENT_HASH_ALGORITHM)\n .update(JSON.stringify(value))\n .digest('hex');\n}\n\nfunction truncatePreview(text: string | null, maxChars: number = CONTENT_PREVIEW_MAX_CHARS): string | null {\n if (!text) return null;\n return text.length > maxChars\n ? `${text.slice(0, maxChars)}...`\n : text;\n}\n\nfunction formatRecentSessions(): string {\n const sessions = listSessions({\n includeActive: true,\n limit: RECENT_SESSION_LIMIT,\n });\n if (sessions.length === 0) return 'No recent sessions are available.';\n\n return sessions.map((session) => {\n const parts = [\n `- ${session.title ?? session.id}`,\n session.branch ? `branch=${session.branch}` : null,\n truncatePreview(session.summary),\n ].filter(Boolean);\n return parts.join(' — ');\n }).join('\\n');\n}\n\nfunction formatRecentSpores(): string {\n const spores = listSpores({\n includeActive: true,\n status: 'active',\n limit: RECENT_SPORE_LIMIT,\n });\n if (spores.length === 0) return 'No recent spores are available.';\n\n return spores.map((spore) => {\n const parts = [\n `- [${spore.observation_type}] ${truncatePreview(spore.content)}`,\n spore.session_id ? `session=${spore.session_id}` : null,\n ].filter(Boolean);\n return parts.join(' — ');\n }).join('\\n');\n}\n\nfunction formatRecentPlans(): string {\n const plans = listPlans({\n status: 'active',\n limit: RECENT_PLAN_LIMIT,\n });\n if (plans.length === 0) return 'No active plans are available.';\n\n return plans.map((plan) => {\n const parts = [\n `- ${plan.title ?? plan.id}`,\n `status=${plan.status}`,\n truncatePreview(plan.content),\n ].filter(Boolean);\n return parts.join(' — ');\n }).join('\\n');\n}\n\nfunction formatDigestExcerpt(config: MycoConfig): string {\n const preferredTier = config.context.digest_tier;\n const extract =\n getDigestExtract(DEFAULT_AGENT_ID, preferredTier) ??\n getDigestExtract(DEFAULT_AGENT_ID, DIGEST_FALLBACK_TIER);\n if (!extract) return 'No current digest extract is available.';\n\n const excerpt = truncatePreview(extract.content, DIGEST_EXCERPT_MAX_CHARS) ?? '';\n return excerpt\n ? 
`Tier ${extract.tier} digest excerpt:\\n${excerpt}`\n : `Tier ${extract.tier} digest extract is empty.`;\n}\n\nexport interface CortexInstructionPayload {\n inputHash: string;\n instruction: string;\n}\n\nexport async function buildCortexInstructionsInput(\n config: MycoConfig,\n getTeamClient?: () => TeamSyncClient | null,\n): Promise<CortexInstructionPayload> {\n const capabilities = await resolveCortexCapabilities(config, getTeamClient);\n const capabilitySummary = buildCapabilitySummary(capabilities);\n const retrievalGuidance = buildRetrievalGuidanceLines(capabilities);\n const recentSessions = formatRecentSessions();\n const recentSpores = formatRecentSpores();\n const recentPlans = formatRecentPlans();\n const digestExcerpt = formatDigestExcerpt(config);\n const input = {\n context: {\n digest_tier: config.context.digest_tier,\n cortex_enabled: config.context.cortex_enabled,\n prompt_search: config.context.prompt_search,\n prompt_max_spores: config.context.prompt_max_spores,\n },\n capabilities,\n digestExcerpt,\n recentSessions,\n recentSpores,\n recentPlans,\n skillsNote: CORTEX_SKILLS_NOTE,\n };\n\n return {\n inputHash: hashInput(input),\n instruction: [\n 'Author compact session-start instructions for another coding agent.',\n 'Focus on teaching how to use the highest-signal Myco tools correctly, especially retrieval and plan persistence.',\n 'Do not restate AGENTS.md or static installation details.',\n '',\n '## Runtime config',\n JSON.stringify(input.context, null, JSON_INDENT),\n '',\n '## Authoring requirements',\n '- Start with the heading `## Myco-Enabled Project`.',\n '- Follow the heading with one brief sentence explaining that Myco provides project memory, prior decisions, plans, and retrieval tools for this repository.',\n '- Teach the most useful current Myco MCP tool behavior, especially retrieval and plan persistence.',\n '- Use the recent vault activity below to mention live project hotspots when that improves usefulness.',\n `- ${CORTEX_SKILLS_NOTE}`,\n '- Keep the heading and description brief so most of the budget goes to retrieval guidance.',\n '- Keep the output compact and ready for direct injection.',\n '',\n '## Capability summary',\n ...capabilitySummary,\n '',\n '## Tool guidance to encode',\n ...retrievalGuidance,\n '',\n '## Current digest excerpt',\n digestExcerpt,\n '',\n '## Recent sessions',\n recentSessions,\n '',\n '## Recent spores',\n recentSpores,\n '',\n '## Active plans',\n recentPlans,\n ].join('\\n'),\n };\n}\n\nexport async function buildScheduledCortexInstruction(\n config: MycoConfig,\n getTeamClient?: () => TeamSyncClient | null,\n): Promise<CortexInstructionPayload | undefined> {\n const built = await buildCortexInstructionsInput(config, getTeamClient);\n const existing = getCortexInstructions(DEFAULT_AGENT_ID);\n if (existing?.input_hash === built.inputHash) {\n return undefined;\n }\n return 
built;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAGA,IAAM,6BAA6B;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,2BAA2B,KAAK,IAAI;AAC3D,IAAM,iCAAiC;AAuBvC,SAAS,wBAAwB,KAAqD;AACpF,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,SAAS,IAAI;AAAA,IACb,YAAY,IAAI;AAAA,IAChB,eAAgB,IAAI,iBAA4B;AAAA,IAChD,cAAc,IAAI;AAAA,IAClB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAEO,SAAS,yBAAyB,OAAwD;AAC/F,QAAM,KAAK,YAAY;AACvB,QAAM,KAAK,MAAM,MAAM,GAAG,MAAM,QAAQ,IAAI,8BAA8B;AAE1E,QAAM,MAAM,GAAG;AAAA,IACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAWa,cAAc;AAAA,EAC7B,EAAE;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM,iBAAiB;AAAA,IACvB,MAAM;AAAA,IACN,MAAM,cAAc,iBAAiB;AAAA,EACvC;AAEA,SAAO,wBAAwB,GAAG;AACpC;AAEO,SAAS,sBAAsB,SAA+C;AACnF,QAAM,KAAK,YAAY;AACvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA,EAK1B,EAAE,IAAI,OAAO;AACb,SAAO,MAAM,wBAAwB,GAAG,IAAI;AAC9C;;;ACVA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMA,kBAAiB,gBAAgB,KAAK,IAAI;AAOhD,SAAS,mBAAmB,KAAgD;AAC1E,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,MAAM,IAAI;AAAA,IACV,SAAS,IAAI;AAAA,IACb,gBAAiB,IAAI,kBAA6B;AAAA,IAClD,cAAc,IAAI;AAAA,IAClB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAmBO,SAAS,oBACd,MACA,UAAsC,CAAC,GACd;AACzB,MAAI,QAAQ,OAAQ,QAAO;AAE3B,QAAM,KAAK,YAAY;AAOvB,SAAO,GAAG,YAAY,MAAM;AAG1B,UAAM,cAAc,GAAG;AAAA,MACrB,UAAUA,eAAc;AAAA,IAC1B,EAAE,IAAI,KAAK,UAAU,KAAK,IAAI;AAE9B,QAAI,aAAa;AACf,YAAM,kBAAkB,GAAG;AAAA,QACzB;AAAA;AAAA;AAAA;AAAA,MAIF,EAAE,IAAI,KAAK,UAAU,KAAK,IAAI;AAE9B,SAAG;AAAA,QACD;AAAA;AAAA;AAAA,MAGF,EAAE;AAAA,QACA,KAAK;AAAA,QACL,KAAK;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,YAAY;AAAA,QACpB,QAAQ,SAAS;AAAA,QACjB,iBAAiB,MAAM;AAAA,QACvB,aAAa;AAAA,MACf;AAAA,IACF;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA;AAAA;AAAA,IAKF,EAAE,IAAI,KAAK,UAAU,KAAK,MAAM,KAAK,SAAS,KAAK,YAAY;AAG/D,UAAM,MAAM,GAAG;AAAA,MACb,UAAUA,eAAc;AAAA,IAC1B,EAAE,IAAI,KAAK,UAAU,KAAK,IAAI;AAE9B,WAAO,mBAAmB,GAA8B;AAAA,EAC1D,CAAC,EAAE;AACL;AAOO,SAAS,iBACd,SACA,MACyB;AACzB,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA;AAAA,EAE1B,EAAE,IAAI,SAAS,IAAI;AAEnB,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,mBAAmB,GAAG;AAC/B;AAKO,SAAS,mBACd,SACoB;AACpB,QAAM,KAAK,YAAY;AACvB,QAAM,mBAAmB,aAAa,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAE9D,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,uCAEW,gBAAgB;AAAA;AAAA,EAErD,EAAE,IAAI,SAAS,GAAG,YAAY;AAE9B,SAAO,KAAK,IAAI,kBAAkB;AACpC;AAMA,IAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,kBAAkB,iBAAiB,KAAK,IAAI;AAElD,SAAS,cAAc,KAAwD;AAC7E,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,MAAM,IAAI;AAAA,IACV,SAAS,IAAI;AAAA,IACb,UAAW,IAAI,YAAuB;AAAA,IACtC,QAAS,IAAI,UAAqB;AAAA,IAClC,oBAAqB,IAAI,sBAAiC;AAAA,IAC1D,YAAY,IAAI;AAAA,EAClB;AACF;AAMO,SAAS,oBACd,SAC4B;AAC5B,QAAM,KAAK,YAAY;AACvB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3B,EAAE,IAAI,QAAQ,SAAS,QAAQ,MAAM,KAAK;AAC1C,SAAO,KAAK,IAAI,aAAa;AAC/B;AAyBO,SAAS,sBACd,SACoC;AACpC,QAAM,KAAK,YAAY;AAEvB,QAAM,WAAW,GAAG;AAAA,IAClB,UAAU,eAAe;AAAA;AAAA;AAAA,EAG3B,EAAE,IAAI,QAAQ,UAAU;AAExB,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,UAAU,SAAS;AACzB,QAAM,OAAO,SAAS;AACtB,QAAM,gBAAgB,SAAS;AAC/B,QAAM,MAAM,aAAa;AAIzB,SAAO,GAAG,YAAY,MAAM;AAG1B,UAAM,aAAa,GAAG;AAAA,MACpB,UAAUA,eAAc;AAAA,IAC1B,EAAE,IAAI,SAAS,IAAI;AAEnB,QAAI,gBAA+B;AACnC,QAAI,YAAY;AACd,YAAM,kBAAkB,GAAG;AAAA,QACzB;AAAA;AAAA;AAAA;AAAA,MAIF,EAAE,IAAI,SAAS,IAAI;AAEnB,YAAM,OAAO,GAAG;AAAA,QACd;
AAAA;AAAA;AAAA,MAGF,EAAE;AAAA,QACA;AAAA,QACA;AAAA,QACA,WAAW;AAAA,QACX,KAAK,UAAU,EAAE,aAAa,QAAQ,WAAW,CAAC;AAAA,QAClD,QAAQ,SAAS;AAAA,QACjB,iBAAiB,MAAM;AAAA,QACvB;AAAA,MACF;AACA,sBAAgB,OAAO,KAAK,eAAe;AAAA,IAC7C;AAGA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA;AAAA;AAAA,IAKF,EAAE,IAAI,SAAS,MAAM,eAAe,GAAG;AAEvC,UAAM,WAAW,GAAG;AAAA,MAClB,UAAUA,eAAc;AAAA,IAC1B,EAAE,IAAI,SAAS,IAAI;AAEnB,WAAO;AAAA,MACL,KAAK,mBAAmB,QAAQ;AAAA,MAChC;AAAA,IACF;AAAA,EACF,CAAC,EAAE;AACL;;;AChWA,IAAM,qBAAqB;AAG3B,IAAM,iBAAiB;AAGvB,IAAM,oBAAoB;AAwD1B,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,aAAa,KAAK,IAAI;AAO7C,SAAS,UAAU,KAAuC;AACxD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,aAAa,IAAI;AAAA,IACjB,QAAQ,IAAI;AAAA,IACZ,QAAS,IAAI,UAAqB;AAAA,IAClC,OAAQ,IAAI,SAAoB;AAAA,IAChC,SAAU,IAAI,WAAsB;AAAA,IACpC,aAAc,IAAI,eAA0B;AAAA,IAC5C,MAAO,IAAI,QAAmB;AAAA,IAC9B,YAAa,IAAI,cAAyB;AAAA,IAC1C,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,cAAe,IAAI,gBAA2B;AAAA,IAC9C,WAAW,IAAI;AAAA,IACf,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAWO,SAAS,WAAW,MAA2B;AACpD,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0BF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,UAAU;AAAA,IACf,KAAK,UAAU;AAAA,IACf,KAAK,SAAS;AAAA,IACd,KAAK,WAAW;AAAA,IAChB,KAAK,eAAe;AAAA,IACpB,KAAK,QAAQ;AAAA,IACb,KAAK,cAAc;AAAA,IACnB,KAAK,mBAAmB;AAAA,IACxB,KAAK,gBAAgB;AAAA,IACrB,KAAK,aAAa;AAAA,IAClB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,cAAc,iBAAiB;AAAA,EACtC;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUA,eAAc,mCAAmC,EAAE,IAAI,KAAK,WAAW;AAAA,EAC9F;AAEA,UAAQ,SAAS,GAAG;AAEpB,SAAO;AACT;AAOO,SAAS,QAAQ,IAA4B;AAClD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,UAAU,GAAG;AACtB;AAOO,SAAS,oBAAoB,YAAoC;AACtE,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,UAAU;AAEhB,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,UAAU,GAAG;AACtB;AAOO,SAAS,WAAW,IAA4B;AACrD,QAAM,KAAK,YAAY;AACvB,QAAM,MAAM,QAAQ,EAAE;AACtB,MAAI,CAAC,IAAK,QAAO;AAEjB,QAAM,OAAO,GAAG,QAAQ,gCAAgC,EAAE,IAAI,EAAE;AAChE,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,MAAI,kBAAkB,GAAG;AACvB,kBAAc;AAAA,MACZ,YAAY;AAAA,MACZ,QAAQ,IAAI;AAAA,MACZ,WAAW;AAAA,MACX,SAAS,KAAK,UAAU;AAAA,QACtB,IAAI,IAAI;AAAA,QACR,aAAa,IAAI;AAAA,QACjB,OAAO,IAAI;AAAA,MACb,CAAC;AAAA,MACD,YAAY,iBAAiB;AAAA,MAC7B,YAAY,aAAa;AAAA,IAC3B,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKO,SAAS,UACd,UAA4B,CAAC,GAClB;AACX,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,QAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAC5E,QAAM,QAAQ,QAAQ,SAAS;AAE/B,SAAO,KAAK,KAAK;AAEjB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA,EAGV,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,SAAS;AAC3B;AAKO,SAAS,mBAAmB,WAA8B;AAC/D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,SAAS;AAEf,SAAO,KAAK,IAAI,SAAS;AAC3B;;;AChSA,IAAMC,sBAAqB;AAG3B,IAAMC,kBAAiB;AAGhB,IAAM,qBAAqB;AA0ElC,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,cAAc,KAAK,IAAI;AAO9C,SAAS,WAAW,KAAwC;AAC1D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UA
AU,IAAI;AAAA,IACd,YAAa,IAAI,cAAyB;AAAA,IAC1C,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,kBAAkB,IAAI;AAAA,IACtB,QAAQ,IAAI;AAAA,IACZ,SAAS,IAAI;AAAA,IACb,SAAU,IAAI,WAAsB;AAAA,IACpC,YAAY,IAAI;AAAA,IAChB,WAAY,IAAI,aAAwB;AAAA,IACxC,MAAO,IAAI,QAAmB;AAAA,IAC9B,cAAe,IAAI,gBAA2B;AAAA,IAC9C,YAAa,IAAI,cAAyB;AAAA,IAC1C,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAWO,SAAS,YAAY,MAA6B;AACvD,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,mBAAmB;AAAA,IACxB,KAAK;AAAA,IACL,KAAK,UAAUD;AAAA,IACf,KAAK;AAAA,IACL,KAAK,WAAW;AAAA,IAChB,KAAK,cAAc;AAAA,IACnB,KAAK,aAAa;AAAA,IAClB,KAAK,QAAQ;AAAA,IACb,KAAK,gBAAgB;AAAA,IACrB,KAAK,cAAc;AAAA,IACnB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,cAAc,iBAAiB;AAAA,EACtC;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUC,eAAc,2BAA2B,EAAE,IAAI,KAAK,EAAE;AAAA,EAC7E;AAEA,UAAQ,UAAU,GAAG;AAErB,SAAO;AACT;AAOO,SAAS,SAAS,IAA6B;AACpD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,WAAW,GAAG;AACvB;AAMA,SAAS,gBACP,SACsC;AACtC,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AACA,MAAI,QAAQ,qBAAqB,QAAW;AAC1C,eAAW,KAAK,sBAAsB;AACtC,WAAO,KAAK,QAAQ,gBAAgB;AAAA,EACtC;AACA,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AACA,MAAI,QAAQ,eAAe,QAAW;AACpC,eAAW,KAAK,gBAAgB;AAChC,WAAO,KAAK,QAAQ,UAAU;AAAA,EAChC;AACA,MAAI,QAAQ,WAAW,UAAa,QAAQ,OAAO,SAAS,GAAG;AAC7D,eAAW,KAAK,6CAA6C;AAC7D,UAAM,UAAU,IAAI,QAAQ,MAAM;AAClC,WAAO,KAAK,SAAS,OAAO;AAAA,EAC9B;AACA,MAAI,QAAQ,UAAU,QAAW;AAC/B,eAAW,KAAK,gBAAgB;AAChC,WAAO,KAAK,QAAQ,KAAK;AAAA,EAC3B;AAMA,MAAI,QAAQ,kBAAkB,SAAS,QAAQ,eAAe,QAAW;AACvE,eAAW;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAAA,IACrE;AAAA,EACF;AACF;AAKO,SAAS,WACd,UAA6B,CAAC,GAClB;AACZ,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,gBAAgB,OAAO;AACjD,QAAM,QAAQ,QAAQ,SAASF;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAUE,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,QAAQ,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,UAAU;AAC5B;AAKO,SAAS,YACd,UAAuD,CAAC,GAChD;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,gBAAgB,OAAO;AAEjD,QAAM,MAAM,GAAG;AAAA,IACb,wCAAwC,KAAK;AAAA,EAC/C,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,IAAI;AACb;AAiBO,SAAS,kBAAkB,YAAoB,QAAQ,IAAc;AAC1E,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI,YAAY,KAAK;AACvB,SAAO,KAAK,IAAI,OAAK,EAAE,EAAE;AAC3B;AAOO,SAAS,kBACd,IACA,QACA,WACiB;AACjB,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,QAAQ,WAAW,EAAE;AAE3B,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUC,eAAc,2BAA2B,EAAE,IAAI,EAAE;AAAA,EACxE;AAEA,UAAQ,UAAU,GAAG;AAErB,SAAO;AACT;;;ACxVA,SAAS,kBAAkB;AAoB3B,IAAM,mCAAmC;AACzC,IAAM,8BAA8B,CAAC,GAAG,kBAAkB,GAAG,2BAA2B;AAExF,IAAM,uBAAuB;AAC7B,IAAM,qBAAqB;AAC3B,IAAM,oBAAoB;AAC1B,IAAM,4BAA4B;AAClC,IAAM,2BAA2B;AACjC,IAAM,cAAc;AAEb,IAAM,qBAAqB;AAyBlC,SAAS,qBACP,MAC2B;AAC3B,QAAM,SAAS,KAAK;AACpB,MAAI,CAAC,OAAQ,QAAO;AACpB,SAAO;AAAA,IACL,MAAM,KAAK;AAAA,IACX,UAAU,OAAO;AAAA,IACjB,cAAc,OAAO;AAAA,IACrB,oBAAoB,OAAO;AAAA,IAC3B,UAAU,sBAAsB,IAAI;AAAA,EACtC;AACF;AAEO,IAAM,qBAA2C,4BACrD,IAAI,oBAAoB,EACxB,OAAO,CAAC,UAAuC,UAAU,IAAI,EAC7D,KAAK,CAAC,MAAM,UAAU,KAAK,WAAW,MAAM,QAAQ;AAEvD,eAAsB,0BACpB,QACA,eAC6B;AAC7B,QAAM,aAAa,gBAAgB,KAAK;AACxC,QAAM,cAAc,QAAQ,OAAO,KAAK,WAAW,UAAU;AAC7D,MAAI,sBAAsB;AAC1B,MAAI,y
BAAmC,CAAC;AAExC,MAAI,eAAe,YAAY;AAC7B,QAAI;AACF,YAAM,SAAS,MAAM,WAAW,oBAAoB;AACpD,4BAAsB,QAAQ,QAAQ,SAAS;AAC/C,+BAAyB,QAAQ,gBAAgB,CAAC;AAAA,IACpD,QAAQ;AACN,4BAAsB;AACtB,+BAAyB,CAAC;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,mBACd,QACS;AACT,SAAO,OAAO;AAChB;AAEO,SAAS,2BACd,QACA,UAGkB;AAClB,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,oBAAoB,MAAM,QAAQ,mBAAmB;AAAA,EAChE;AACA,MAAI,CAAC,OAAO,gBAAgB;AAC1B,WAAO,EAAE,oBAAoB,MAAM,QAAQ,yBAAyB;AAAA,EACtE;AACA,MAAI,SAAS,+BAA+B;AAC1C,WAAO,EAAE,oBAAoB,OAAO,QAAQ,0BAA0B;AAAA,EACxE;AACA,SAAO,EAAE,oBAAoB,MAAM,QAAQ,mBAAmB;AAChE;AAEO,SAAS,uBAAuB,cAA4C;AACjF,QAAM,UAAU;AAAA,IACd,aAAa,sBACT,6EACA,aAAa,cACX,uEACA;AAAA,IACN;AAAA,EACF;AAEA,MAAI,aAAa,uBAAuB,aAAa,uBAAuB,SAAS,GAAG;AACtF,UAAM,SAAS,aAAa,uBAAuB,MAAM,GAAG,gCAAgC;AAC5F,UAAM,YAAY,KAAK;AAAA,MACrB;AAAA,MACA,aAAa,uBAAuB,SAAS;AAAA,IAC/C;AACA,UAAM,SAAS,YAAY,IAAI,MAAM,SAAS,WAAW;AACzD,YAAQ,KAAK,mCAAmC,OAAO,KAAK,IAAI,CAAC,GAAG,MAAM,GAAG;AAAA,EAC/E;AAEA,SAAO;AACT;AAEO,SAAS,4BAA4B,cAA4C;AACtF,QAAM,QAAkB,CAAC;AAEzB,aAAW,SAAS,oBAAoB;AACtC,QAAI,MAAM,gBAAgB,CAAC,aAAa,YAAa;AACrD,QAAI,MAAM,sBAAsB,CAAC,aAAa,oBAAqB;AACnE,UAAM,KAAK,OAAO,MAAM,IAAI,OAAO,MAAM,QAAQ,EAAE;AAAA,EACrD;AAEA,SAAO;AACT;AAMA,SAAS,UAAU,OAAwB;AACzC,SAAO,WAAW,sBAAsB,EACrC,OAAO,KAAK,UAAU,KAAK,CAAC,EAC5B,OAAO,KAAK;AACjB;AAEA,SAAS,gBAAgB,MAAqB,WAAmB,2BAA0C;AACzG,MAAI,CAAC,KAAM,QAAO;AAClB,SAAO,KAAK,SAAS,WACjB,GAAG,KAAK,MAAM,GAAG,QAAQ,CAAC,QAC1B;AACN;AAEA,SAAS,uBAA+B;AACtC,QAAM,WAAW,aAAa;AAAA,IAC5B,eAAe;AAAA,IACf,OAAO;AAAA,EACT,CAAC;AACD,MAAI,SAAS,WAAW,EAAG,QAAO;AAElC,SAAO,SAAS,IAAI,CAAC,YAAY;AAC/B,UAAM,QAAQ;AAAA,MACZ,KAAK,QAAQ,SAAS,QAAQ,EAAE;AAAA,MAChC,QAAQ,SAAS,UAAU,QAAQ,MAAM,KAAK;AAAA,MAC9C,gBAAgB,QAAQ,OAAO;AAAA,IACjC,EAAE,OAAO,OAAO;AAChB,WAAO,MAAM,KAAK,UAAK;AAAA,EACzB,CAAC,EAAE,KAAK,IAAI;AACd;AAEA,SAAS,qBAA6B;AACpC,QAAM,SAAS,WAAW;AAAA,IACxB,eAAe;AAAA,IACf,QAAQ;AAAA,IACR,OAAO;AAAA,EACT,CAAC;AACD,MAAI,OAAO,WAAW,EAAG,QAAO;AAEhC,SAAO,OAAO,IAAI,CAAC,UAAU;AAC3B,UAAM,QAAQ;AAAA,MACZ,MAAM,MAAM,gBAAgB,KAAK,gBAAgB,MAAM,OAAO,CAAC;AAAA,MAC/D,MAAM,aAAa,WAAW,MAAM,UAAU,KAAK;AAAA,IACrD,EAAE,OAAO,OAAO;AAChB,WAAO,MAAM,KAAK,UAAK;AAAA,EACzB,CAAC,EAAE,KAAK,IAAI;AACd;AAEA,SAAS,oBAA4B;AACnC,QAAM,QAAQ,UAAU;AAAA,IACtB,QAAQ;AAAA,IACR,OAAO;AAAA,EACT,CAAC;AACD,MAAI,MAAM,WAAW,EAAG,QAAO;AAE/B,SAAO,MAAM,IAAI,CAAC,SAAS;AACzB,UAAM,QAAQ;AAAA,MACZ,KAAK,KAAK,SAAS,KAAK,EAAE;AAAA,MAC1B,UAAU,KAAK,MAAM;AAAA,MACrB,gBAAgB,KAAK,OAAO;AAAA,IAC9B,EAAE,OAAO,OAAO;AAChB,WAAO,MAAM,KAAK,UAAK;AAAA,EACzB,CAAC,EAAE,KAAK,IAAI;AACd;AAEA,SAAS,oBAAoB,QAA4B;AACvD,QAAM,gBAAgB,OAAO,QAAQ;AACrC,QAAM,UACJ,iBAAiB,kBAAkB,aAAa,KAChD,iBAAiB,kBAAkB,oBAAoB;AACzD,MAAI,CAAC,QAAS,QAAO;AAErB,QAAM,UAAU,gBAAgB,QAAQ,SAAS,wBAAwB,KAAK;AAC9E,SAAO,UACH,QAAQ,QAAQ,IAAI;AAAA,EAAqB,OAAO,KAChD,QAAQ,QAAQ,IAAI;AAC1B;AAOA,eAAsB,6BACpB,QACA,eACmC;AACnC,QAAM,eAAe,MAAM,0BAA0B,QAAQ,aAAa;AAC1E,QAAM,oBAAoB,uBAAuB,YAAY;AAC7D,QAAM,oBAAoB,4BAA4B,YAAY;AAClE,QAAM,iBAAiB,qBAAqB;AAC5C,QAAM,eAAe,mBAAmB;AACxC,QAAM,cAAc,kBAAkB;AACtC,QAAM,gBAAgB,oBAAoB,MAAM;AAChD,QAAM,QAAQ;AAAA,IACZ,SAAS;AAAA,MACP,aAAa,OAAO,QAAQ;AAAA,MAC5B,gBAAgB,OAAO,QAAQ;AAAA,MAC/B,eAAe,OAAO,QAAQ;AAAA,MAC9B,mBAAmB,OAAO,QAAQ;AAAA,IACpC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,EACd;AAEA,SAAO;AAAA,IACL,WAAW,UAAU,KAAK;AAAA,IAC1B,aAAa;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK,UAAU,MAAM,SAAS,MAAM,WAAW;AAAA,MAC/C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK,kBAAkB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA
,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,EAAE,KAAK,IAAI;AAAA,EACb;AACF;AAEA,eAAsB,gCACpB,QACA,eAC+C;AAC/C,QAAM,QAAQ,MAAM,6BAA6B,QAAQ,aAAa;AACtE,QAAM,WAAW,sBAAsB,gBAAgB;AACvD,MAAI,UAAU,eAAe,MAAM,WAAW;AAC5C,WAAO;AAAA,EACT;AACA,SAAO;AACT;","names":["SELECT_COLUMNS","SELECT_COLUMNS","DEFAULT_LIST_LIMIT","DEFAULT_STATUS","SELECT_COLUMNS","SELECT_COLUMNS"]}
@@ -107,4 +107,4 @@ export {
  listTurnsByRun,
  countToolCallsByRun
  };
- //# sourceMappingURL=chunk-6RFZWV4R.js.map
+ //# sourceMappingURL=chunk-FCJ5JV54.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../src/db/queries/turns.ts"],"sourcesContent":["/**\n * Agent turn CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a turn. */\nexport interface TurnInsert {\n run_id: string;\n agent_id: string;\n turn_number: number;\n tool_name: string;\n tool_input?: string | null;\n tool_output_summary?: string | null;\n started_at?: number | null;\n completed_at?: number | null;\n}\n\n/** Fields that can be updated after a turn row has been created. */\nexport interface TurnCompletion {\n tool_output_summary?: string | null;\n completed_at?: number | null;\n}\n\n/** Row shape returned from agent_turns queries (all columns). */\nexport interface TurnRow {\n id: number;\n run_id: string;\n agent_id: string;\n turn_number: number;\n tool_name: string;\n tool_input: string | null;\n tool_output_summary: string | null;\n started_at: number | null;\n completed_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst TURN_COLUMNS = [\n 'id',\n 'run_id',\n 'agent_id',\n 'turn_number',\n 'tool_name',\n 'tool_input',\n 'tool_output_summary',\n 'started_at',\n 'completed_at',\n] as const;\n\nconst SELECT_COLUMNS = TURN_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed TurnRow. */\nfunction toTurnRow(row: Record<string, unknown>): TurnRow {\n return {\n id: row.id as number,\n run_id: row.run_id as string,\n agent_id: row.agent_id as string,\n turn_number: row.turn_number as number,\n tool_name: row.tool_name as string,\n tool_input: (row.tool_input as string) ?? null,\n tool_output_summary: (row.tool_output_summary as string) ?? null,\n started_at: (row.started_at as number) ?? null,\n completed_at: (row.completed_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new agent turn.\n *\n * The `id` is auto-generated by the INTEGER PRIMARY KEY (AUTOINCREMENT).\n */\nexport function insertTurn(data: TurnInsert): TurnRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO agent_turns (\n run_id, agent_id, turn_number, tool_name,\n tool_input, tool_output_summary, started_at, completed_at\n ) VALUES (\n ?, ?, ?, ?,\n ?, ?, ?, ?\n )`,\n ).run(\n data.run_id,\n data.agent_id,\n data.turn_number,\n data.tool_name,\n data.tool_input ?? null,\n data.tool_output_summary ?? null,\n data.started_at ?? null,\n data.completed_at ?? 
null,\n );\n\n const turnId = Number(info.lastInsertRowid);\n\n return toTurnRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_turns WHERE id = ?`).get(turnId) as Record<string, unknown>,\n );\n}\n\n/**\n * Update completion metadata for an existing turn.\n *\n * @returns the updated row, or null if the turn does not exist.\n */\nexport function updateTurn(id: number, completion: TurnCompletion): TurnRow | null {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE agent_turns\n SET tool_output_summary = ?, completed_at = ?\n WHERE id = ?`,\n ).run(\n completion.tool_output_summary ?? null,\n completion.completed_at ?? null,\n id,\n );\n\n if (info.changes === 0) return null;\n\n return toTurnRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_turns WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * List all turns for a specific run, ordered by turn_number ASC.\n */\nexport function listTurns(runId: string): TurnRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_turns\n WHERE run_id = ?\n ORDER BY turn_number ASC`,\n ).all(runId) as Record<string, unknown>[];\n\n return rows.map(toTurnRow);\n}\n\n/**\n * List all agent turns for a run, ordered by turn_number ASC.\n *\n * Alias for `listTurns` with an explicit \"by run\" naming convention used\n * by the dashboard API layer.\n */\nexport function listTurnsByRun(runId: string): TurnRow[] {\n return listTurns(runId);\n}\n\n/** Count tool calls by name for a specific run. */\nexport function countToolCallsByRun(\n runId: string,\n toolNames: string[],\n): Record<string, number> {\n if (toolNames.length === 0) return {};\n const db = getDatabase();\n const placeholders = toolNames.map(() => '?').join(', ');\n const rows = db.prepare(\n `SELECT tool_name, COUNT(*) as count\n FROM agent_turns\n WHERE run_id = ? 
AND tool_name IN (${placeholders})\n GROUP BY tool_name`,\n ).all(runId, ...toolNames) as Array<{ tool_name: string; count: number }>;\n\n const result: Record<string, number> = {};\n for (const row of rows) {\n result[row.tool_name] = row.count;\n }\n return result;\n}\n"],"mappings":";;;;;;AAgDA,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,aAAa,KAAK,IAAI;AAO7C,SAAS,UAAU,KAAuC;AACxD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,QAAQ,IAAI;AAAA,IACZ,UAAU,IAAI;AAAA,IACd,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,YAAa,IAAI,cAAyB;AAAA,IAC1C,qBAAsB,IAAI,uBAAkC;AAAA,IAC5D,YAAa,IAAI,cAAyB;AAAA,IAC1C,cAAe,IAAI,gBAA2B;AAAA,EAChD;AACF;AAWO,SAAS,WAAW,MAA2B;AACpD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,uBAAuB;AAAA,IAC5B,KAAK,cAAc;AAAA,IACnB,KAAK,gBAAgB;AAAA,EACvB;AAEA,QAAM,SAAS,OAAO,KAAK,eAAe;AAE1C,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,MAAM;AAAA,EACjF;AACF;AAOO,SAAS,WAAW,IAAY,YAA4C;AACjF,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,WAAW,uBAAuB;AAAA,IAClC,WAAW,gBAAgB;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AACF;AAKO,SAAS,UAAU,OAA0B;AAClD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,KAAK;AAEX,SAAO,KAAK,IAAI,SAAS;AAC3B;AAQO,SAAS,eAAe,OAA0B;AACvD,SAAO,UAAU,KAAK;AACxB;AAGO,SAAS,oBACd,OACA,WACwB;AACxB,MAAI,UAAU,WAAW,EAAG,QAAO,CAAC;AACpC,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,UAAU,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACvD,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,0CAEsC,YAAY;AAAA;AAAA,EAEpD,EAAE,IAAI,OAAO,GAAG,SAAS;AAEzB,QAAM,SAAiC,CAAC;AACxC,aAAW,OAAO,MAAM;AACtB,WAAO,IAAI,SAAS,IAAI,IAAI;AAAA,EAC9B;AACA,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../src/db/queries/turns.ts"],"sourcesContent":["/**\n * Agent turn CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a turn. */\nexport interface TurnInsert {\n run_id: string;\n agent_id: string;\n turn_number: number;\n tool_name: string;\n tool_input?: string | null;\n tool_output_summary?: string | null;\n started_at?: number | null;\n completed_at?: number | null;\n}\n\n/** Fields that can be updated after a turn row has been created. */\nexport interface TurnCompletion {\n tool_output_summary?: string | null;\n completed_at?: number | null;\n}\n\n/** Row shape returned from agent_turns queries (all columns). */\nexport interface TurnRow {\n id: number;\n run_id: string;\n agent_id: string;\n turn_number: number;\n tool_name: string;\n tool_input: string | null;\n tool_output_summary: string | null;\n started_at: number | null;\n completed_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst TURN_COLUMNS = [\n 'id',\n 'run_id',\n 'agent_id',\n 'turn_number',\n 'tool_name',\n 'tool_input',\n 'tool_output_summary',\n 'started_at',\n 'completed_at',\n] as const;\n\nconst SELECT_COLUMNS = TURN_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed TurnRow. */\nfunction toTurnRow(row: Record<string, unknown>): TurnRow {\n return {\n id: row.id as number,\n run_id: row.run_id as string,\n agent_id: row.agent_id as string,\n turn_number: row.turn_number as number,\n tool_name: row.tool_name as string,\n tool_input: (row.tool_input as string) ?? null,\n tool_output_summary: (row.tool_output_summary as string) ?? null,\n started_at: (row.started_at as number) ?? null,\n completed_at: (row.completed_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new agent turn.\n *\n * The `id` is auto-generated by the INTEGER PRIMARY KEY (AUTOINCREMENT).\n */\nexport function insertTurn(data: TurnInsert): TurnRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO agent_turns (\n run_id, agent_id, turn_number, tool_name,\n tool_input, tool_output_summary, started_at, completed_at\n ) VALUES (\n ?, ?, ?, ?,\n ?, ?, ?, ?\n )`,\n ).run(\n data.run_id,\n data.agent_id,\n data.turn_number,\n data.tool_name,\n data.tool_input ?? null,\n data.tool_output_summary ?? null,\n data.started_at ?? null,\n data.completed_at ?? 
null,\n );\n\n const turnId = Number(info.lastInsertRowid);\n\n return toTurnRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_turns WHERE id = ?`).get(turnId) as Record<string, unknown>,\n );\n}\n\n/**\n * Update completion metadata for an existing turn.\n *\n * @returns the updated row, or null if the turn does not exist.\n */\nexport function updateTurn(id: number, completion: TurnCompletion): TurnRow | null {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE agent_turns\n SET tool_output_summary = ?, completed_at = ?\n WHERE id = ?`,\n ).run(\n completion.tool_output_summary ?? null,\n completion.completed_at ?? null,\n id,\n );\n\n if (info.changes === 0) return null;\n\n return toTurnRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_turns WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * List all turns for a specific run, ordered by turn_number ASC.\n */\nexport function listTurns(runId: string): TurnRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_turns\n WHERE run_id = ?\n ORDER BY turn_number ASC`,\n ).all(runId) as Record<string, unknown>[];\n\n return rows.map(toTurnRow);\n}\n\n/**\n * List all agent turns for a run, ordered by turn_number ASC.\n *\n * Alias for `listTurns` with an explicit \"by run\" naming convention used\n * by the dashboard API layer.\n */\nexport function listTurnsByRun(runId: string): TurnRow[] {\n return listTurns(runId);\n}\n\n/** Count tool calls by name for a specific run. */\nexport function countToolCallsByRun(\n runId: string,\n toolNames: string[],\n): Record<string, number> {\n if (toolNames.length === 0) return {};\n const db = getDatabase();\n const placeholders = toolNames.map(() => '?').join(', ');\n const rows = db.prepare(\n `SELECT tool_name, COUNT(*) as count\n FROM agent_turns\n WHERE run_id = ? 
AND tool_name IN (${placeholders})\n GROUP BY tool_name`,\n ).all(runId, ...toolNames) as Array<{ tool_name: string; count: number }>;\n\n const result: Record<string, number> = {};\n for (const row of rows) {\n result[row.tool_name] = row.count;\n }\n return result;\n}\n\n"],"mappings":";;;;;;AAgDA,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,aAAa,KAAK,IAAI;AAO7C,SAAS,UAAU,KAAuC;AACxD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,QAAQ,IAAI;AAAA,IACZ,UAAU,IAAI;AAAA,IACd,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,YAAa,IAAI,cAAyB;AAAA,IAC1C,qBAAsB,IAAI,uBAAkC;AAAA,IAC5D,YAAa,IAAI,cAAyB;AAAA,IAC1C,cAAe,IAAI,gBAA2B;AAAA,EAChD;AACF;AAWO,SAAS,WAAW,MAA2B;AACpD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,uBAAuB;AAAA,IAC5B,KAAK,cAAc;AAAA,IACnB,KAAK,gBAAgB;AAAA,EACvB;AAEA,QAAM,SAAS,OAAO,KAAK,eAAe;AAE1C,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,MAAM;AAAA,EACjF;AACF;AAOO,SAAS,WAAW,IAAY,YAA4C;AACjF,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,WAAW,uBAAuB;AAAA,IAClC,WAAW,gBAAgB;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AACF;AAKO,SAAS,UAAU,OAA0B;AAClD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,KAAK;AAEX,SAAO,KAAK,IAAI,SAAS;AAC3B;AAQO,SAAS,eAAe,OAA0B;AACvD,SAAO,UAAU,KAAK;AACxB;AAGO,SAAS,oBACd,OACA,WACwB;AACxB,MAAI,UAAU,WAAW,EAAG,QAAO,CAAC;AACpC,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,UAAU,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACvD,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,0CAEsC,YAAY;AAAA;AAAA,EAEpD,EAAE,IAAI,OAAO,GAAG,SAAS;AAEzB,QAAM,SAAiC,CAAC;AACxC,aAAW,OAAO,MAAM;AACtB,WAAO,IAAI,SAAS,IAAI,IAAI;AAAA,EAC9B;AACA,SAAO;AACT;","names":[]}
@@ -5,13 +5,13 @@ import {
  import {
  normalizeHookInput,
  readStdin
- } from "./chunk-DCSGJ7W4.js";
+ } from "./chunk-5ZG4RMUH.js";
  import {
  resolveVaultDir
  } from "./chunk-CUDIZJY7.js";
  import {
  DaemonClient
- } from "./chunk-VVNL26WX.js";
+ } from "./chunk-P66DLD6G.js";

  // src/hooks/send-event.ts
  import fs from "fs";
@@ -23,12 +23,15 @@ async function sendEvent(hookName, buildEvent) {
  const rawInput = JSON.parse(await readStdin());
  const input = normalizeHookInput(rawInput);
  const event = buildEvent(input);
+ const eventWithContext = {
+ ...event,
+ transcript_path: input.transcriptPath
+ };
  const client = new DaemonClient(VAULT_DIR);
- const result = await client.post("/events", { ...event, session_id: input.sessionId, agent: input.agent });
+ const result = await client.post("/events", { ...eventWithContext, session_id: input.sessionId, agent: input.agent });
  if (!result.ok) {
  const buffer = new EventBuffer(path.join(VAULT_DIR, "buffer"), input.sessionId);
- const { session_id: _, ...bufferPayload } = event;
- buffer.append(bufferPayload);
+ buffer.append(eventWithContext);
  }
  } catch (error) {
  process.stderr.write(`[myco] ${hookName} error: ${error.message}
@@ -39,4 +42,4 @@ async function sendEvent(hookName, buildEvent) {
  export {
  sendEvent
  };
- //# sourceMappingURL=chunk-2PDWCDKY.js.map
+ //# sourceMappingURL=chunk-G6QIBNZM.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../src/hooks/send-event.ts"],"sourcesContent":["/**\n * Shared hook helper — sends an event to the daemon, buffers on failure.\n *\n * Every hook follows the same pattern: read stdin, POST to daemon /events,\n * buffer to disk if the daemon is unreachable. This helper extracts that\n * skeleton so each hook is a one-liner mapping input fields to event fields.\n */\n\nimport { DaemonClient } from './client.js';\nimport { readStdin } from './read-stdin.js';\nimport { normalizeHookInput, type NormalizedHookInput } from './normalize.js';\nimport { EventBuffer } from '../capture/buffer.js';\nimport { resolveVaultDir } from '../vault/resolve.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/**\n * Read hook stdin, POST event to daemon, buffer on failure.\n *\n * @param hookName — used for error logging (e.g., 'subagent-start')\n * @param buildEvent — maps the normalized hook input to the event payload.\n * Receives a NormalizedHookInput with canonical field names.\n * Return the full event object (must include `type`).\n */\nexport async function sendEvent(\n hookName: string,\n buildEvent: (input: NormalizedHookInput) => Record<string, unknown>,\n): Promise<void> {\n const VAULT_DIR = resolveVaultDir();\n if (!fs.existsSync(path.join(VAULT_DIR, 'myco.yaml'))) return;\n\n try {\n const rawInput = JSON.parse(await readStdin()) as Record<string, unknown>;\n const input = normalizeHookInput(rawInput);\n\n const event = buildEvent(input);\n\n const client = new DaemonClient(VAULT_DIR);\n const result = await client.post('/events', { ...event, session_id: input.sessionId, agent: input.agent });\n\n if (!result.ok) {\n const buffer = new EventBuffer(path.join(VAULT_DIR, 'buffer'), input.sessionId);\n // Strip session_id from buffer entry — it's in the filename\n const { session_id: _, ...bufferPayload } = event;\n buffer.append(bufferPayload);\n }\n } catch (error) {\n process.stderr.write(`[myco] ${hookName} error: ${(error as Error).message}\\n`);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAaA,OAAO,QAAQ;AACf,OAAO,UAAU;AAUjB,eAAsB,UACpB,UACA,YACe;AACf,QAAM,YAAY,gBAAgB;AAClC,MAAI,CAAC,GAAG,WAAW,KAAK,KAAK,WAAW,WAAW,CAAC,EAAG;AAEvD,MAAI;AACF,UAAM,WAAW,KAAK,MAAM,MAAM,UAAU,CAAC;AAC7C,UAAM,QAAQ,mBAAmB,QAAQ;AAEzC,UAAM,QAAQ,WAAW,KAAK;AAE9B,UAAM,SAAS,IAAI,aAAa,SAAS;AACzC,UAAM,SAAS,MAAM,OAAO,KAAK,WAAW,EAAE,GAAG,OAAO,YAAY,MAAM,WAAW,OAAO,MAAM,MAAM,CAAC;AAEzG,QAAI,CAAC,OAAO,IAAI;AACd,YAAM,SAAS,IAAI,YAAY,KAAK,KAAK,WAAW,QAAQ,GAAG,MAAM,SAAS;AAE9E,YAAM,EAAE,YAAY,GAAG,GAAG,cAAc,IAAI;AAC5C,aAAO,OAAO,aAAa;AAAA,IAC7B;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,OAAO,MAAM,UAAU,QAAQ,WAAY,MAAgB,OAAO;AAAA,CAAI;AAAA,EAChF;AACF;","names":[]}
+ {"version":3,"sources":["../src/hooks/send-event.ts"],"sourcesContent":["/**\n * Shared hook helper — sends an event to the daemon, buffers on failure.\n *\n * Every hook follows the same pattern: read stdin, POST to daemon /events,\n * buffer to disk if the daemon is unreachable. This helper extracts that\n * skeleton so each hook is a one-liner mapping input fields to event fields.\n */\n\nimport { DaemonClient } from './client.js';\nimport { readStdin } from './read-stdin.js';\nimport { normalizeHookInput, type NormalizedHookInput } from './normalize.js';\nimport { EventBuffer } from '../capture/buffer.js';\nimport { resolveVaultDir } from '../vault/resolve.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/**\n * Read hook stdin, POST event to daemon, buffer on failure.\n *\n * @param hookName — used for error logging (e.g., 'subagent-start')\n * @param buildEvent — maps the normalized hook input to the event payload.\n * Receives a NormalizedHookInput with canonical field names.\n * Return the full event object (must include `type`).\n */\nexport async function sendEvent(\n hookName: string,\n buildEvent: (input: NormalizedHookInput) => Record<string, unknown>,\n): Promise<void> {\n const VAULT_DIR = resolveVaultDir();\n if (!fs.existsSync(path.join(VAULT_DIR, 'myco.yaml'))) return;\n\n try {\n const rawInput = JSON.parse(await readStdin()) as Record<string, unknown>;\n const input = normalizeHookInput(rawInput);\n\n const event = buildEvent(input);\n const eventWithContext = {\n ...event,\n transcript_path: input.transcriptPath,\n };\n\n const client = new DaemonClient(VAULT_DIR);\n const result = await client.post('/events', { ...eventWithContext, session_id: input.sessionId, agent: input.agent });\n\n if (!result.ok) {\n const buffer = new EventBuffer(path.join(VAULT_DIR, 'buffer'), input.sessionId);\n buffer.append(eventWithContext);\n }\n } catch (error) {\n process.stderr.write(`[myco] ${hookName} error: ${(error as Error).message}\\n`);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAaA,OAAO,QAAQ;AACf,OAAO,UAAU;AAUjB,eAAsB,UACpB,UACA,YACe;AACf,QAAM,YAAY,gBAAgB;AAClC,MAAI,CAAC,GAAG,WAAW,KAAK,KAAK,WAAW,WAAW,CAAC,EAAG;AAEvD,MAAI;AACF,UAAM,WAAW,KAAK,MAAM,MAAM,UAAU,CAAC;AAC7C,UAAM,QAAQ,mBAAmB,QAAQ;AAEzC,UAAM,QAAQ,WAAW,KAAK;AAC9B,UAAM,mBAAmB;AAAA,MACvB,GAAG;AAAA,MACH,iBAAiB,MAAM;AAAA,IACzB;AAEA,UAAM,SAAS,IAAI,aAAa,SAAS;AACzC,UAAM,SAAS,MAAM,OAAO,KAAK,WAAW,EAAE,GAAG,kBAAkB,YAAY,MAAM,WAAW,OAAO,MAAM,MAAM,CAAC;AAEpH,QAAI,CAAC,OAAO,IAAI;AACd,YAAM,SAAS,IAAI,YAAY,KAAK,KAAK,WAAW,QAAQ,GAAG,MAAM,SAAS;AAC9E,aAAO,OAAO,gBAAgB;AAAA,IAChC;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,OAAO,MAAM,UAAU,QAAQ,WAAY,MAAgB,OAAO;AAAA,CAAI;AAAA,EAChF;AACF;","names":[]}
@@ -1,16 +1,18 @@
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
  import {
- AgentTaskSchema,
  loadAgentTasks,
  taskFromParsed
- } from "./chunk-CCRGY3QW.js";
+ } from "./chunk-JZS6GZ6T.js";
+ import {
+ AgentTaskSchema
+ } from "./chunk-OUJSQSKE.js";
  import {
  BUILT_IN_SOURCE,
  MAX_TASK_NAME_LENGTH,
  TASK_NAME_PATTERN,
  USER_TASKS_DIR,
  USER_TASK_SOURCE
- } from "./chunk-FLLBJLHM.js";
+ } from "./chunk-6C6QZ4PM.js";
  import {
  require_dist
  } from "./chunk-6LQIMRTC.js";
@@ -100,4 +102,4 @@ export {
  deleteUserTask,
  copyTaskToUser
  };
- //# sourceMappingURL=chunk-6X2ERTQV.js.map
+ //# sourceMappingURL=chunk-ILJPRYES.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../src/agent/registry.ts"],"sourcesContent":["/**\n * User task registry.\n *\n * Loads built-in tasks from the definitions directory and user-created tasks\n * from the vault's tasks/ subdirectory. User tasks with the same name as a\n * built-in task override the built-in.\n *\n * No module-level cache — always reads from disk.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { USER_TASKS_DIR, USER_TASK_SOURCE, BUILT_IN_SOURCE, TASK_NAME_PATTERN, MAX_TASK_NAME_LENGTH } from '@myco/constants.js';\nimport { loadAgentTasks, taskFromParsed } from './loader.js';\nimport { AgentTaskSchema } from './schemas.js';\nimport type { AgentTask } from './types.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n// BUILT_IN_SOURCE imported from @myco/constants.js\n\n/** Suffix appended to the task name when copying a built-in task for the user. */\nconst COPY_SUFFIX = '-custom';\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Load all tasks: built-in definitions merged with user-created overrides.\n *\n * Built-in tasks are loaded from `definitionsDir/tasks/*.yaml`.\n * User tasks are loaded from `vaultDir/tasks/*.yaml`.\n * A user task with the same name as a built-in task replaces it entirely.\n *\n * Always reads from disk — no caching.\n *\n * @param definitionsDir — path to `src/agent/definitions/` (or dist equivalent).\n * @param vaultDir — optional vault directory; user tasks skipped if not provided.\n * @returns map from task name → AgentTask.\n */\nexport function loadAllTasks(definitionsDir: string, vaultDir?: string): Map<string, AgentTask> {\n const result = new Map<string, AgentTask>();\n\n // Load built-in tasks first\n const builtIn = loadAgentTasks(definitionsDir);\n for (const task of builtIn) {\n result.set(task.name, { ...task, isBuiltin: true, source: BUILT_IN_SOURCE });\n }\n\n // Overlay user tasks (override built-in if same name)\n if (vaultDir) {\n const userTasksDir = path.join(vaultDir, USER_TASKS_DIR);\n if (fs.existsSync(userTasksDir)) {\n const files = fs.readdirSync(userTasksDir).filter((f) => f.endsWith('.yaml'));\n for (const file of files) {\n const filePath = path.join(userTasksDir, file);\n try {\n const raw = fs.readFileSync(filePath, 'utf-8');\n const parsed = AgentTaskSchema.parse(parseYaml(raw));\n const task: AgentTask = {\n ...taskFromParsed(parsed),\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n result.set(task.name, task);\n } catch (err) {\n console.warn(`[registry] Skipping malformed user task file: ${filePath}`, err);\n }\n }\n }\n }\n\n return result;\n}\n\n/**\n * Validate a task name against the allowed pattern and length limit.\n *\n * Valid names: lowercase letters, digits, and hyphens. Must start and end\n * with a letter or digit. 
Single character names (a–z, 0–9) are allowed.\n *\n * @param name — candidate task name.\n * @returns true if valid.\n */\nexport function validateTaskName(name: string): boolean {\n if (name.length > MAX_TASK_NAME_LENGTH) return false;\n return TASK_NAME_PATTERN.test(name);\n}\n\n/**\n * Serialize an AgentTask to YAML and write it to `vaultDir/tasks/<name>.yaml`.\n *\n * Validates the task through AgentTaskSchema before writing.\n * Creates the tasks directory if it does not exist (idempotent).\n * Strips the internal `source` and `isBuiltin` fields from the serialized output.\n *\n * @param vaultDir — path to the vault root directory.\n * @param task — task to write.\n * @returns absolute path to the written file.\n * @throws if schema validation fails.\n */\nexport function writeUserTask(vaultDir: string, task: AgentTask): string {\n // Validate before touching the filesystem\n AgentTaskSchema.parse(task);\n\n const tasksDir = path.join(vaultDir, USER_TASKS_DIR);\n fs.mkdirSync(tasksDir, { recursive: true });\n\n // Strip internal-only fields before serializing\n const { isBuiltin: _isBuiltin, source: _source, ...serializable } = task;\n const yaml = stringifyYaml(serializable);\n\n const filePath = path.join(tasksDir, `${task.name}.yaml`);\n fs.writeFileSync(filePath, yaml, 'utf-8');\n return filePath;\n}\n\n/**\n * Delete a user task YAML file from `vaultDir/tasks/<taskName>.yaml`.\n *\n * @param vaultDir — path to the vault root directory.\n * @param taskName — name of the task to delete.\n * @returns true if the file existed and was removed, false if it did not exist.\n */\nexport function deleteUserTask(vaultDir: string, taskName: string): boolean {\n const filePath = path.join(vaultDir, USER_TASKS_DIR, `${taskName}.yaml`);\n if (!fs.existsSync(filePath)) return false;\n fs.rmSync(filePath);\n return true;\n}\n\n/**\n * Create a user copy of an existing task.\n *\n * Loads all tasks (built-in + user), locates `sourceName`, then writes a new\n * user task with the given name (or `sourceName + COPY_SUFFIX` if omitted),\n * `isDefault: false`, `isBuiltin: false`, and `source: 'user'`.\n *\n * @param definitionsDir — path to built-in definitions directory.\n * @param vaultDir — path to vault root directory.\n * @param sourceName — name of the task to copy.\n * @param newName — optional name for the copy; defaults to `sourceName + '-custom'`.\n * @returns the newly written AgentTask.\n * @throws if the source task is not found.\n * @throws if the new name is invalid.\n */\nexport function copyTaskToUser(\n definitionsDir: string,\n vaultDir: string,\n sourceName: string,\n newName?: string,\n): AgentTask {\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n\n const source = allTasks.get(sourceName);\n if (!source) {\n throw new Error(`Task not found: ${sourceName}`);\n }\n\n const targetName = newName ?? 
`${sourceName}${COPY_SUFFIX}`;\n if (!validateTaskName(targetName)) {\n throw new Error(`Invalid task name: ${targetName}`);\n }\n\n const copy: AgentTask = {\n ...source,\n name: targetName,\n isDefault: false,\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n\n writeUserTask(vaultDir, copy);\n return copy;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAYA,kBAA+D;AAF/D,OAAO,QAAQ;AACf,OAAO,UAAU;AAcjB,IAAM,cAAc;AAmBb,SAAS,aAAa,gBAAwB,UAA2C;AAC9F,QAAM,SAAS,oBAAI,IAAuB;AAG1C,QAAM,UAAU,eAAe,cAAc;AAC7C,aAAW,QAAQ,SAAS;AAC1B,WAAO,IAAI,KAAK,MAAM,EAAE,GAAG,MAAM,WAAW,MAAM,QAAQ,gBAAgB,CAAC;AAAA,EAC7E;AAGA,MAAI,UAAU;AACZ,UAAM,eAAe,KAAK,KAAK,UAAU,cAAc;AACvD,QAAI,GAAG,WAAW,YAAY,GAAG;AAC/B,YAAM,QAAQ,GAAG,YAAY,YAAY,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAC5E,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,YAAI;AACF,gBAAM,MAAM,GAAG,aAAa,UAAU,OAAO;AAC7C,gBAAM,SAAS,gBAAgB,UAAM,YAAAA,OAAU,GAAG,CAAC;AACnD,gBAAM,OAAkB;AAAA,YACtB,GAAG,eAAe,MAAM;AAAA,YACxB,WAAW;AAAA,YACX,QAAQ;AAAA,UACV;AACA,iBAAO,IAAI,KAAK,MAAM,IAAI;AAAA,QAC5B,SAAS,KAAK;AACZ,kBAAQ,KAAK,iDAAiD,QAAQ,IAAI,GAAG;AAAA,QAC/E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,iBAAiB,MAAuB;AACtD,MAAI,KAAK,SAAS,qBAAsB,QAAO;AAC/C,SAAO,kBAAkB,KAAK,IAAI;AACpC;AAcO,SAAS,cAAc,UAAkB,MAAyB;AAEvE,kBAAgB,MAAM,IAAI;AAE1B,QAAM,WAAW,KAAK,KAAK,UAAU,cAAc;AACnD,KAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAG1C,QAAM,EAAE,WAAW,YAAY,QAAQ,SAAS,GAAG,aAAa,IAAI;AACpE,QAAM,WAAO,YAAAC,WAAc,YAAY;AAEvC,QAAM,WAAW,KAAK,KAAK,UAAU,GAAG,KAAK,IAAI,OAAO;AACxD,KAAG,cAAc,UAAU,MAAM,OAAO;AACxC,SAAO;AACT;AASO,SAAS,eAAe,UAAkB,UAA2B;AAC1E,QAAM,WAAW,KAAK,KAAK,UAAU,gBAAgB,GAAG,QAAQ,OAAO;AACvE,MAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,QAAO;AACrC,KAAG,OAAO,QAAQ;AAClB,SAAO;AACT;AAiBO,SAAS,eACd,gBACA,UACA,YACA,SACW;AACX,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AAEtD,QAAM,SAAS,SAAS,IAAI,UAAU;AACtC,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,mBAAmB,UAAU,EAAE;AAAA,EACjD;AAEA,QAAM,aAAa,WAAW,GAAG,UAAU,GAAG,WAAW;AACzD,MAAI,CAAC,iBAAiB,UAAU,GAAG;AACjC,UAAM,IAAI,MAAM,sBAAsB,UAAU,EAAE;AAAA,EACpD;AAEA,QAAM,OAAkB;AAAA,IACtB,GAAG;AAAA,IACH,MAAM;AAAA,IACN,WAAW;AAAA,IACX,WAAW;AAAA,IACX,QAAQ;AAAA,EACV;AAEA,gBAAc,UAAU,IAAI;AAC5B,SAAO;AACT;","names":["parseYaml","stringifyYaml"]}
+ {"version":3,"sources":["../src/agent/registry.ts"],"sourcesContent":["/**\n * User task registry.\n *\n * Loads built-in tasks from the definitions directory and user-created tasks\n * from the vault's tasks/ subdirectory. User tasks with the same name as a\n * built-in task override the built-in.\n *\n * No module-level cache — always reads from disk.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { USER_TASKS_DIR, USER_TASK_SOURCE, BUILT_IN_SOURCE, TASK_NAME_PATTERN, MAX_TASK_NAME_LENGTH } from '@myco/constants.js';\nimport { loadAgentTasks, taskFromParsed } from './loader.js';\nimport { AgentTaskSchema } from './schemas.js';\nimport type { AgentTask } from './types.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n// BUILT_IN_SOURCE imported from @myco/constants.js\n\n/** Suffix appended to the task name when copying a built-in task for the user. */\nconst COPY_SUFFIX = '-custom';\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Load all tasks: built-in definitions merged with user-created overrides.\n *\n * Built-in tasks are loaded from `definitionsDir/tasks/*.yaml`.\n * User tasks are loaded from `vaultDir/tasks/*.yaml`.\n * A user task with the same name as a built-in task replaces it entirely.\n *\n * Always reads from disk — no caching.\n *\n * @param definitionsDir — path to `src/agent/definitions/` (or dist equivalent).\n * @param vaultDir — optional vault directory; user tasks skipped if not provided.\n * @returns map from task name → AgentTask.\n */\nexport function loadAllTasks(definitionsDir: string, vaultDir?: string): Map<string, AgentTask> {\n const result = new Map<string, AgentTask>();\n\n // Load built-in tasks first\n const builtIn = loadAgentTasks(definitionsDir);\n for (const task of builtIn) {\n result.set(task.name, { ...task, isBuiltin: true, source: BUILT_IN_SOURCE });\n }\n\n // Overlay user tasks (override built-in if same name)\n if (vaultDir) {\n const userTasksDir = path.join(vaultDir, USER_TASKS_DIR);\n if (fs.existsSync(userTasksDir)) {\n const files = fs.readdirSync(userTasksDir).filter((f) => f.endsWith('.yaml'));\n for (const file of files) {\n const filePath = path.join(userTasksDir, file);\n try {\n const raw = fs.readFileSync(filePath, 'utf-8');\n const parsed = AgentTaskSchema.parse(parseYaml(raw));\n const task: AgentTask = {\n ...taskFromParsed(parsed),\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n result.set(task.name, task);\n } catch (err) {\n console.warn(`[registry] Skipping malformed user task file: ${filePath}`, err);\n }\n }\n }\n }\n\n return result;\n}\n\n/**\n * Validate a task name against the allowed pattern and length limit.\n *\n * Valid names: lowercase letters, digits, and hyphens. Must start and end\n * with a letter or digit. 
Single character names (a–z, 0–9) are allowed.\n *\n * @param name — candidate task name.\n * @returns true if valid.\n */\nexport function validateTaskName(name: string): boolean {\n if (name.length > MAX_TASK_NAME_LENGTH) return false;\n return TASK_NAME_PATTERN.test(name);\n}\n\n/**\n * Serialize an AgentTask to YAML and write it to `vaultDir/tasks/<name>.yaml`.\n *\n * Validates the task through AgentTaskSchema before writing.\n * Creates the tasks directory if it does not exist (idempotent).\n * Strips the internal `source` and `isBuiltin` fields from the serialized output.\n *\n * @param vaultDir — path to the vault root directory.\n * @param task — task to write.\n * @returns absolute path to the written file.\n * @throws if schema validation fails.\n */\nexport function writeUserTask(vaultDir: string, task: AgentTask): string {\n // Validate before touching the filesystem\n AgentTaskSchema.parse(task);\n\n const tasksDir = path.join(vaultDir, USER_TASKS_DIR);\n fs.mkdirSync(tasksDir, { recursive: true });\n\n // Strip internal-only fields before serializing\n const { isBuiltin: _isBuiltin, source: _source, ...serializable } = task;\n const yaml = stringifyYaml(serializable);\n\n const filePath = path.join(tasksDir, `${task.name}.yaml`);\n fs.writeFileSync(filePath, yaml, 'utf-8');\n return filePath;\n}\n\n/**\n * Delete a user task YAML file from `vaultDir/tasks/<taskName>.yaml`.\n *\n * @param vaultDir — path to the vault root directory.\n * @param taskName — name of the task to delete.\n * @returns true if the file existed and was removed, false if it did not exist.\n */\nexport function deleteUserTask(vaultDir: string, taskName: string): boolean {\n const filePath = path.join(vaultDir, USER_TASKS_DIR, `${taskName}.yaml`);\n if (!fs.existsSync(filePath)) return false;\n fs.rmSync(filePath);\n return true;\n}\n\n/**\n * Create a user copy of an existing task.\n *\n * Loads all tasks (built-in + user), locates `sourceName`, then writes a new\n * user task with the given name (or `sourceName + COPY_SUFFIX` if omitted),\n * `isDefault: false`, `isBuiltin: false`, and `source: 'user'`.\n *\n * @param definitionsDir — path to built-in definitions directory.\n * @param vaultDir — path to vault root directory.\n * @param sourceName — name of the task to copy.\n * @param newName — optional name for the copy; defaults to `sourceName + '-custom'`.\n * @returns the newly written AgentTask.\n * @throws if the source task is not found.\n * @throws if the new name is invalid.\n */\nexport function copyTaskToUser(\n definitionsDir: string,\n vaultDir: string,\n sourceName: string,\n newName?: string,\n): AgentTask {\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n\n const source = allTasks.get(sourceName);\n if (!source) {\n throw new Error(`Task not found: ${sourceName}`);\n }\n\n const targetName = newName ?? 
`${sourceName}${COPY_SUFFIX}`;\n if (!validateTaskName(targetName)) {\n throw new Error(`Invalid task name: ${targetName}`);\n }\n\n const copy: AgentTask = {\n ...source,\n name: targetName,\n isDefault: false,\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n\n writeUserTask(vaultDir, copy);\n return copy;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAYA,kBAA+D;AAF/D,OAAO,QAAQ;AACf,OAAO,UAAU;AAcjB,IAAM,cAAc;AAmBb,SAAS,aAAa,gBAAwB,UAA2C;AAC9F,QAAM,SAAS,oBAAI,IAAuB;AAG1C,QAAM,UAAU,eAAe,cAAc;AAC7C,aAAW,QAAQ,SAAS;AAC1B,WAAO,IAAI,KAAK,MAAM,EAAE,GAAG,MAAM,WAAW,MAAM,QAAQ,gBAAgB,CAAC;AAAA,EAC7E;AAGA,MAAI,UAAU;AACZ,UAAM,eAAe,KAAK,KAAK,UAAU,cAAc;AACvD,QAAI,GAAG,WAAW,YAAY,GAAG;AAC/B,YAAM,QAAQ,GAAG,YAAY,YAAY,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAC5E,iBAAW,QAAQ,OAAO;AACxB,cAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,YAAI;AACF,gBAAM,MAAM,GAAG,aAAa,UAAU,OAAO;AAC7C,gBAAM,SAAS,gBAAgB,UAAM,YAAAA,OAAU,GAAG,CAAC;AACnD,gBAAM,OAAkB;AAAA,YACtB,GAAG,eAAe,MAAM;AAAA,YACxB,WAAW;AAAA,YACX,QAAQ;AAAA,UACV;AACA,iBAAO,IAAI,KAAK,MAAM,IAAI;AAAA,QAC5B,SAAS,KAAK;AACZ,kBAAQ,KAAK,iDAAiD,QAAQ,IAAI,GAAG;AAAA,QAC/E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,iBAAiB,MAAuB;AACtD,MAAI,KAAK,SAAS,qBAAsB,QAAO;AAC/C,SAAO,kBAAkB,KAAK,IAAI;AACpC;AAcO,SAAS,cAAc,UAAkB,MAAyB;AAEvE,kBAAgB,MAAM,IAAI;AAE1B,QAAM,WAAW,KAAK,KAAK,UAAU,cAAc;AACnD,KAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAG1C,QAAM,EAAE,WAAW,YAAY,QAAQ,SAAS,GAAG,aAAa,IAAI;AACpE,QAAM,WAAO,YAAAC,WAAc,YAAY;AAEvC,QAAM,WAAW,KAAK,KAAK,UAAU,GAAG,KAAK,IAAI,OAAO;AACxD,KAAG,cAAc,UAAU,MAAM,OAAO;AACxC,SAAO;AACT;AASO,SAAS,eAAe,UAAkB,UAA2B;AAC1E,QAAM,WAAW,KAAK,KAAK,UAAU,gBAAgB,GAAG,QAAQ,OAAO;AACvE,MAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,QAAO;AACrC,KAAG,OAAO,QAAQ;AAClB,SAAO;AACT;AAiBO,SAAS,eACd,gBACA,UACA,YACA,SACW;AACX,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AAEtD,QAAM,SAAS,SAAS,IAAI,UAAU;AACtC,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,mBAAmB,UAAU,EAAE;AAAA,EACjD;AAEA,QAAM,aAAa,WAAW,GAAG,UAAU,GAAG,WAAW;AACzD,MAAI,CAAC,iBAAiB,UAAU,GAAG;AACjC,UAAM,IAAI,MAAM,sBAAsB,UAAU,EAAE;AAAA,EACpD;AAEA,QAAM,OAAkB;AAAA,IACtB,GAAG;AAAA,IACH,MAAM;AAAA,IACN,WAAW;AAAA,IACX,WAAW;AAAA,IACX,QAAQ;AAAA,EACV;AAEA,gBAAc,UAAU,IAAI;AAC5B,SAAO;AACT;","names":["parseYaml","stringifyYaml"]}
@@ -19,6 +19,10 @@ function withTaskConfig(config, taskId, update) {
  entry.provider = { ...update.provider };
  }
  }
+ if ("runtime" in update) {
+ if (update.runtime === null) delete entry.runtime;
+ else if (update.runtime !== void 0) entry.runtime = update.runtime;
+ }
  if ("model" in update) {
  if (update.model === null) delete entry.model;
  else if (update.model !== void 0) entry.model = update.model;
@@ -95,4 +99,4 @@ export {
  withTaskConfig,
  withEmbedding
  };
- //# sourceMappingURL=chunk-US4LNCAT.js.map
+ //# sourceMappingURL=chunk-IPPMYQ2Y.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/config/updates.ts"],"sourcesContent":["import type { MycoConfig, EmbeddingProviderConfig, ContextConfig, TaskProviderOverride, PhaseOverride } from './schema.js';\nimport { setAtPath } from '../utils/dot-path.js';\n\n/**\n * Set a value at a dot-separated path, returning a new config object.\n * Creates intermediate objects along the path as needed.\n */\nexport function withValue(config: MycoConfig, dotPath: string, value: unknown): MycoConfig {\n const clone = structuredClone(config) as MycoConfig & Record<string, unknown>;\n setAtPath(clone, dotPath, value);\n return clone as MycoConfig;\n}\n\n/** Provider override shape used in task config updates. Null means delete. */\ninterface ProviderInput {\n runtime?: 'claude-sdk' | 'openai-agents';\n type: 'anthropic' | 'ollama' | 'lmstudio' | 'openai' | 'openrouter' | 'openai-compatible';\n local_backend?: 'ollama' | 'lmstudio';\n model?: string;\n reasoning_map?: Partial<Record<'low' | 'default' | 'high', string>>;\n base_url?: string;\n context_length?: number;\n}\n\n/** Phase override input. Null fields mean delete. */\ninterface PhaseInput {\n provider?: ProviderInput | null;\n model?: string | null;\n maxTurns?: number | null;\n}\n\n/** Input shape for task config updates. Null values mean \"delete this field\". */\nexport interface TaskConfigUpdate {\n provider?: ProviderInput | null;\n runtime?: 'claude-sdk' | 'openai-agents' | null;\n model?: string | null;\n maxTurns?: number | null;\n timeoutSeconds?: number | null;\n phases?: Record<string, PhaseInput | null> | null;\n schedule?: { enabled?: boolean; intervalSeconds?: number; runIn?: ('active' | 'idle' | 'sleep')[] } | null;\n}\n\n/**\n * Apply partial task config updates, returning a new config object.\n * Null values delete fields. Empty task entries and phase maps are cleaned up.\n */\nexport function withTaskConfig(\n config: MycoConfig,\n taskId: string,\n update: TaskConfigUpdate,\n): MycoConfig {\n const tasks = { ...(config.agent.tasks ?? {}) };\n const entry: TaskProviderOverride = { ...(tasks[taskId] ?? {}) };\n\n // Apply top-level fields\n if ('provider' in update) {\n if (update.provider === null) {\n delete entry.provider;\n } else if (update.provider !== undefined) {\n entry.provider = { ...update.provider };\n }\n }\n\n if ('runtime' in update) {\n if (update.runtime === null) delete entry.runtime;\n else if (update.runtime !== undefined) entry.runtime = update.runtime;\n }\n\n if ('model' in update) {\n if (update.model === null) delete entry.model;\n else if (update.model !== undefined) entry.model = update.model;\n }\n\n if ('maxTurns' in update) {\n if (update.maxTurns === null) delete entry.maxTurns;\n else if (update.maxTurns !== undefined) entry.maxTurns = update.maxTurns;\n }\n\n if ('timeoutSeconds' in update) {\n if (update.timeoutSeconds === null) delete entry.timeoutSeconds;\n else if (update.timeoutSeconds !== undefined) entry.timeoutSeconds = update.timeoutSeconds;\n }\n\n // Handle schedule\n if ('schedule' in update) {\n if (update.schedule === null) {\n delete entry.schedule;\n } else if (update.schedule !== undefined) {\n entry.schedule = { ...entry.schedule, ...update.schedule };\n }\n }\n\n // Apply phase overrides\n if ('phases' in update) {\n if (update.phases === null) {\n delete entry.phases;\n } else if (update.phases !== undefined) {\n const phases: Record<string, PhaseOverride> = { ...(entry.phases ?? 
{}) };\n\n for (const [phaseName, phaseValue] of Object.entries(update.phases)) {\n if (phaseValue === null) {\n delete phases[phaseName];\n } else {\n const pe: PhaseOverride = { ...(phases[phaseName] ?? {}) };\n if ('provider' in phaseValue) {\n if (phaseValue.provider === null) delete pe.provider;\n else if (phaseValue.provider !== undefined) pe.provider = { ...phaseValue.provider };\n }\n if ('model' in phaseValue) {\n if (phaseValue.model === null) delete pe.model;\n else if (phaseValue.model !== undefined) pe.model = phaseValue.model;\n }\n if ('maxTurns' in phaseValue) {\n if (phaseValue.maxTurns === null) delete pe.maxTurns;\n else if (phaseValue.maxTurns !== undefined) pe.maxTurns = phaseValue.maxTurns;\n }\n phases[phaseName] = pe;\n }\n }\n\n // Clean up empty phases map\n if (Object.keys(phases).length === 0) {\n delete entry.phases;\n } else {\n entry.phases = phases;\n }\n }\n }\n\n // Clean up empty task entry\n if (Object.keys(entry).length === 0) {\n delete tasks[taskId];\n } else {\n tasks[taskId] = entry;\n }\n\n return {\n ...config,\n agent: {\n ...config.agent,\n tasks: Object.keys(tasks).length > 0 ? tasks : undefined,\n },\n };\n}\n\n/**\n * Merge partial embedding updates into config, returning a new config object.\n */\nexport function withEmbedding(\n config: MycoConfig,\n updates: Partial<EmbeddingProviderConfig>,\n): MycoConfig {\n return {\n ...config,\n embedding: { ...config.embedding, ...updates },\n };\n}\n\n/**\n * Merge partial context injection updates into config, returning a new config object.\n */\nexport function withContext(\n config: MycoConfig,\n updates: Partial<ContextConfig>,\n): MycoConfig {\n return {\n ...config,\n context: { ...config.context, ...updates },\n };\n}\n"],"mappings":";;;;;;AAOO,SAAS,UAAU,QAAoB,SAAiB,OAA4B;AACzF,QAAM,QAAQ,gBAAgB,MAAM;AACpC,YAAU,OAAO,SAAS,KAAK;AAC/B,SAAO;AACT;AAmCO,SAAS,eACd,QACA,QACA,QACY;AACZ,QAAM,QAAQ,EAAE,GAAI,OAAO,MAAM,SAAS,CAAC,EAAG;AAC9C,QAAM,QAA8B,EAAE,GAAI,MAAM,MAAM,KAAK,CAAC,EAAG;AAG/D,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,MAAM;AAC5B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,aAAa,QAAW;AACxC,YAAM,WAAW,EAAE,GAAG,OAAO,SAAS;AAAA,IACxC;AAAA,EACF;AAEA,MAAI,aAAa,QAAQ;AACvB,QAAI,OAAO,YAAY,KAAM,QAAO,MAAM;AAAA,aACjC,OAAO,YAAY,OAAW,OAAM,UAAU,OAAO;AAAA,EAChE;AAEA,MAAI,WAAW,QAAQ;AACrB,QAAI,OAAO,UAAU,KAAM,QAAO,MAAM;AAAA,aAC/B,OAAO,UAAU,OAAW,OAAM,QAAQ,OAAO;AAAA,EAC5D;AAEA,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,KAAM,QAAO,MAAM;AAAA,aAClC,OAAO,aAAa,OAAW,OAAM,WAAW,OAAO;AAAA,EAClE;AAEA,MAAI,oBAAoB,QAAQ;AAC9B,QAAI,OAAO,mBAAmB,KAAM,QAAO,MAAM;AAAA,aACxC,OAAO,mBAAmB,OAAW,OAAM,iBAAiB,OAAO;AAAA,EAC9E;AAGA,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,MAAM;AAC5B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,aAAa,QAAW;AACxC,YAAM,WAAW,EAAE,GAAG,MAAM,UAAU,GAAG,OAAO,SAAS;AAAA,IAC3D;AAAA,EACF;AAGA,MAAI,YAAY,QAAQ;AACtB,QAAI,OAAO,WAAW,MAAM;AAC1B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,WAAW,QAAW;AACtC,YAAM,SAAwC,EAAE,GAAI,MAAM,UAAU,CAAC,EAAG;AAExE,iBAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,OAAO,MAAM,GAAG;AACnE,YAAI,eAAe,MAAM;AACvB,iBAAO,OAAO,SAAS;AAAA,QACzB,OAAO;AACL,gBAAM,KAAoB,EAAE,GAAI,OAAO,SAAS,KAAK,CAAC,EAAG;AACzD,cAAI,cAAc,YAAY;AAC5B,gBAAI,WAAW,aAAa,KAAM,QAAO,GAAG;AAAA,qBACnC,WAAW,aAAa,OAAW,IAAG,WAAW,EAAE,GAAG,WAAW,SAAS;AAAA,UACrF;AACA,cAAI,WAAW,YAAY;AACzB,gBAAI,WAAW,UAAU,KAAM,QAAO,GAAG;AAAA,qBAChC,WAAW,UAAU,OAAW,IAAG,QAAQ,WAAW;AAAA,UACjE;AACA,cAAI,cAAc,YAAY;AAC5B,gBAAI,WAAW,aAAa,KAAM,QAAO,GAAG;AAAA,qBACnC,WAAW,aAAa,OAAW,IAAG,WAAW,WAAW;AAAA,UACvE;AACA,iBAAO,SAAS,IAAI;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AACpC,eAAO,MAAM;AAAA,MACf,OAAO;AACL,cAAM,SAAS;AAAA,MACjB;AAAA,I
ACF;AAAA,EACF;AAGA,MAAI,OAAO,KAAK,KAAK,EAAE,WAAW,GAAG;AACnC,WAAO,MAAM,MAAM;AAAA,EACrB,OAAO;AACL,UAAM,MAAM,IAAI;AAAA,EAClB;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,OAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV,OAAO,OAAO,KAAK,KAAK,EAAE,SAAS,IAAI,QAAQ;AAAA,IACjD;AAAA,EACF;AACF;AAKO,SAAS,cACd,QACA,SACY;AACZ,SAAO;AAAA,IACL,GAAG;AAAA,IACH,WAAW,EAAE,GAAG,OAAO,WAAW,GAAG,QAAQ;AAAA,EAC/C;AACF;","names":[]}
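The new chunk-IPPMYQ2Y.js.map embeds src/config/updates.ts, which adds immutable config-update helpers: withValue sets a dot-path on a structured clone, and withTaskConfig applies per-task overrides where a null field means "delete this field" and empty task entries or phase maps are pruned. A minimal usage sketch of those signatures follows; the import specifiers, task id, phase name, and model strings are illustrative assumptions, not published API surface:

// Hypothetical import specifiers; the real entry points inside @goondocks/myco may differ.
import { withValue, withTaskConfig, withEmbedding } from '@goondocks/myco/config/updates';
import type { MycoConfig } from '@goondocks/myco/config/schema';

declare const config: MycoConfig; // a previously loaded config

// withValue clones the config and sets a nested field by dot-path.
const a = withValue(config, 'context.enabled', true); // dot-path chosen for illustration

// withTaskConfig merges per-task overrides; null deletes a field,
// and empty task entries / phase maps are cleaned up afterwards.
const b = withTaskConfig(a, 'vault-evolve', {
  maxTurns: 12,            // set an override
  timeoutSeconds: null,    // delete an existing override
  phases: { research: { model: 'qwen3:8b', maxTurns: null } }, // phase name/model are illustrative
});

// withEmbedding shallow-merges partial embedding settings.
const c = withEmbedding(b, { model: 'nomic-embed-text' }); // field assumed from EmbeddingProviderConfig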
@@ -1,11 +1,11 @@
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
  import {
  SymbiontInstaller
- } from "./chunk-Q36VMZST.js";
+ } from "./chunk-VHNRMM4O.js";
  import {
  LmStudioBackend,
  OllamaBackend
- } from "./chunk-FMRZ26U5.js";
+ } from "./chunk-X3IGT5RV.js";
  import {
  closeDatabase,
  initDatabase,
@@ -13,7 +13,7 @@ import {
  } from "./chunk-MYX5NCRH.js";
  import {
  DaemonClient
- } from "./chunk-VVNL26WX.js";
+ } from "./chunk-P66DLD6G.js";
 
  // src/cli/shared.ts
  import fs from "fs";
@@ -126,4 +126,4 @@ export {
  VAULT_GITIGNORE,
  registerSymbionts
  };
- //# sourceMappingURL=chunk-KESLPBKV.js.map
+ //# sourceMappingURL=chunk-JR54LTPP.js.map
@@ -1,4 +1,8 @@
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
+ import {
+ AgentDefinitionSchema,
+ AgentTaskSchema
+ } from "./chunk-OUJSQSKE.js";
  import {
  getDatabase
  } from "./chunk-MYX5NCRH.js";
@@ -8,16 +12,12 @@ import {
  import {
  BUILT_IN_SOURCE,
  DEFAULT_AGENT_ID,
- SCHEDULABLE_POWER_STATES,
  USER_TASK_SOURCE,
  epochSeconds
- } from "./chunk-FLLBJLHM.js";
+ } from "./chunk-6C6QZ4PM.js";
  import {
  require_dist
  } from "./chunk-6LQIMRTC.js";
- import {
- external_exports
- } from "./chunk-U6PF3YII.js";
  import {
  __toESM
  } from "./chunk-PZUWP5VK.js";
@@ -216,90 +216,6 @@ function getDefaultTask(agentId) {
  return toTaskRow(row);
  }
 
- // src/agent/schemas.ts
- var ProviderConfigSchema = external_exports.object({
- type: external_exports.enum(["anthropic", "ollama", "lmstudio"]),
- baseUrl: external_exports.string().optional(),
- apiKey: external_exports.string().optional(),
- model: external_exports.string().optional()
- });
- var ExecutionConfigSchema = external_exports.object({
- model: external_exports.string().optional(),
- maxTurns: external_exports.number().optional(),
- timeoutSeconds: external_exports.number().optional(),
- provider: ProviderConfigSchema.optional()
- });
- var ContextQuerySchema = external_exports.object({
- tool: external_exports.string(),
- queryTemplate: external_exports.string(),
- limit: external_exports.number(),
- purpose: external_exports.string(),
- required: external_exports.boolean()
- });
- var AgentDefinitionSchema = external_exports.object({
- name: external_exports.string(),
- displayName: external_exports.string(),
- description: external_exports.string(),
- model: external_exports.string(),
- maxTurns: external_exports.number(),
- timeoutSeconds: external_exports.number(),
- systemPromptPath: external_exports.string(),
- tools: external_exports.array(external_exports.string())
- });
- var OrchestratorConfigSchema = external_exports.object({
- enabled: external_exports.boolean(),
- model: external_exports.string().optional(),
- maxTurns: external_exports.number().optional()
- });
- var PreConditionSchema = external_exports.enum([
- "has-unprocessed-batches",
- "has-active-skills",
- "has-approved-candidates",
- "has-skill-survey-evidence"
- ]);
- var TaskScheduleSchema = external_exports.object({
- /** Whether auto-run is enabled for this task. */
- enabled: external_exports.boolean().default(false),
- /** Seconds between runs. */
- intervalSeconds: external_exports.number().int().positive(),
- /** PowerManager states where this task runs. */
- runIn: external_exports.array(external_exports.enum([...SCHEDULABLE_POWER_STATES])).min(1),
- /** Optional pre-condition check before running. */
- preCondition: PreConditionSchema.optional()
- });
- var PhaseDefinitionSchema = external_exports.object({
- name: external_exports.string(),
- prompt: external_exports.string(),
- tools: external_exports.array(external_exports.string()),
- maxTurns: external_exports.number(),
- model: external_exports.string().optional(),
- required: external_exports.boolean(),
- dependsOn: external_exports.array(external_exports.string()).optional(),
- provider: ProviderConfigSchema.optional(),
- skipPriorContext: external_exports.boolean().optional(),
- readOnly: external_exports.boolean().optional()
- });
- var AgentTaskSchema = external_exports.object({
- name: external_exports.string(),
- displayName: external_exports.string(),
- description: external_exports.string(),
- agent: external_exports.string(),
- prompt: external_exports.string(),
- isDefault: external_exports.boolean(),
- toolOverrides: external_exports.array(external_exports.string()).optional(),
- model: external_exports.string().optional(),
- maxTurns: external_exports.number().optional(),
- timeoutSeconds: external_exports.number().optional(),
- phases: external_exports.array(PhaseDefinitionSchema).optional(),
- execution: ExecutionConfigSchema.optional(),
- contextQueries: external_exports.record(external_exports.string(), external_exports.array(ContextQuerySchema)).optional(),
- schemaVersion: external_exports.number().optional(),
- orchestrator: OrchestratorConfigSchema.optional(),
- schedule: TaskScheduleSchema.optional(),
- /** Task-specific params with defaults. */
- params: external_exports.record(external_exports.string(), external_exports.union([external_exports.string(), external_exports.number(), external_exports.boolean()])).optional()
- });
-
  // src/agent/loader.ts
  var AGENT_DEFINITION_FILE = "agent.yaml";
  var TASKS_SUBDIRECTORY = "tasks";
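The Zod-style schemas removed above (ProviderConfigSchema through AgentTaskSchema) were moved out of this chunk; the file now imports AgentDefinitionSchema and AgentTaskSchema from chunk-OUJSQSKE.js instead of defining them inline. As a reading aid, a task object of the shape AgentTaskSchema describes might look like the sketch below; every value is illustrative rather than a shipped default:

// Illustrative object matching the AgentTaskSchema fields listed in the removed code above.
const exampleTask = {
  name: 'vault-evolve',
  displayName: 'Vault Evolve',
  description: 'Example description for a built-in task.', // illustrative
  agent: 'myco',                                            // illustrative agent name
  prompt: 'Example prompt text.',                           // illustrative
  isDefault: true,
  maxTurns: 12,                                             // optional override
  schedule: {                                               // optional, TaskScheduleSchema shape
    enabled: false,
    intervalSeconds: 3600,
    runIn: ['idle'],                                        // drawn from the schedulable power states
    preCondition: 'has-unprocessed-batches',
  },
};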
@@ -357,6 +273,7 @@ function taskFromParsed(parsed) {
  isDefault: parsed.isDefault,
  ...parsed.toolOverrides ? { toolOverrides: parsed.toolOverrides } : {},
  ...parsed.model ? { model: parsed.model } : {},
+ ...parsed.reasoningLevel ? { reasoningLevel: parsed.reasoningLevel } : {},
  ...parsed.maxTurns ? { maxTurns: parsed.maxTurns } : {},
  ...parsed.timeoutSeconds ? { timeoutSeconds: parsed.timeoutSeconds } : {},
  ...parsed.phases ? { phases: parsed.phases } : {},
@@ -373,7 +290,9 @@ function loadSystemPrompt(definitionsDir, relativePath) {
  return fs.readFileSync(filePath, "utf-8").trim();
  }
  function resolveEffectiveConfig(definition, agentOverrides, taskOverrides) {
+ let runtime = taskOverrides?.execution?.runtime ?? taskOverrides?.execution?.provider?.runtime ?? "claude-sdk";
  let model = definition.model;
+ let reasoningLevel = taskOverrides?.reasoningLevel;
  let maxTurns = definition.maxTurns;
  let timeoutSeconds = definition.timeoutSeconds;
  let tools = [...definition.tools];
@@ -391,6 +310,7 @@ function resolveEffectiveConfig(definition, agentOverrides, taskOverrides) {
  }
  }
  if (taskOverrides?.model) model = taskOverrides.model;
+ if (taskOverrides?.reasoningLevel) reasoningLevel = taskOverrides.reasoningLevel;
  if (taskOverrides?.maxTurns) maxTurns = taskOverrides.maxTurns;
  if (taskOverrides?.timeoutSeconds) timeoutSeconds = taskOverrides.timeoutSeconds;
  if (taskOverrides?.toolOverrides) {
@@ -398,15 +318,18 @@ function resolveEffectiveConfig(definition, agentOverrides, taskOverrides) {
  }
  if (taskOverrides?.execution) {
  if (taskOverrides.execution.model) model = taskOverrides.execution.model;
+ if (taskOverrides.execution.reasoningLevel) reasoningLevel = taskOverrides.execution.reasoningLevel;
  if (taskOverrides.execution.maxTurns) maxTurns = taskOverrides.execution.maxTurns;
  if (taskOverrides.execution.timeoutSeconds) timeoutSeconds = taskOverrides.execution.timeoutSeconds;
  }
- const taskName = taskOverrides?.name ?? "full-intelligence";
- const taskDisplayName = taskOverrides?.displayName ?? "Full Intelligence";
+ const taskName = taskOverrides?.name ?? "vault-evolve";
+ const taskDisplayName = taskOverrides?.displayName ?? "Vault Evolve";
  const taskPrompt = taskOverrides?.prompt ?? "";
  return {
  agentId,
+ runtime,
  model,
+ ...reasoningLevel ? { reasoningLevel } : {},
  maxTurns,
  timeoutSeconds,
  systemPromptPath: definition.systemPromptPath,
@@ -465,7 +388,7 @@ async function registerBuiltInAgentsAndTasks(definitionsDir, vaultDir) {
  ).run(BUILT_IN_SOURCE, definition.name, ...validTaskIds);
  }
  if (vaultDir) {
- const { loadAllTasks } = await import("./registry-U4CHXK6R.js");
+ const { loadAllTasks } = await import("./registry-M2Z5QBWH.js");
  const allTasks = loadAllTasks(definitionsDir, vaultDir);
  for (const [name, task] of allTasks) {
  if (task.source === USER_TASK_SOURCE) {
@@ -497,7 +420,6 @@ export {
  getDefaultTask,
  registerAgent,
  getAgent,
- AgentTaskSchema,
  resolveDefinitionsDir,
  loadAgentDefinition,
  loadAgentTasks,
@@ -506,4 +428,4 @@ export {
  resolveEffectiveConfig,
  registerBuiltInAgentsAndTasks
  };
- //# sourceMappingURL=chunk-CCRGY3QW.js.map
+ //# sourceMappingURL=chunk-JZS6GZ6T.js.map
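The rewritten resolveEffectiveConfig keeps the existing override order (task execution settings win over task-level fields, which win over the agent definition) and threads two new values through it: runtime, defaulting to "claude-sdk", and an optional reasoningLevel. A condensed sketch of that precedence, using simplified stand-in types rather than the package's real ones:

// Simplified shapes; the real definition/override types live in the package's schema modules.
type Execution = { runtime?: string; model?: string; reasoningLevel?: string; provider?: { runtime?: string } };
type TaskOverrides = { model?: string; reasoningLevel?: string; execution?: Execution };
type Definition = { model: string };

function effective(def: Definition, task?: TaskOverrides) {
  // execution overrides beat task-level overrides, which beat the definition
  const model = task?.execution?.model ?? task?.model ?? def.model;
  const reasoningLevel = task?.execution?.reasoningLevel ?? task?.reasoningLevel; // may stay undefined
  // runtime falls back to the execution provider's runtime, then to "claude-sdk"
  const runtime = task?.execution?.runtime ?? task?.execution?.provider?.runtime ?? 'claude-sdk';
  return { model, runtime, ...(reasoningLevel ? { reasoningLevel } : {}) };
}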