@goondocks/myco 0.19.5 → 0.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188) hide show
  1. package/dist/{agent-run-3IQXE5PI.js → agent-run-4HUXVRHW.js} +8 -8
  2. package/dist/{agent-tasks-5DIA3CE5.js → agent-tasks-JF45ELB6.js} +8 -8
  3. package/dist/{chunk-DURKJTVO.js → chunk-3WOS4TAR.js} +9 -1
  4. package/dist/chunk-3WOS4TAR.js.map +1 -0
  5. package/dist/{chunk-27ZDDWIA.js → chunk-4LCIKVDM.js} +49 -21
  6. package/dist/chunk-4LCIKVDM.js.map +1 -0
  7. package/dist/{chunk-Q4QD6LJT.js → chunk-4M7EWPIA.js} +3 -3
  8. package/dist/{chunk-FGKCE5AE.js → chunk-4YFKBL3F.js} +2 -2
  9. package/dist/{chunk-KYH4V4ML.js → chunk-57O67XVF.js} +3 -3
  10. package/dist/{chunk-7ONVLO43.js → chunk-5XIVBO25.js} +2 -2
  11. package/dist/{chunk-QLCD77AN.js → chunk-6RFZWV4R.js} +18 -1
  12. package/dist/chunk-6RFZWV4R.js.map +1 -0
  13. package/dist/{chunk-VH7XYQFL.js → chunk-ACQ2AIEM.js} +2 -2
  14. package/dist/{chunk-6ZDJXSEO.js → chunk-BPRIYNLE.js} +3 -3
  15. package/dist/chunk-CUDIZJY7.js +36 -0
  16. package/dist/chunk-CUDIZJY7.js.map +1 -0
  17. package/dist/{chunk-WKNAKQKA.js → chunk-DCSGJ7W4.js} +13 -19
  18. package/dist/chunk-DCSGJ7W4.js.map +1 -0
  19. package/dist/{chunk-Q6OEZM3S.js → chunk-EVDQKYCG.js} +237 -10
  20. package/dist/chunk-EVDQKYCG.js.map +1 -0
  21. package/dist/{chunk-I54KLC6H.js → chunk-FLLBJLHM.js} +3 -1
  22. package/dist/{chunk-I54KLC6H.js.map → chunk-FLLBJLHM.js.map} +1 -1
  23. package/dist/{chunk-PMT2LSTQ.js → chunk-FMRZ26U5.js} +2 -2
  24. package/dist/{chunk-3J6TUJSV.js → chunk-KHT24OWC.js} +3 -3
  25. package/dist/{chunk-5OXBT5MD.js → chunk-L6XFAJIF.js} +65 -14
  26. package/dist/chunk-L6XFAJIF.js.map +1 -0
  27. package/dist/{chunk-6DDRJQ4X.js → chunk-MYOZLMB2.js} +2 -2
  28. package/dist/{chunk-UVKQ62II.js → chunk-NGROSFOH.js} +24 -2
  29. package/dist/chunk-NGROSFOH.js.map +1 -0
  30. package/dist/{chunk-K2QX43GC.js → chunk-P3DN5EWW.js} +4 -4
  31. package/dist/{chunk-IQ5LQTV7.js → chunk-POR75WM6.js} +4 -4
  32. package/dist/{chunk-KABTXALI.js → chunk-QS5TWZBL.js} +4 -4
  33. package/dist/{chunk-UTSCRMJE.js → chunk-SRXTSI25.js} +110 -4
  34. package/dist/chunk-SRXTSI25.js.map +1 -0
  35. package/dist/{chunk-2QJCV3UL.js → chunk-UOQQENDW.js} +3 -3
  36. package/dist/{chunk-GFR542SM.js → chunk-US4LNCAT.js} +5 -11
  37. package/dist/chunk-US4LNCAT.js.map +1 -0
  38. package/dist/{chunk-44PZCAYS.js → chunk-XL75KZGI.js} +23 -13
  39. package/dist/chunk-XL75KZGI.js.map +1 -0
  40. package/dist/{chunk-5WPTS6A4.js → chunk-YSNIAJ5D.js} +7 -4
  41. package/dist/chunk-YSNIAJ5D.js.map +1 -0
  42. package/dist/chunk-ZXZPJJN3.js +54 -0
  43. package/dist/chunk-ZXZPJJN3.js.map +1 -0
  44. package/dist/{cli-RTUSGLTM.js → cli-AHTINAHY.js} +43 -43
  45. package/dist/{client-YWE5YJB7.js → client-LHENCAV3.js} +4 -4
  46. package/dist/{config-I5MJ6RXI.js → config-XPV5GDE4.js} +8 -16
  47. package/dist/config-XPV5GDE4.js.map +1 -0
  48. package/dist/{detect-BEOIHGBC.js → detect-PXNM6TA7.js} +2 -2
  49. package/dist/{detect-providers-2EY55EHK.js → detect-providers-5KOPZ7J2.js} +4 -4
  50. package/dist/{doctor-FIG7VEYV.js → doctor-XPCF5HV5.js} +13 -13
  51. package/dist/{executor-2TMGOVEA.js → executor-ACDHGTRH.js} +115 -77
  52. package/dist/executor-ACDHGTRH.js.map +1 -0
  53. package/dist/{init-3536BYDC.js → init-V3KCC36O.js} +14 -14
  54. package/dist/{installer-YH3WQISI.js → installer-ZNK4JSQA.js} +4 -4
  55. package/dist/{llm-SWDDQQWY.js → llm-TH4NLIRM.js} +4 -4
  56. package/dist/{loader-K4WF4EEJ.js → loader-H7OFASVC.js} +15 -3
  57. package/dist/{loader-AAZ6VUIA.js → loader-TSB5M7FD.js} +3 -3
  58. package/dist/{logs-KNKPQE5A.js → logs-7YVGGBIS.js} +2 -2
  59. package/dist/{main-R5ZD5OIZ.js → main-5S4MDCIO.js} +770 -176
  60. package/dist/main-5S4MDCIO.js.map +1 -0
  61. package/dist/{open-5UD5JQIM.js → open-AB5ULZIB.js} +8 -8
  62. package/dist/{post-compact-ZJFE66O3.js → post-compact-P2B7C7FE.js} +9 -8
  63. package/dist/{post-compact-ZJFE66O3.js.map → post-compact-P2B7C7FE.js.map} +1 -1
  64. package/dist/{post-tool-use-CAR2USJP.js → post-tool-use-LXL6NXDS.js} +8 -7
  65. package/dist/{post-tool-use-CAR2USJP.js.map → post-tool-use-LXL6NXDS.js.map} +1 -1
  66. package/dist/{post-tool-use-failure-OMIKVEVR.js → post-tool-use-failure-WAYVVKGR.js} +9 -8
  67. package/dist/{post-tool-use-failure-OMIKVEVR.js.map → post-tool-use-failure-WAYVVKGR.js.map} +1 -1
  68. package/dist/{pre-compact-6SXYI5CD.js → pre-compact-BCXUCF4V.js} +9 -8
  69. package/dist/{pre-compact-6SXYI5CD.js.map → pre-compact-BCXUCF4V.js.map} +1 -1
  70. package/dist/{provider-check-WCM3SDTM.js → provider-check-43LAMSMH.js} +4 -4
  71. package/dist/{registry-OCM4WAPJ.js → registry-MGJSJBAS.js} +4 -4
  72. package/dist/{remove-NJSFVZXW.js → remove-KAPX5NT2.js} +10 -10
  73. package/dist/{restart-U5ZGJON7.js → restart-HQO36FTG.js} +9 -9
  74. package/dist/{search-HO7CXV6H.js → search-YOMOKAAI.js} +9 -9
  75. package/dist/{server-BUSZIUZV.js → server-2N23P6F2.js} +40 -27
  76. package/dist/{server-BUSZIUZV.js.map → server-2N23P6F2.js.map} +1 -1
  77. package/dist/{session-RVT2QELH.js → session-WW2JLHPX.js} +9 -10
  78. package/dist/{session-RVT2QELH.js.map → session-WW2JLHPX.js.map} +1 -1
  79. package/dist/{session-end-4W6SZVGH.js → session-end-4WRTIBVQ.js} +8 -7
  80. package/dist/{session-end-4W6SZVGH.js.map → session-end-4WRTIBVQ.js.map} +1 -1
  81. package/dist/{session-start-PMPKAST4.js → session-start-HRWTZXQR.js} +15 -15
  82. package/dist/session-start-HRWTZXQR.js.map +1 -0
  83. package/dist/{setup-llm-6UAJUHQE.js → setup-llm-HFWSBUAF.js} +10 -9
  84. package/dist/{setup-llm-6UAJUHQE.js.map → setup-llm-HFWSBUAF.js.map} +1 -1
  85. package/dist/src/agent/definitions/tasks/full-intelligence.yaml +37 -8
  86. package/dist/src/agent/prompts/agent.md +2 -2
  87. package/dist/src/cli.js +1 -1
  88. package/dist/src/daemon/main.js +1 -1
  89. package/dist/src/hooks/post-tool-use.js +1 -1
  90. package/dist/src/hooks/session-end.js +1 -1
  91. package/dist/src/hooks/session-start.js +1 -1
  92. package/dist/src/hooks/stop.js +1 -1
  93. package/dist/src/hooks/user-prompt-submit.js +1 -1
  94. package/dist/src/mcp/server.js +1 -1
  95. package/dist/src/symbionts/manifests/codex.yaml +28 -0
  96. package/dist/{stats-W47FF6RD.js → stats-7A4CJ4MS.js} +9 -9
  97. package/dist/{stop-6TAO2UU2.js → stop-R2GDHMRA.js} +8 -7
  98. package/dist/{stop-6TAO2UU2.js.map → stop-R2GDHMRA.js.map} +1 -1
  99. package/dist/{stop-failure-R76SULCV.js → stop-failure-773KR4VZ.js} +9 -8
  100. package/dist/{stop-failure-R76SULCV.js.map → stop-failure-773KR4VZ.js.map} +1 -1
  101. package/dist/{subagent-start-TJMUZLP2.js → subagent-start-IDECNBHW.js} +9 -8
  102. package/dist/{subagent-start-TJMUZLP2.js.map → subagent-start-IDECNBHW.js.map} +1 -1
  103. package/dist/{subagent-stop-M3DAFJWQ.js → subagent-stop-3JH7DR2S.js} +9 -8
  104. package/dist/{subagent-stop-M3DAFJWQ.js.map → subagent-stop-3JH7DR2S.js.map} +1 -1
  105. package/dist/{task-completed-2KVR5JV6.js → task-completed-AYVHPHDR.js} +9 -8
  106. package/dist/{task-completed-2KVR5JV6.js.map → task-completed-AYVHPHDR.js.map} +1 -1
  107. package/dist/{team-2IAT6MKD.js → team-3JKF7VAD.js} +5 -5
  108. package/dist/{turns-3ZQAF6HF.js → turns-YFNI5CQC.js} +6 -4
  109. package/dist/ui/assets/index-C2JuNtRB.css +1 -0
  110. package/dist/ui/assets/index-JLVaQKV2.js +832 -0
  111. package/dist/ui/favicon-dusk.svg +11 -0
  112. package/dist/ui/favicon-moss.svg +11 -0
  113. package/dist/ui/favicon-plum.svg +11 -0
  114. package/dist/ui/favicon-sage.svg +11 -0
  115. package/dist/ui/favicon-slate.svg +11 -0
  116. package/dist/ui/favicon-terracotta.svg +11 -0
  117. package/dist/ui/index.html +3 -3
  118. package/dist/{update-TB34JEB7.js → update-YWYW55JM.js} +10 -10
  119. package/dist/{user-prompt-submit-O4TP7NJ6.js → user-prompt-submit-YELSR6XI.js} +9 -8
  120. package/dist/{user-prompt-submit-O4TP7NJ6.js.map → user-prompt-submit-YELSR6XI.js.map} +1 -1
  121. package/dist/{verify-SESZXGVY.js → verify-JS44DVKJ.js} +5 -5
  122. package/dist/{version-QBORV23E.js → version-K5NETYIL.js} +2 -2
  123. package/package.json +1 -1
  124. package/skills/myco/SKILL.md +78 -43
  125. package/skills/myco/references/vault-status.md +1 -1
  126. package/dist/chunk-27ZDDWIA.js.map +0 -1
  127. package/dist/chunk-44PZCAYS.js.map +0 -1
  128. package/dist/chunk-5OXBT5MD.js.map +0 -1
  129. package/dist/chunk-5WPTS6A4.js.map +0 -1
  130. package/dist/chunk-5ZT2Q6P5.js +0 -25
  131. package/dist/chunk-5ZT2Q6P5.js.map +0 -1
  132. package/dist/chunk-AULBWINA.js +0 -227
  133. package/dist/chunk-AULBWINA.js.map +0 -1
  134. package/dist/chunk-DURKJTVO.js.map +0 -1
  135. package/dist/chunk-GFR542SM.js.map +0 -1
  136. package/dist/chunk-Q6OEZM3S.js.map +0 -1
  137. package/dist/chunk-QLCD77AN.js.map +0 -1
  138. package/dist/chunk-UTSCRMJE.js.map +0 -1
  139. package/dist/chunk-UVKQ62II.js.map +0 -1
  140. package/dist/chunk-VQF5E4ZX.js +0 -91
  141. package/dist/chunk-VQF5E4ZX.js.map +0 -1
  142. package/dist/chunk-WKNAKQKA.js.map +0 -1
  143. package/dist/config-I5MJ6RXI.js.map +0 -1
  144. package/dist/executor-2TMGOVEA.js.map +0 -1
  145. package/dist/main-R5ZD5OIZ.js.map +0 -1
  146. package/dist/resolution-events-PYLSI6QT.js +0 -15
  147. package/dist/session-start-PMPKAST4.js.map +0 -1
  148. package/dist/ui/assets/index-C-6W8e3m.js +0 -842
  149. package/dist/ui/assets/index-CRmkSi63.css +0 -1
  150. package/dist/version-QBORV23E.js.map +0 -1
  151. /package/dist/{agent-run-3IQXE5PI.js.map → agent-run-4HUXVRHW.js.map} +0 -0
  152. /package/dist/{agent-tasks-5DIA3CE5.js.map → agent-tasks-JF45ELB6.js.map} +0 -0
  153. /package/dist/{chunk-Q4QD6LJT.js.map → chunk-4M7EWPIA.js.map} +0 -0
  154. /package/dist/{chunk-FGKCE5AE.js.map → chunk-4YFKBL3F.js.map} +0 -0
  155. /package/dist/{chunk-KYH4V4ML.js.map → chunk-57O67XVF.js.map} +0 -0
  156. /package/dist/{chunk-7ONVLO43.js.map → chunk-5XIVBO25.js.map} +0 -0
  157. /package/dist/{chunk-VH7XYQFL.js.map → chunk-ACQ2AIEM.js.map} +0 -0
  158. /package/dist/{chunk-6ZDJXSEO.js.map → chunk-BPRIYNLE.js.map} +0 -0
  159. /package/dist/{chunk-PMT2LSTQ.js.map → chunk-FMRZ26U5.js.map} +0 -0
  160. /package/dist/{chunk-3J6TUJSV.js.map → chunk-KHT24OWC.js.map} +0 -0
  161. /package/dist/{chunk-6DDRJQ4X.js.map → chunk-MYOZLMB2.js.map} +0 -0
  162. /package/dist/{chunk-K2QX43GC.js.map → chunk-P3DN5EWW.js.map} +0 -0
  163. /package/dist/{chunk-IQ5LQTV7.js.map → chunk-POR75WM6.js.map} +0 -0
  164. /package/dist/{chunk-KABTXALI.js.map → chunk-QS5TWZBL.js.map} +0 -0
  165. /package/dist/{chunk-2QJCV3UL.js.map → chunk-UOQQENDW.js.map} +0 -0
  166. /package/dist/{cli-RTUSGLTM.js.map → cli-AHTINAHY.js.map} +0 -0
  167. /package/dist/{client-YWE5YJB7.js.map → client-LHENCAV3.js.map} +0 -0
  168. /package/dist/{detect-BEOIHGBC.js.map → detect-PXNM6TA7.js.map} +0 -0
  169. /package/dist/{detect-providers-2EY55EHK.js.map → detect-providers-5KOPZ7J2.js.map} +0 -0
  170. /package/dist/{doctor-FIG7VEYV.js.map → doctor-XPCF5HV5.js.map} +0 -0
  171. /package/dist/{init-3536BYDC.js.map → init-V3KCC36O.js.map} +0 -0
  172. /package/dist/{installer-YH3WQISI.js.map → installer-ZNK4JSQA.js.map} +0 -0
  173. /package/dist/{llm-SWDDQQWY.js.map → llm-TH4NLIRM.js.map} +0 -0
  174. /package/dist/{loader-AAZ6VUIA.js.map → loader-H7OFASVC.js.map} +0 -0
  175. /package/dist/{loader-K4WF4EEJ.js.map → loader-TSB5M7FD.js.map} +0 -0
  176. /package/dist/{logs-KNKPQE5A.js.map → logs-7YVGGBIS.js.map} +0 -0
  177. /package/dist/{open-5UD5JQIM.js.map → open-AB5ULZIB.js.map} +0 -0
  178. /package/dist/{provider-check-WCM3SDTM.js.map → provider-check-43LAMSMH.js.map} +0 -0
  179. /package/dist/{registry-OCM4WAPJ.js.map → registry-MGJSJBAS.js.map} +0 -0
  180. /package/dist/{remove-NJSFVZXW.js.map → remove-KAPX5NT2.js.map} +0 -0
  181. /package/dist/{restart-U5ZGJON7.js.map → restart-HQO36FTG.js.map} +0 -0
  182. /package/dist/{search-HO7CXV6H.js.map → search-YOMOKAAI.js.map} +0 -0
  183. /package/dist/{stats-W47FF6RD.js.map → stats-7A4CJ4MS.js.map} +0 -0
  184. /package/dist/{resolution-events-PYLSI6QT.js.map → team-3JKF7VAD.js.map} +0 -0
  185. /package/dist/{team-2IAT6MKD.js.map → turns-YFNI5CQC.js.map} +0 -0
  186. /package/dist/{update-TB34JEB7.js.map → update-YWYW55JM.js.map} +0 -0
  187. /package/dist/{verify-SESZXGVY.js.map → verify-JS44DVKJ.js.map} +0 -0
  188. /package/dist/{turns-3ZQAF6HF.js.map → version-K5NETYIL.js.map} +0 -0
@@ -0,0 +1,36 @@
1
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
+
3
+ // src/vault/resolve.ts
4
+ import path from "path";
5
+ import { execFileSync } from "child_process";
6
+ var MYCO_PROJECT_ROOT_ENV = "MYCO_PROJECT_ROOT";
7
+ var MYCO_VAULT_DIR_ENV = "MYCO_VAULT_DIR";
8
+ function resolveVaultDir(cwd = process.cwd(), env = process.env) {
9
+ const explicitVaultDir = readAbsoluteEnv(env, MYCO_VAULT_DIR_ENV);
10
+ if (explicitVaultDir) return explicitVaultDir;
11
+ const explicitProjectRoot = readAbsoluteEnv(env, MYCO_PROJECT_ROOT_ENV);
12
+ if (explicitProjectRoot) return path.join(explicitProjectRoot, ".myco");
13
+ return path.join(resolveRepoRoot(cwd), ".myco");
14
+ }
15
+ function readAbsoluteEnv(env, key) {
16
+ const raw = env[key];
17
+ if (typeof raw !== "string" || raw.length === 0) return null;
18
+ return path.isAbsolute(raw) ? raw : null;
19
+ }
20
+ function resolveRepoRoot(cwd) {
21
+ try {
22
+ const gitCommon = execFileSync(
23
+ "git",
24
+ ["rev-parse", "--git-common-dir"],
25
+ { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
26
+ ).trim();
27
+ return path.resolve(cwd, gitCommon, "..");
28
+ } catch {
29
+ return cwd;
30
+ }
31
+ }
32
+
33
+ export {
34
+ resolveVaultDir
35
+ };
36
+ //# sourceMappingURL=chunk-CUDIZJY7.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/vault/resolve.ts"],"sourcesContent":["import path from 'node:path';\nimport { execFileSync } from 'node:child_process';\n\nexport const MYCO_PROJECT_ROOT_ENV = 'MYCO_PROJECT_ROOT';\nexport const MYCO_VAULT_DIR_ENV = 'MYCO_VAULT_DIR';\n\n/**\n * Resolve the vault directory.\n *\n * Always `.myco/` in the project root. The vault is a SQLite database\n * that lives with the project — no external overrides needed.\n *\n * Uses git to find the repo root so this works correctly in\n * git worktrees — worktree agents resolve to the same vault\n * as the main working tree. Some symbionts launch their MCP child with\n * cwd=/, so explicit env anchors win before any cwd-based fallback.\n */\nexport function resolveVaultDir(\n cwd = process.cwd(),\n env: NodeJS.ProcessEnv = process.env,\n): string {\n const explicitVaultDir = readAbsoluteEnv(env, MYCO_VAULT_DIR_ENV);\n if (explicitVaultDir) return explicitVaultDir;\n\n const explicitProjectRoot = readAbsoluteEnv(env, MYCO_PROJECT_ROOT_ENV);\n if (explicitProjectRoot) return path.join(explicitProjectRoot, '.myco');\n\n return path.join(resolveRepoRoot(cwd), '.myco');\n}\n\nfunction readAbsoluteEnv(\n env: NodeJS.ProcessEnv,\n key: string,\n): string | null {\n const raw = env[key];\n if (typeof raw !== 'string' || raw.length === 0) return null;\n return path.isAbsolute(raw) ? 
raw : null;\n}\n\n/**\n * Find the main repo root, even from a git worktree.\n *\n * `git rev-parse --git-common-dir` returns the shared .git directory:\n * - In a normal repo: \".git\" (relative)\n * - In a worktree: \"/abs/path/to/main-repo/.git\" (absolute)\n *\n * The repo root is the parent of that path.\n * Falls back to cwd if not in a git repo.\n */\nfunction resolveRepoRoot(cwd: string): string {\n try {\n const gitCommon = execFileSync(\n 'git', ['rev-parse', '--git-common-dir'],\n { cwd, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] },\n ).trim();\n return path.resolve(cwd, gitCommon, '..');\n } catch {\n return cwd;\n }\n}\n"],"mappings":";;;AAAA,OAAO,UAAU;AACjB,SAAS,oBAAoB;AAEtB,IAAM,wBAAwB;AAC9B,IAAM,qBAAqB;AAa3B,SAAS,gBACd,MAAM,QAAQ,IAAI,GAClB,MAAyB,QAAQ,KACzB;AACR,QAAM,mBAAmB,gBAAgB,KAAK,kBAAkB;AAChE,MAAI,iBAAkB,QAAO;AAE7B,QAAM,sBAAsB,gBAAgB,KAAK,qBAAqB;AACtE,MAAI,oBAAqB,QAAO,KAAK,KAAK,qBAAqB,OAAO;AAEtE,SAAO,KAAK,KAAK,gBAAgB,GAAG,GAAG,OAAO;AAChD;AAEA,SAAS,gBACP,KACA,KACe;AACf,QAAM,MAAM,IAAI,GAAG;AACnB,MAAI,OAAO,QAAQ,YAAY,IAAI,WAAW,EAAG,QAAO;AACxD,SAAO,KAAK,WAAW,GAAG,IAAI,MAAM;AACtC;AAYA,SAAS,gBAAgB,KAAqB;AAC5C,MAAI;AACF,UAAM,YAAY;AAAA,MAChB;AAAA,MAAO,CAAC,aAAa,kBAAkB;AAAA,MACvC,EAAE,KAAK,UAAU,SAAS,OAAO,CAAC,QAAQ,QAAQ,MAAM,EAAE;AAAA,IAC5D,EAAE,KAAK;AACP,WAAO,KAAK,QAAQ,KAAK,WAAW,IAAI;AAAA,EAC1C,QAAQ;AACN,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -1,10 +1,13 @@
1
1
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
+ import {
3
+ getAtPath
4
+ } from "./chunk-ZXZPJJN3.js";
2
5
  import {
3
6
  loadManifests
4
- } from "./chunk-UVKQ62II.js";
7
+ } from "./chunk-NGROSFOH.js";
5
8
  import {
6
9
  STDIN_TIMEOUT_MS
7
- } from "./chunk-I54KLC6H.js";
10
+ } from "./chunk-FLLBJLHM.js";
8
11
 
9
12
  // src/hooks/read-stdin.ts
10
13
  function readStdin() {
@@ -82,30 +85,21 @@ function detectManifest(input) {
82
85
  cachedManifest = null;
83
86
  return null;
84
87
  }
85
- function resolveField(input, fieldPath) {
86
- const parts = fieldPath.split(".");
87
- let current = input;
88
- for (const part of parts) {
89
- if (current === null || current === void 0 || typeof current !== "object") return void 0;
90
- current = current[part];
91
- }
92
- return current;
93
- }
94
88
  function normalizeHookInput(input) {
95
89
  const manifest = detectManifest(input);
96
90
  const fields = manifest?.hookFields ?? DEFAULT_HOOK_FIELDS;
97
- const sessionIdFromInput = resolveField(input, fields.sessionId);
91
+ const sessionIdFromInput = getAtPath(input, fields.sessionId);
98
92
  const sessionIdFromEnv = "sessionIdEnv" in fields && fields.sessionIdEnv ? process.env[fields.sessionIdEnv] : void 0;
99
93
  const sessionId = sessionIdFromInput ?? sessionIdFromEnv ?? process.env.MYCO_SESSION_ID ?? `s-${Date.now()}`;
100
94
  return {
101
95
  agent: manifest?.name ?? DEFAULT_AGENT_NAME,
102
96
  sessionId,
103
- transcriptPath: resolveField(input, fields.transcriptPath),
104
- lastResponse: resolveField(input, fields.lastResponse),
105
- prompt: resolveField(input, fields.prompt),
106
- toolName: resolveField(input, fields.toolName),
107
- toolInput: resolveField(input, fields.toolInput),
108
- toolOutput: resolveField(input, fields.toolOutput),
97
+ transcriptPath: getAtPath(input, fields.transcriptPath),
98
+ lastResponse: getAtPath(input, fields.lastResponse),
99
+ prompt: getAtPath(input, fields.prompt),
100
+ toolName: getAtPath(input, fields.toolName),
101
+ toolInput: getAtPath(input, fields.toolInput),
102
+ toolOutput: getAtPath(input, fields.toolOutput),
109
103
  raw: input
110
104
  };
111
105
  }
@@ -114,4 +108,4 @@ export {
114
108
  readStdin,
115
109
  normalizeHookInput
116
110
  };
117
- //# sourceMappingURL=chunk-WKNAKQKA.js.map
111
+ //# sourceMappingURL=chunk-DCSGJ7W4.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/hooks/read-stdin.ts","../src/hooks/normalize.ts"],"sourcesContent":["import { STDIN_TIMEOUT_MS } from '../constants.js';\n\nexport function readStdin(): Promise<string> {\n return new Promise((resolve) => {\n let data = '';\n process.stdin.on('data', (chunk: Buffer) => { data += chunk; });\n process.stdin.on('end', () => resolve(data));\n setTimeout(() => resolve(data || '{}'), STDIN_TIMEOUT_MS);\n });\n}\n","/**\n * Hook payload normalization layer.\n *\n * Each agent sends different field names in hook stdin (e.g., Claude Code uses\n * `session_id`, VS Code uses `sessionId`, Windsurf uses `trajectory_id`).\n * This module detects the active agent, loads its manifest, and maps the\n * raw input to a canonical shape that all hooks can consume uniformly.\n */\n\nimport { loadManifests } from '../symbionts/detect.js';\nimport type { SymbiontManifest } from '../symbionts/manifest-schema.js';\nimport { getAtPath } from '../utils/dot-path.js';\n\n/** Default field mappings when no agent manifest is detected (Claude Code conventions). */\nconst DEFAULT_HOOK_FIELDS = {\n sessionId: 'session_id',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n} as const;\n\n/** Canonical hook input with normalized field names. */\nexport interface NormalizedHookInput {\n /** Detected agent name from manifest (e.g., 'claude-code', 'codex', 'windsurf'). */\n agent: string;\n sessionId: string;\n transcriptPath?: string;\n lastResponse?: string;\n prompt?: string;\n toolName?: string;\n toolInput?: unknown;\n toolOutput?: unknown;\n /** The full raw input for any fields not covered by the mapping. */\n raw: Record<string, unknown>;\n}\n\n/** Default agent name when no manifest is detected. */\nconst DEFAULT_AGENT_NAME = 'claude-code';\n\n/** Cached manifest for the detected agent — resolved once per process. 
*/\nlet cachedManifest: SymbiontManifest | null | undefined;\n\n/**\n * Parse `--symbiont <name>` from process argv.\n *\n * The hook command line rendered by the installer for every symbiont's\n * hooks.json looks like:\n *\n * node .agents/myco-run.cjs hook session-start --symbiont codex\n *\n * `.agents/myco-run.cjs` resolves the myco binary via\n * `.myco/runtime.command` and execs it with all argv passed through, so\n * by the time the hook handler module loads, `process.argv` contains\n * the flag. This is the installer's explicit declaration of which\n * symbiont owns this invocation — strictly more reliable than any\n * runtime heuristic.\n *\n * Supports both `--symbiont codex` (two args) and `--symbiont=codex`\n * (one arg) to be forgiving about shell quoting on Windows.\n */\nexport function readSymbiontFlag(argv: readonly string[]): string | undefined {\n for (let i = 0; i < argv.length; i++) {\n const arg = argv[i];\n if (arg === '--symbiont') {\n const next = argv[i + 1];\n if (next && !next.startsWith('-')) return next;\n } else if (arg.startsWith('--symbiont=')) {\n return arg.slice('--symbiont='.length);\n }\n }\n return undefined;\n}\n\n/**\n * Detect which symbiont is driving this hook invocation.\n *\n * Detection strategies in order:\n * 1. **Primary** — `--symbiont <name>` argv flag rendered into each\n * agent's hooks.json at install time. Dead simple and unambiguous:\n * the installer knows which agent it's writing into, so we bake\n * the identity into the hook command itself.\n * 2. `pluginRootEnvVar` (e.g., `CLAUDE_PLUGIN_ROOT`) — set natively by\n * agents that cooperate with a plugin system. Fallback for older\n * installs that predate the argv flag.\n * 3. `sessionIdEnv` fallback (e.g., `GEMINI_SESSION_ID`) — set by agents\n * that expose the session via env var rather than payload field.\n * 4. Payload-driven heuristic: match the event's `transcript_path` /\n * `cwd` against each manifest's `configDir`. 
Safety net for pre-\n * flag installations that have somehow also lost their env-var\n * signal. Generic — works for every manifest without per-agent\n * branching.\n *\n * The cache is per-process, which is fine: each hook invocation is a\n * short-lived Node process. `input` is optional so callers that just\n * want env-based detection (e.g., at module import time) still work.\n */\nfunction detectManifest(input?: Record<string, unknown>): SymbiontManifest | null {\n if (cachedManifest !== undefined) return cachedManifest;\n\n const manifests = loadManifests();\n\n // 1) Primary: explicit --symbiont flag from the installer-rendered\n // hook command. This is the source of truth when present.\n const flagName = readSymbiontFlag(process.argv);\n if (flagName) {\n const m = manifests.find((x) => x.name === flagName);\n if (m) {\n cachedManifest = m;\n return m;\n }\n // Flag specified an unknown manifest — fall through to heuristics\n // rather than guessing. Logging happens at the handler level.\n }\n\n // 2) Env-var detection: check pluginRootEnvVar for each manifest.\n for (const m of manifests) {\n if (process.env[m.pluginRootEnvVar]) {\n cachedManifest = m;\n return m;\n }\n }\n\n // 3) sessionIdEnv fallback (e.g., GEMINI_SESSION_ID).\n for (const m of manifests) {\n if (m.hookFields.sessionIdEnv && process.env[m.hookFields.sessionIdEnv]) {\n cachedManifest = m;\n return m;\n }\n }\n\n // 4) Payload-driven heuristic: match configDir against transcript_path\n // / cwd. Kept as a safety net for pre-flag installations. 
Preferred\n // signals above always win when they're available.\n if (input) {\n const candidates: string[] = [];\n const tp = input.transcript_path;\n const cwd = input.cwd;\n if (typeof tp === 'string' && tp.length > 0) candidates.push(tp);\n if (typeof cwd === 'string' && cwd.length > 0) candidates.push(cwd);\n for (const m of manifests) {\n const marker = `/${m.configDir}/`;\n if (candidates.some((c) => c.includes(marker))) {\n cachedManifest = m;\n return m;\n }\n }\n }\n\n cachedManifest = null;\n return null;\n}\n\n/**\n * Normalize a raw hook input using the active agent's manifest field mappings.\n * Falls back to Claude Code field names if no agent is detected.\n */\nexport function normalizeHookInput(input: Record<string, unknown>): NormalizedHookInput {\n const manifest = detectManifest(input);\n const fields = manifest?.hookFields ?? DEFAULT_HOOK_FIELDS;\n\n // Resolve session ID: try the mapped field, then env var fallback, then MYCO_SESSION_ID\n const sessionIdFromInput = getAtPath(input, fields.sessionId) as string | undefined;\n const sessionIdFromEnv = 'sessionIdEnv' in fields && fields.sessionIdEnv\n ? process.env[fields.sessionIdEnv]\n : undefined;\n const sessionId = sessionIdFromInput\n ?? sessionIdFromEnv\n ?? process.env.MYCO_SESSION_ID\n ?? `s-${Date.now()}`;\n\n return {\n agent: manifest?.name ?? DEFAULT_AGENT_NAME,\n sessionId,\n transcriptPath: getAtPath(input, fields.transcriptPath) as string | undefined,\n lastResponse: getAtPath(input, fields.lastResponse) as string | undefined,\n prompt: getAtPath(input, fields.prompt) as string | undefined,\n toolName: getAtPath(input, fields.toolName) as string | undefined,\n toolInput: getAtPath(input, fields.toolInput),\n toolOutput: getAtPath(input, fields.toolOutput),\n raw: input,\n };\n}\n\n/** Reset cached manifest — exposed for testing only. 
*/\nexport function _resetManifestCache(): void {\n cachedManifest = undefined;\n}\n"],"mappings":";;;;;;;;;;;;AAEO,SAAS,YAA6B;AAC3C,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,QAAI,OAAO;AACX,YAAQ,MAAM,GAAG,QAAQ,CAAC,UAAkB;AAAE,cAAQ;AAAA,IAAO,CAAC;AAC9D,YAAQ,MAAM,GAAG,OAAO,MAAM,QAAQ,IAAI,CAAC;AAC3C,eAAW,MAAM,QAAQ,QAAQ,IAAI,GAAG,gBAAgB;AAAA,EAC1D,CAAC;AACH;;;ACKA,IAAM,sBAAsB;AAAA,EAC1B,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,cAAc;AAAA,EACd,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,WAAW;AAAA,EACX,YAAY;AACd;AAkBA,IAAM,qBAAqB;AAG3B,IAAI;AAoBG,SAAS,iBAAiB,MAA6C;AAC5E,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,MAAM,KAAK,CAAC;AAClB,QAAI,QAAQ,cAAc;AACxB,YAAM,OAAO,KAAK,IAAI,CAAC;AACvB,UAAI,QAAQ,CAAC,KAAK,WAAW,GAAG,EAAG,QAAO;AAAA,IAC5C,WAAW,IAAI,WAAW,aAAa,GAAG;AACxC,aAAO,IAAI,MAAM,cAAc,MAAM;AAAA,IACvC;AAAA,EACF;AACA,SAAO;AACT;AAyBA,SAAS,eAAe,OAA0D;AAChF,MAAI,mBAAmB,OAAW,QAAO;AAEzC,QAAM,YAAY,cAAc;AAIhC,QAAM,WAAW,iBAAiB,QAAQ,IAAI;AAC9C,MAAI,UAAU;AACZ,UAAM,IAAI,UAAU,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AACnD,QAAI,GAAG;AACL,uBAAiB;AACjB,aAAO;AAAA,IACT;AAAA,EAGF;AAGA,aAAW,KAAK,WAAW;AACzB,QAAI,QAAQ,IAAI,EAAE,gBAAgB,GAAG;AACnC,uBAAiB;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,aAAW,KAAK,WAAW;AACzB,QAAI,EAAE,WAAW,gBAAgB,QAAQ,IAAI,EAAE,WAAW,YAAY,GAAG;AACvE,uBAAiB;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AAKA,MAAI,OAAO;AACT,UAAM,aAAuB,CAAC;AAC9B,UAAM,KAAK,MAAM;AACjB,UAAM,MAAM,MAAM;AAClB,QAAI,OAAO,OAAO,YAAY,GAAG,SAAS,EAAG,YAAW,KAAK,EAAE;AAC/D,QAAI,OAAO,QAAQ,YAAY,IAAI,SAAS,EAAG,YAAW,KAAK,GAAG;AAClE,eAAW,KAAK,WAAW;AACzB,YAAM,SAAS,IAAI,EAAE,SAAS;AAC9B,UAAI,WAAW,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM,CAAC,GAAG;AAC9C,yBAAiB;AACjB,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,mBAAiB;AACjB,SAAO;AACT;AAMO,SAAS,mBAAmB,OAAqD;AACtF,QAAM,WAAW,eAAe,KAAK;AACrC,QAAM,SAAS,UAAU,cAAc;AAGvC,QAAM,qBAAqB,UAAU,OAAO,OAAO,SAAS;AAC5D,QAAM,mBAAmB,kBAAkB,UAAU,OAAO,eACxD,QAAQ,IAAI,OAAO,YAAY,IAC/B;AACJ,QAAM,YAAY,sBACb,oBACA,QAAQ,IAAI,mBACZ,KAAK,KAAK,IAAI,CAAC;AAEpB,SAAO;AAAA,IACL,OAAO,UAAU,QAAQ;AAAA,IACzB;AAAA,IACA,gBAAgB,UAAU,OAAO,OAAO,cAAc;AAAA,IACtD,cAAc,UAAU,OAAO,OAAO,YAAY;AAAA,
IAClD,QAAQ,UAAU,OAAO,OAAO,MAAM;AAAA,IACtC,UAAU,UAAU,OAAO,OAAO,QAAQ;AAAA,IAC1C,WAAW,UAAU,OAAO,OAAO,SAAS;AAAA,IAC5C,YAAY,UAAU,OAAO,OAAO,UAAU;AAAA,IAC9C,KAAK;AAAA,EACP;AACF;","names":[]}
@@ -1,11 +1,212 @@
1
1
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
- import {
3
- getTeamMachineId,
4
- syncRow
5
- } from "./chunk-AULBWINA.js";
6
2
  import {
7
3
  getDatabase
8
4
  } from "./chunk-MYX5NCRH.js";
5
+ import {
6
+ DEFAULT_MACHINE_ID
7
+ } from "./chunk-FLLBJLHM.js";
8
+
9
+ // src/daemon/team-context.ts
10
+ var teamSyncEnabled = false;
11
+ var teamMachineId = DEFAULT_MACHINE_ID;
12
+ function initTeamContext(enabled, machineId) {
13
+ teamSyncEnabled = enabled;
14
+ teamMachineId = machineId;
15
+ }
16
+ function isTeamSyncEnabled() {
17
+ return teamSyncEnabled;
18
+ }
19
+ function getTeamMachineId() {
20
+ return teamMachineId;
21
+ }
22
+
23
+ // src/db/queries/team-outbox.ts
24
+ var BURST_BATCH_SIZE = 200;
25
+ var SENT_PRUNE_AGE_SECONDS = 86400;
26
+ var MAX_OUTBOX_RETRIES = 10;
27
+ var MS_PER_SECOND = 1e3;
28
+ var OUTBOX_COLUMNS = [
29
+ "id",
30
+ "table_name",
31
+ "row_id",
32
+ "operation",
33
+ "payload",
34
+ "machine_id",
35
+ "created_at",
36
+ "sent_at",
37
+ "retry_count",
38
+ "last_attempt_at"
39
+ ];
40
+ var SELECT_COLUMNS = OUTBOX_COLUMNS.join(", ");
41
+ function toOutboxRow(row) {
42
+ return {
43
+ id: row.id,
44
+ table_name: row.table_name,
45
+ row_id: row.row_id,
46
+ operation: row.operation,
47
+ payload: row.payload,
48
+ machine_id: row.machine_id,
49
+ created_at: row.created_at,
50
+ sent_at: row.sent_at ?? null,
51
+ retry_count: row.retry_count ?? 0,
52
+ last_attempt_at: row.last_attempt_at ?? null
53
+ };
54
+ }
55
+ function syncRow(tableName, row) {
56
+ if (!isTeamSyncEnabled()) return;
57
+ enqueueOutbox({
58
+ table_name: tableName,
59
+ row_id: String(row.id),
60
+ payload: JSON.stringify(row),
61
+ machine_id: getTeamMachineId(),
62
+ created_at: row.created_at ?? Math.floor(Date.now() / 1e3)
63
+ });
64
+ }
65
+ function enqueueOutbox(data) {
66
+ const db = getDatabase();
67
+ const info = db.prepare(
68
+ `INSERT INTO team_outbox (
69
+ table_name, row_id, operation, payload, machine_id, created_at
70
+ ) VALUES (?, ?, ?, ?, ?, ?)`
71
+ ).run(
72
+ data.table_name,
73
+ data.row_id,
74
+ data.operation ?? "upsert",
75
+ data.payload,
76
+ data.machine_id,
77
+ data.created_at
78
+ );
79
+ const id = Number(info.lastInsertRowid);
80
+ return toOutboxRow(
81
+ db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id)
82
+ );
83
+ }
84
+ function listPending(limit) {
85
+ const db = getDatabase();
86
+ const rows = db.prepare(
87
+ `SELECT ${SELECT_COLUMNS}
88
+ FROM team_outbox
89
+ WHERE sent_at IS NULL AND retry_count < ?
90
+ ORDER BY created_at ASC
91
+ LIMIT ?`
92
+ ).all(MAX_OUTBOX_RETRIES, limit ?? BURST_BATCH_SIZE);
93
+ return rows.map(toOutboxRow);
94
+ }
95
+ function markSent(ids, sentAt) {
96
+ if (ids.length === 0) return;
97
+ const db = getDatabase();
98
+ const placeholders = ids.map(() => "?").join(", ");
99
+ db.prepare(
100
+ `UPDATE team_outbox
101
+ SET sent_at = ?
102
+ WHERE id IN (${placeholders})`
103
+ ).run(sentAt, ...ids);
104
+ }
105
/**
 * Bump retry_count and record last_attempt_at for failed outbox records.
 *
 * @returns ids among the input set whose retry_count is now at or above
 *   MAX_OUTBOX_RETRIES (i.e. records that are dead-lettered after this bump).
 */
function incrementRetryCount(ids, attemptAt) {
  if (ids.length === 0) return [];
  const db = getDatabase();
  const slots = ids.map(() => "?").join(", ");
  db.prepare(
    `UPDATE team_outbox
SET retry_count = retry_count + 1, last_attempt_at = ?
WHERE id IN (${slots})`
  ).run(attemptAt, ...ids);
  // Re-read to find which of the touched records crossed the retry ceiling.
  const atLimit = db.prepare(
    `SELECT id FROM team_outbox
WHERE id IN (${slots}) AND retry_count >= ?`
  ).all(...ids, MAX_OUTBOX_RETRIES);
  return atLimit.map((record) => record.id);
}
120
/**
 * Reset every dead-lettered (unsent, retry-exhausted) record back to a
 * fresh pending state so the sync client will pick it up again.
 *
 * @returns number of records reset.
 */
function retryDeadLettered() {
  const db = getDatabase();
  const stmt = db.prepare(
    `UPDATE team_outbox
SET retry_count = 0, last_attempt_at = NULL
WHERE sent_at IS NULL AND retry_count >= ?`
  );
  return stmt.run(MAX_OUTBOX_RETRIES).changes;
}
129
/**
 * Count outbox records that were never sent and have exhausted retries.
 */
function countDeadLettered() {
  const db = getDatabase();
  const result = db
    .prepare(
      `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count >= ?`
    )
    .get(MAX_OUTBOX_RETRIES);
  return result.count;
}
136
/**
 * Delete sent outbox records older than SENT_PRUNE_AGE_SECONDS.
 *
 * @returns number of records deleted.
 */
function pruneOld() {
  const db = getDatabase();
  const nowSeconds = Math.floor(Date.now() / MS_PER_SECOND);
  const cutoff = nowSeconds - SENT_PRUNE_AGE_SECONDS;
  const stmt = db.prepare(
    `DELETE FROM team_outbox
WHERE sent_at IS NOT NULL AND sent_at < ?`
  );
  return stmt.run(cutoff).changes;
}
145
/**
 * Count pending (unsent, not dead-lettered) outbox records.
 */
function countPending() {
  const db = getDatabase();
  const result = db
    .prepare(
      `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count < ?`
    )
    .get(MAX_OUTBOX_RETRIES);
  return result.count;
}
152
// Tables eligible for backfill/sync. Membership checks go through the Set.
var BACKFILL_TABLES = [
  "sessions", "prompt_batches", "spores", "entities",
  "graph_edges", "resolution_events", "plans", "artifacts",
  "digest_extracts", "skill_candidates", "skill_records"
];
var BACKFILL_TABLE_SET = new Set(BACKFILL_TABLES);
166
/**
 * Stamp synced_at on source rows after a successful outbox flush.
 *
 * Records are grouped by table_name; tables outside BACKFILL_TABLE_SET are
 * ignored (that guard also makes the `${table}` SQL interpolation safe).
 * Only rows whose synced_at is still NULL are updated.
 */
function markSourceRowsSynced(records, syncedAt) {
  const db = getDatabase();
  const grouped = new Map();
  for (const rec of records) {
    if (!BACKFILL_TABLE_SET.has(rec.table_name)) continue;
    if (!grouped.has(rec.table_name)) grouped.set(rec.table_name, []);
    grouped.get(rec.table_name).push(rec.row_id);
  }
  for (const [table, rowIds] of grouped) {
    const slots = rowIds.map(() => "?").join(", ");
    db.prepare(
      `UPDATE ${table} SET synced_at = ? WHERE id IN (${slots}) AND synced_at IS NULL`
    ).run(syncedAt, ...rowIds);
  }
}
182
/**
 * Enqueue every unsynced row across the backfill tables into the outbox.
 *
 * For each table, selects rows where synced_at IS NULL that are not already
 * present in the outbox (matched on table_name + stringified id), then
 * inserts them in one transaction per table to keep write locks short.
 *
 * @param machineId - machine id recorded on each enqueued record.
 * @returns total number of records enqueued.
 */
function backfillUnsynced(machineId) {
  const db = getDatabase();
  const now = Math.floor(Date.now() / MS_PER_SECOND);
  let total = 0;
  for (const table of BACKFILL_TABLES) {
    const pending = db.prepare(
      `SELECT * FROM ${table}
WHERE synced_at IS NULL
AND NOT EXISTS (
SELECT 1 FROM team_outbox
WHERE team_outbox.table_name = ? AND team_outbox.row_id = CAST(${table}.id AS TEXT)
)`
    ).all(table);
    if (pending.length === 0) continue;
    // One transaction per table so a large backfill never holds a long lock.
    const insertAll = db.transaction((batch) => {
      const stmt = db.prepare(
        `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)
VALUES (?, ?, 'upsert', ?, ?, ?)`
      );
      for (const record of batch) {
        stmt.run(table, String(record.id), JSON.stringify(record), machineId, now);
      }
    });
    insertAll(pending);
    total += pending.length;
  }
  return total;
}
9
210
 
10
211
  // src/db/queries/sessions.ts
11
212
  var DEFAULT_LIST_LIMIT = 100;
@@ -37,7 +238,7 @@ var SESSION_COLUMNS = [
37
238
  "machine_id",
38
239
  "synced_at"
39
240
  ];
40
- var SELECT_COLUMNS = SESSION_COLUMNS.join(", ");
241
+ var SELECT_COLUMNS2 = SESSION_COLUMNS.join(", ");
41
242
  function toSessionRow(row) {
42
243
  return {
43
244
  id: row.id,
@@ -120,7 +321,7 @@ function upsertSession(data) {
120
321
  data.tool_count !== void 0 ? 1 : 0
121
322
  );
122
323
  const row = toSessionRow(
123
- db.prepare(`SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`).get(data.id)
324
+ db.prepare(`SELECT ${SELECT_COLUMNS2} FROM sessions WHERE id = ?`).get(data.id)
124
325
  );
125
326
  syncRow("sessions", row);
126
327
  return row;
@@ -128,7 +329,7 @@ function upsertSession(data) {
128
329
  function getSession(id) {
129
330
  const db = getDatabase();
130
331
  const row = db.prepare(
131
- `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`
332
+ `SELECT ${SELECT_COLUMNS2} FROM sessions WHERE id = ?`
132
333
  ).get(id);
133
334
  if (!row) return null;
134
335
  return toSessionRow(row);
@@ -144,6 +345,18 @@ function buildSessionsWhere(options) {
144
345
  conditions.push(`agent = ?`);
145
346
  params.push(options.agent);
146
347
  }
348
+ if (options.branch !== void 0) {
349
+ conditions.push(`branch = ?`);
350
+ params.push(options.branch);
351
+ }
352
+ if (options.user !== void 0) {
353
+ conditions.push(`"user" = ?`);
354
+ params.push(options.user);
355
+ }
356
+ if (options.id !== void 0) {
357
+ conditions.push(`id = ?`);
358
+ params.push(options.id);
359
+ }
147
360
  if (options.search !== void 0 && options.search.length > 0) {
148
361
  conditions.push(`(title LIKE ? OR id LIKE ?)`);
149
362
  const pattern = `%${options.search}%`;
@@ -167,7 +380,7 @@ function listSessions(options = {}) {
167
380
  const limit = options.limit ?? DEFAULT_LIST_LIMIT;
168
381
  const offset = options.offset ?? 0;
169
382
  const rows = db.prepare(
170
- `SELECT ${SELECT_COLUMNS}
383
+ `SELECT ${SELECT_COLUMNS2}
171
384
  FROM sessions
172
385
  ${where}
173
386
  ORDER BY created_at DESC
@@ -198,7 +411,7 @@ function reactivateSessionIfCompleted(id) {
198
411
  ).run(id);
199
412
  if (info.changes === 0) return false;
200
413
  const row = db.prepare(
201
- `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`
414
+ `SELECT ${SELECT_COLUMNS2} FROM sessions WHERE id = ?`
202
415
  ).get(id);
203
416
  if (row) syncRow("sessions", toSessionRow(row));
204
417
  return true;
@@ -328,6 +541,20 @@ function deleteSessionCascade(sessionId) {
328
541
  }
329
542
 
330
543
  export {
544
+ initTeamContext,
545
+ isTeamSyncEnabled,
546
+ getTeamMachineId,
547
+ syncRow,
548
+ enqueueOutbox,
549
+ listPending,
550
+ markSent,
551
+ incrementRetryCount,
552
+ retryDeadLettered,
553
+ countDeadLettered,
554
+ pruneOld,
555
+ countPending,
556
+ markSourceRowsSynced,
557
+ backfillUnsynced,
331
558
  upsertSession,
332
559
  getSession,
333
560
  listSessions,
@@ -340,4 +567,4 @@ export {
340
567
  getSessionImpact,
341
568
  deleteSessionCascade
342
569
  };
343
- //# sourceMappingURL=chunk-Q6OEZM3S.js.map
570
+ //# sourceMappingURL=chunk-EVDQKYCG.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/daemon/team-context.ts","../src/db/queries/team-outbox.ts","../src/db/queries/sessions.ts"],"sourcesContent":["/**\n * Module-level state for team sync.\n *\n * Initialized once by the daemon on startup. Query modules import\n * `isTeamSyncEnabled()` and `getTeamMachineId()` to decide whether\n * to enqueue outbox records on write.\n */\n\nimport { SYNC_PROTOCOL_VERSION, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Module state\n// ---------------------------------------------------------------------------\n\nlet teamSyncEnabled = false;\nlet teamMachineId = DEFAULT_MACHINE_ID;\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Initialize team context. Called once on daemon startup.\n */\nexport function initTeamContext(enabled: boolean, machineId: string): void {\n teamSyncEnabled = enabled;\n teamMachineId = machineId;\n}\n\n/**\n * Whether team sync is currently enabled.\n *\n * Query modules check this before enqueuing outbox records.\n */\nexport function isTeamSyncEnabled(): boolean {\n return teamSyncEnabled;\n}\n\n/**\n * The machine ID for this instance.\n */\nexport function getTeamMachineId(): string {\n return teamMachineId;\n}\n\n/**\n * The sync protocol version in use.\n */\nexport function getTeamSyncProtocolVersion(): number {\n return SYNC_PROTOCOL_VERSION;\n}\n\n/**\n * Reset team context (for testing).\n */\nexport function resetTeamContext(): void {\n teamSyncEnabled = false;\n teamMachineId = DEFAULT_MACHINE_ID;\n}\n","/**\n * Team outbox CRUD query helpers.\n *\n * The outbox pattern: write paths enqueue records here when team sync is enabled.\n * The sync client flushes pending records in batches to the Cloudflare Worker.\n *\n * All functions obtain the SQLite instance internally via 
`getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { isTeamSyncEnabled, getTeamMachineId } from '@myco/daemon/team-context.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Max records returned per listPending call. */\nconst BURST_BATCH_SIZE = 200;\n\n/** Age in seconds after which sent records are pruned (24 hours). */\nconst SENT_PRUNE_AGE_SECONDS = 86_400;\n\n/** Max retry attempts before a record is dead-lettered. */\nexport const MAX_OUTBOX_RETRIES = 10;\n\n/** Milliseconds-per-second multiplier for epoch math. */\nconst MS_PER_SECOND = 1000;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when enqueuing an outbox record. */\nexport interface OutboxInsert {\n table_name: string;\n row_id: string;\n operation?: string;\n payload: string;\n machine_id: string;\n created_at: number;\n}\n\n/** Row shape returned from outbox queries. 
*/\nexport interface OutboxRow {\n id: number;\n table_name: string;\n row_id: string;\n operation: string;\n payload: string;\n machine_id: string;\n created_at: number;\n sent_at: number | null;\n retry_count: number;\n last_attempt_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst OUTBOX_COLUMNS = [\n 'id',\n 'table_name',\n 'row_id',\n 'operation',\n 'payload',\n 'machine_id',\n 'created_at',\n 'sent_at',\n 'retry_count',\n 'last_attempt_at',\n] as const;\n\nconst SELECT_COLUMNS = OUTBOX_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed OutboxRow. */\nfunction toOutboxRow(row: Record<string, unknown>): OutboxRow {\n return {\n id: row.id as number,\n table_name: row.table_name as string,\n row_id: row.row_id as string,\n operation: row.operation as string,\n payload: row.payload as string,\n machine_id: row.machine_id as string,\n created_at: row.created_at as number,\n sent_at: (row.sent_at as number) ?? null,\n retry_count: (row.retry_count as number) ?? 0,\n last_attempt_at: (row.last_attempt_at as number) ?? 
null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Convenience helper — used by query modules\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a row for team sync if sync is enabled.\n *\n * Centralizes the if-enabled / enqueue / serialize pattern that every\n * write-path query module previously duplicated inline.\n */\nexport function syncRow(tableName: string, row: { id: string | number; created_at?: number }): void {\n if (!isTeamSyncEnabled()) return;\n enqueueOutbox({\n table_name: tableName,\n row_id: String(row.id),\n payload: JSON.stringify(row),\n machine_id: getTeamMachineId(),\n created_at: row.created_at ?? Math.floor(Date.now() / 1000),\n });\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a record into the team outbox for later sync.\n *\n * Inserted with `sent_at = NULL` (pending).\n */\nexport function enqueueOutbox(data: OutboxInsert): OutboxRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO team_outbox (\n table_name, row_id, operation, payload, machine_id, created_at\n ) VALUES (?, ?, ?, ?, ?, ?)`,\n ).run(\n data.table_name,\n data.row_id,\n data.operation ?? 'upsert',\n data.payload,\n data.machine_id,\n data.created_at,\n );\n\n const id = Number(info.lastInsertRowid);\n\n return toOutboxRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * List pending outbox records (oldest-first).\n *\n * Uses burst sizing: fetches BURST_BATCH_SIZE rows and returns them all.\n * If fewer than BURST_THRESHOLD rows come back, callers get a normal-size\n * batch; if more, the full burst. 
This avoids a separate COUNT query.\n */\nexport function listPending(limit?: number): OutboxRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM team_outbox\n WHERE sent_at IS NULL AND retry_count < ?\n ORDER BY created_at ASC\n LIMIT ?`,\n ).all(MAX_OUTBOX_RETRIES, limit ?? BURST_BATCH_SIZE) as Record<string, unknown>[];\n\n return rows.map(toOutboxRow);\n}\n\n/**\n * Mark outbox records as sent by setting sent_at.\n */\nexport function markSent(ids: number[], sentAt: number): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = ?\n WHERE id IN (${placeholders})`,\n ).run(sentAt, ...ids);\n}\n\n/**\n * Reset sent_at to NULL for records that need to be retried.\n *\n * This allows the sync client to re-enqueue specific records for retry.\n */\nexport function markForRetry(ids: number[]): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = NULL\n WHERE id IN (${placeholders})`,\n ).run(...ids);\n}\n\n/**\n * Increment retry_count and set last_attempt_at for failed outbox records.\n *\n * @returns IDs of records that have now reached MAX_OUTBOX_RETRIES (newly dead-lettered).\n */\nexport function incrementRetryCount(ids: number[], attemptAt: number): number[] {\n if (ids.length === 0) return [];\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET retry_count = retry_count + 1, last_attempt_at = ?\n WHERE id IN (${placeholders})`,\n ).run(attemptAt, ...ids);\n\n // Return IDs that just hit the threshold\n const deadLettered = db.prepare(\n `SELECT id FROM team_outbox\n WHERE id IN (${placeholders}) AND retry_count >= ?`,\n ).all(...ids, MAX_OUTBOX_RETRIES) as Array<{ id: number }>;\n\n return 
deadLettered.map((r) => r.id);\n}\n\n/**\n * Reset all dead-lettered records back to pending for retry.\n *\n * @returns the number of records reset.\n */\nexport function retryDeadLettered(): number {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE team_outbox\n SET retry_count = 0, last_attempt_at = NULL\n WHERE sent_at IS NULL AND retry_count >= ?`,\n ).run(MAX_OUTBOX_RETRIES);\n\n return info.changes;\n}\n\n/**\n * Count dead-lettered outbox records (exceeded max retries, never sent).\n */\nexport function countDeadLettered(): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count >= ?`,\n ).get(MAX_OUTBOX_RETRIES) as { count: number };\n\n return row.count;\n}\n\n/**\n * Prune old outbox records.\n *\n * Removes sent records older than 24 hours.\n *\n * @returns the number of records deleted.\n */\nexport function pruneOld(): number {\n const db = getDatabase();\n const cutoff = Math.floor(Date.now() / MS_PER_SECOND) - SENT_PRUNE_AGE_SECONDS;\n\n const info = db.prepare(\n `DELETE FROM team_outbox\n WHERE sent_at IS NOT NULL AND sent_at < ?`,\n ).run(cutoff);\n\n return info.changes;\n}\n\n/**\n * Count pending (unsent) outbox records.\n */\nexport function countPending(): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count < ?`,\n ).get(MAX_OUTBOX_RETRIES) as { count: number };\n\n return row.count;\n}\n\n// ---------------------------------------------------------------------------\n// Source-row sync bookkeeping\n// ---------------------------------------------------------------------------\n\n/** Tables eligible for backfill/sync (must have id, machine_id, synced_at columns). 
*/\nconst BACKFILL_TABLES = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n 'skill_candidates',\n 'skill_records',\n] as const;\n// entity_mentions excluded — no `id` column (composite key entity_id+note_id+note_type)\n// skill_usage excluded — no `synced_at` column (syncs via syncRow on insert)\n\nconst BACKFILL_TABLE_SET = new Set<string>(BACKFILL_TABLES);\n\n/**\n * Mark source rows as synced after successful outbox flush.\n *\n * Groups outbox records by table_name, then sets `synced_at` on the\n * corresponding source rows. This closes the re-enqueue loop: once\n * synced_at is non-NULL, `backfillUnsynced` skips the row even after\n * the outbox entry is pruned.\n */\nexport function markSourceRowsSynced(records: OutboxRow[], syncedAt: number): void {\n const db = getDatabase();\n\n // Group row_ids by table\n const byTable = new Map<string, string[]>();\n for (const rec of records) {\n if (!BACKFILL_TABLE_SET.has(rec.table_name)) continue;\n const ids = byTable.get(rec.table_name) ?? [];\n ids.push(rec.row_id);\n byTable.set(rec.table_name, ids);\n }\n\n for (const [table, ids] of byTable) {\n const placeholders = ids.map(() => '?').join(', ');\n db.prepare(\n `UPDATE ${table} SET synced_at = ? WHERE id IN (${placeholders}) AND synced_at IS NULL`,\n ).run(syncedAt, ...ids);\n }\n}\n\n// ---------------------------------------------------------------------------\n// Backfill\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue all unsynced records across all synced tables into the outbox.\n *\n * Scans each table for rows where `synced_at IS NULL`, serializes the full\n * row as JSON, and inserts into the outbox. 
Idempotent — re-running only\n * picks up rows not yet in the outbox (checked via existing outbox entries).\n *\n * @returns the total number of records enqueued.\n */\nexport function backfillUnsynced(machineId: string): number {\n const db = getDatabase();\n let total = 0;\n\n const now = Math.floor(Date.now() / MS_PER_SECOND);\n\n // Process one table at a time in separate transactions to avoid long locks\n for (const table of BACKFILL_TABLES) {\n const rows = db.prepare(\n `SELECT * FROM ${table}\n WHERE synced_at IS NULL\n AND NOT EXISTS (\n SELECT 1 FROM team_outbox\n WHERE team_outbox.table_name = ? AND team_outbox.row_id = CAST(${table}.id AS TEXT)\n )`,\n ).all(table) as Record<string, unknown>[];\n\n if (rows.length === 0) continue;\n\n const insertBatch = db.transaction((batchRows: Record<string, unknown>[]) => {\n const stmt = db.prepare(\n `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)\n VALUES (?, ?, 'upsert', ?, ?, ?)`,\n );\n for (const row of batchRows) {\n stmt.run(table, String(row.id), JSON.stringify(row), machineId, now);\n }\n });\n\n insertBatch(rows);\n total += rows.length;\n }\n\n return total;\n}\n\n","/**\n * Session CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of sessions returned by listSessions when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Session status value when a session is closed normally. 
*/\nconst STATUS_COMPLETED = 'completed';\n\n/** Default session status for new sessions. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default prompt count for new sessions. */\nconst DEFAULT_PROMPT_COUNT = 0;\n\n/** Default tool count for new sessions. */\nconst DEFAULT_TOOL_COUNT = 0;\n\n/** Default processed flag for new sessions. */\nconst DEFAULT_PROCESSED = 0;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting/upserting a session. */\nexport interface SessionInsert {\n id: string;\n agent: string;\n started_at: number;\n created_at: number;\n user?: string | null;\n project_root?: string | null;\n branch?: string | null;\n ended_at?: number | null;\n status?: string;\n prompt_count?: number;\n tool_count?: number;\n title?: string | null;\n summary?: string | null;\n transcript_path?: string | null;\n parent_session_id?: string | null;\n parent_session_reason?: string | null;\n processed?: number;\n content_hash?: string | null;\n machine_id?: string;\n}\n\n/** Row shape returned from session queries (all columns). */\nexport interface SessionRow {\n id: string;\n agent: string;\n user: string | null;\n project_root: string | null;\n branch: string | null;\n started_at: number;\n ended_at: number | null;\n status: string;\n prompt_count: number;\n tool_count: number;\n title: string | null;\n summary: string | null;\n transcript_path: string | null;\n parent_session_id: string | null;\n parent_session_reason: string | null;\n processed: number;\n content_hash: string | null;\n embedded: number;\n created_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Updatable fields for `updateSession`. 
*/\nexport interface SessionUpdate {\n agent?: string;\n user?: string | null;\n project_root?: string | null;\n branch?: string | null;\n ended_at?: number | null;\n status?: string;\n prompt_count?: number;\n tool_count?: number;\n title?: string | null;\n summary?: string | null;\n transcript_path?: string | null;\n parent_session_id?: string | null;\n parent_session_reason?: string | null;\n processed?: number;\n content_hash?: string | null;\n}\n\n/** Filter options for `listSessions`. */\nexport interface ListSessionsOptions {\n limit?: number;\n offset?: number;\n status?: string;\n agent?: string;\n search?: string;\n /** Filter to sessions that ran on this git branch. */\n branch?: string;\n /** Filter to sessions authored by this user. */\n user?: string;\n /** Filter to this exact session id (used for plan→session resolution). */\n id?: string;\n /** Only return sessions created after this epoch-seconds timestamp. */\n since?: number;\n /**\n * When explicitly `false` and no `status` filter is set, exclude sessions\n * still in `status = 'active'` — intelligence-task reads opt in to this.\n * Defaults permissive so UI listings keep showing in-flight sessions.\n */\n includeActive?: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst SESSION_COLUMNS = [\n 'id',\n 'agent',\n '\"user\"',\n 'project_root',\n 'branch',\n 'started_at',\n 'ended_at',\n 'status',\n 'prompt_count',\n 'tool_count',\n 'title',\n 'summary',\n 'transcript_path',\n 'parent_session_id',\n 'parent_session_reason',\n 'processed',\n 'content_hash',\n 'embedded',\n 'created_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = SESSION_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// 
---------------------------------------------------------------------------\n\n/**\n * Normalize a SQLite result row into a typed SessionRow.\n *\n * The quoted \"user\" column comes back as `user` in the result object.\n */\nfunction toSessionRow(row: Record<string, unknown>): SessionRow {\n return {\n id: row.id as string,\n agent: row.agent as string,\n user: (row.user as string) ?? null,\n project_root: (row.project_root as string) ?? null,\n branch: (row.branch as string) ?? null,\n started_at: row.started_at as number,\n ended_at: (row.ended_at as number) ?? null,\n status: row.status as string,\n prompt_count: row.prompt_count as number,\n tool_count: row.tool_count as number,\n title: (row.title as string) ?? null,\n summary: (row.summary as string) ?? null,\n transcript_path: (row.transcript_path as string) ?? null,\n parent_session_id: (row.parent_session_id as string) ?? null,\n parent_session_reason: (row.parent_session_reason as string) ?? null,\n processed: row.processed as number,\n content_hash: (row.content_hash as string) ?? null,\n embedded: (row.embedded as number) ?? 0,\n created_at: row.created_at as number,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? 
null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a session or update it if the id already exists.\n *\n * On conflict the row is updated with the values from `data`, preserving\n * any columns not supplied via COALESCE with EXCLUDED values.\n */\nexport function upsertSession(data: SessionInsert): SessionRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO sessions (\n id, agent, \"user\", project_root, branch,\n started_at, ended_at, status, prompt_count, tool_count,\n title, summary, transcript_path,\n parent_session_id, parent_session_reason,\n processed, content_hash, created_at, machine_id\n ) VALUES (\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?, ?,\n ?, ?, ?,\n ?, ?,\n ?, ?, ?, ?\n )\n ON CONFLICT (id) DO UPDATE SET\n agent = EXCLUDED.agent,\n \"user\" = EXCLUDED.\"user\",\n project_root = EXCLUDED.project_root,\n branch = EXCLUDED.branch,\n started_at = EXCLUDED.started_at,\n ended_at = COALESCE(EXCLUDED.ended_at, sessions.ended_at),\n status = COALESCE(EXCLUDED.status, sessions.status),\n prompt_count = CASE WHEN ? THEN EXCLUDED.prompt_count ELSE sessions.prompt_count END,\n tool_count = CASE WHEN ? THEN EXCLUDED.tool_count ELSE sessions.tool_count END,\n title = COALESCE(EXCLUDED.title, sessions.title),\n summary = COALESCE(EXCLUDED.summary, sessions.summary),\n transcript_path = COALESCE(EXCLUDED.transcript_path, sessions.transcript_path),\n parent_session_id = EXCLUDED.parent_session_id,\n parent_session_reason = EXCLUDED.parent_session_reason,\n processed = COALESCE(EXCLUDED.processed, sessions.processed),\n content_hash = EXCLUDED.content_hash`,\n ).run(\n data.id,\n data.agent,\n data.user ?? null,\n data.project_root ?? null,\n data.branch ?? null,\n data.started_at,\n data.ended_at ?? null,\n data.status ?? DEFAULT_STATUS,\n data.prompt_count ?? DEFAULT_PROMPT_COUNT,\n data.tool_count ?? 
DEFAULT_TOOL_COUNT,\n data.title ?? null,\n data.summary ?? null,\n data.transcript_path ?? null,\n data.parent_session_id ?? null,\n data.parent_session_reason ?? null,\n data.processed ?? DEFAULT_PROCESSED,\n data.content_hash ?? null,\n data.created_at,\n data.machine_id ?? getTeamMachineId(),\n data.prompt_count !== undefined ? 1 : 0,\n data.tool_count !== undefined ? 1 : 0,\n );\n\n const row = toSessionRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n syncRow('sessions', row);\n\n return row;\n}\n\n/**\n * Retrieve a single session by id.\n *\n * @returns the session row, or null if not found.\n */\nexport function getSession(id: string): SessionRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toSessionRow(row);\n}\n\n/** Build WHERE clause and bound params from session filter options. */\nfunction buildSessionsWhere(\n options: Omit<ListSessionsOptions, 'limit' | 'offset'>,\n): { where: string; params: unknown[] } {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n\n if (options.agent !== undefined) {\n conditions.push(`agent = ?`);\n params.push(options.agent);\n }\n\n if (options.branch !== undefined) {\n conditions.push(`branch = ?`);\n params.push(options.branch);\n }\n\n if (options.user !== undefined) {\n conditions.push(`\"user\" = ?`);\n params.push(options.user);\n }\n\n if (options.id !== undefined) {\n conditions.push(`id = ?`);\n params.push(options.id);\n }\n\n if (options.search !== undefined && options.search.length > 0) {\n conditions.push(`(title LIKE ? 
OR id LIKE ?)`);\n const pattern = `%${options.search}%`;\n params.push(pattern, pattern);\n }\n if (options.since !== undefined) {\n conditions.push('created_at > ?');\n params.push(options.since);\n }\n\n // Exclude active sessions only when the caller explicitly opts in and\n // hasn't already constrained `status`. Intelligence-task reads set this\n // to avoid picking up in-flight work; UI/CLI leave it unset.\n if (options.includeActive === false && options.status === undefined) {\n conditions.push(`status != 'active'`);\n }\n\n return {\n where: conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '',\n params,\n };\n}\n\n/**\n * List sessions with optional filters, ordered by created_at DESC.\n */\nexport function listSessions(\n options: ListSessionsOptions = {},\n): SessionRow[] {\n const db = getDatabase();\n const { where, params } = buildSessionsWhere(options);\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n const offset = options.offset ?? 0;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM sessions\n ${where}\n ORDER BY created_at DESC\n LIMIT ?\n OFFSET ?`,\n ).all(...params, limit, offset) as Record<string, unknown>[];\n\n return rows.map(toSessionRow);\n}\n\n/**\n * Count sessions matching optional filters (for pagination totals).\n */\nexport function countSessions(\n options: Omit<ListSessionsOptions, 'limit' | 'offset'> = {},\n): number {\n const db = getDatabase();\n const { where, params } = buildSessionsWhere(options);\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM sessions ${where}`,\n ).get(...params) as { count: number };\n\n return row.count;\n}\n\n/**\n * Return the set of session IDs currently in `status = 'active'`.\n *\n * Used by the semantic-search path, which can't apply a SQL join against\n * session status (the vector store is a separate concern), so it filters\n * results in-memory against this set instead. 
Bounded by the number of\n * concurrent in-flight sessions — typically small.\n */\nexport function getActiveSessionIds(): Set<string> {\n const db = getDatabase();\n const rows = db.prepare(\n `SELECT id FROM sessions WHERE status = 'active'`,\n ).all() as Array<{ id: string }>;\n return new Set(rows.map((r) => r.id));\n}\n\n/**\n * Flip a session back to `status = 'active'` if it's currently `'completed'`.\n *\n * Called on live user activity (`user_prompt` events) so a session that was\n * auto-completed by the stale sweep or manually completed via the API snaps\n * back to active transparently when the user resumes. No-op for sessions\n * that are already active or don't exist.\n *\n * The `ended_at` column is intentionally preserved — it records the most\n * recent completion time, and the next completion will overwrite it.\n *\n * @returns true if a row was updated (session was completed and is now active)\n */\nexport function reactivateSessionIfCompleted(id: string): boolean {\n const db = getDatabase();\n const info = db.prepare(\n `UPDATE sessions SET status = 'active' WHERE id = ? 
AND status = 'completed'`,\n ).run(id);\n if (info.changes === 0) return false;\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n if (row) syncRow('sessions', toSessionRow(row));\n\n return true;\n}\n\n/**\n * Update specific fields on an existing session.\n *\n * @returns the updated row, or null if the session does not exist.\n */\nexport function updateSession(\n id: string,\n updates: SessionUpdate,\n): SessionRow | null {\n const db = getDatabase();\n\n const setClauses: string[] = [];\n const params: unknown[] = [];\n\n const fieldMap: Record<string, string> = {\n agent: 'agent',\n user: '\"user\"',\n project_root: 'project_root',\n branch: 'branch',\n ended_at: 'ended_at',\n status: 'status',\n prompt_count: 'prompt_count',\n tool_count: 'tool_count',\n title: 'title',\n summary: 'summary',\n transcript_path: 'transcript_path',\n parent_session_id: 'parent_session_id',\n parent_session_reason: 'parent_session_reason',\n processed: 'processed',\n content_hash: 'content_hash',\n };\n\n for (const [key, column] of Object.entries(fieldMap)) {\n if (key in updates) {\n setClauses.push(`${column} = ?`);\n params.push((updates as Record<string, unknown>)[key] ?? 
null);\n }\n }\n\n if (setClauses.length === 0) return getSession(id);\n\n params.push(id);\n\n db.prepare(\n `UPDATE sessions\n SET ${setClauses.join(', ')}\n WHERE id = ?`,\n ).run(...params);\n\n const updated = getSession(id);\n\n if (updated) syncRow('sessions', updated);\n\n return updated;\n}\n\n/**\n * Atomically increment tool_count for a session.\n *\n * Uses SQL `tool_count + 1` to avoid read-modify-write races.\n */\nexport function incrementSessionToolCount(id: string): void {\n const db = getDatabase();\n db.prepare(\n `UPDATE sessions SET tool_count = COALESCE(tool_count, 0) + 1 WHERE id = ?`,\n ).run(id);\n}\n\n/**\n * Close a session — set status to 'completed' and record the end time.\n *\n * @returns the updated row, or null if the session does not exist.\n */\nexport function closeSession(\n id: string,\n endedAt: number,\n): SessionRow | null {\n const db = getDatabase();\n\n db.prepare(\n `UPDATE sessions\n SET status = ?, ended_at = ?\n WHERE id = ?`,\n ).run(STATUS_COMPLETED, endedAt, id);\n\n const closed = getSession(id);\n\n if (closed) syncRow('sessions', closed);\n\n return closed;\n}\n\n/**\n * Delete a session and all its child rows (batches, activities, attachments).\n *\n * No ON DELETE CASCADE in the schema, so we delete children first.\n * Returns true if the session existed and was deleted.\n */\nexport function deleteSession(id: string): boolean {\n const db = getDatabase();\n\n db.prepare(`DELETE FROM activities WHERE session_id = ?`).run(id);\n db.prepare(`DELETE FROM attachments WHERE session_id = ?`).run(id);\n db.prepare(`DELETE FROM prompt_batches WHERE session_id = ?`).run(id);\n const info = db.prepare(`DELETE FROM sessions WHERE id = ?`).run(id);\n\n return info.changes > 0;\n}\n\n// ---------------------------------------------------------------------------\n// Cascade delete + impact query\n// ---------------------------------------------------------------------------\n\n/** Counts of related data that would be 
affected by a session delete. */\nexport interface SessionImpact {\n promptCount: number;\n sporeCount: number;\n attachmentCount: number;\n graphEdgeCount: number;\n}\n\n/** Result of a cascade delete operation. */\nexport interface DeleteCascadeResult {\n deleted: boolean;\n counts: {\n prompts: number;\n spores: number;\n attachments: number;\n graphEdges: number;\n resolutionEvents: number;\n };\n /** Spore IDs that were deleted (needed for vault file + vector cleanup). */\n deletedSporeIds: string[];\n /** Attachment file paths that were deleted from DB (needed for disk cleanup). */\n deletedAttachmentPaths: string[];\n}\n\n/**\n * Get counts of all data related to a session, for pre-delete impact display.\n */\nexport function getSessionImpact(sessionId: string): SessionImpact {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT\n (SELECT COUNT(*) FROM prompt_batches WHERE session_id = ?) AS promptCount,\n (SELECT COUNT(*) FROM spores WHERE session_id = ?) AS sporeCount,\n (SELECT COUNT(*) FROM attachments WHERE session_id = ?) AS attachmentCount,\n (SELECT COUNT(*) FROM graph_edges WHERE session_id = ?) 
AS graphEdgeCount`,\n ).get(sessionId, sessionId, sessionId, sessionId) as SessionImpact;\n\n return row;\n}\n\n/**\n * Delete a session and ALL related data in a single transaction.\n *\n * Returns counts of deleted rows and IDs needed for post-transaction\n * cleanup (vault files, embedding vectors).\n */\nexport function deleteSessionCascade(sessionId: string): DeleteCascadeResult {\n const db = getDatabase();\n\n const zeroCounts: DeleteCascadeResult = {\n deleted: false,\n counts: { prompts: 0, spores: 0, attachments: 0, graphEdges: 0, resolutionEvents: 0 },\n deletedSporeIds: [],\n deletedAttachmentPaths: [],\n };\n\n // Check session exists first\n const exists = db.prepare(`SELECT id FROM sessions WHERE id = ?`).get(sessionId);\n if (!exists) return zeroCounts;\n\n // Collect IDs/paths needed for post-transaction cleanup before deleting.\n // Spores can reference prompt_batches from a different session (cross-session\n // spore linkage), so we must also collect spores linked via prompt_batch_id.\n const sporeIds = (db.prepare(\n `SELECT id FROM spores\n WHERE session_id = ?\n OR prompt_batch_id IN (SELECT id FROM prompt_batches WHERE session_id = ?)`,\n ).all(sessionId, sessionId) as { id: string }[]).map((r) => r.id);\n\n const attachmentPaths = (db.prepare(\n `SELECT file_path FROM attachments WHERE session_id = ?`,\n ).all(sessionId) as { file_path: string }[]).map((r) => r.file_path);\n\n // Run all deletes in a single transaction.\n //\n // Order matters — foreign_keys = ON is set in client.ts, so every DELETE\n // is checked immediately. 
Child rows must be removed before their parents:\n // - spores.prompt_batch_id → prompt_batches(id) [spores BEFORE prompt_batches]\n // - plans.prompt_batch_id → prompt_batches(id) [plans BEFORE prompt_batches]\n // - resolution_events.spore_id → spores(id) [resolution_events BEFORE spores]\n // - skill_usage.session_id → sessions(id) NOT NULL\n // - plans.session_id → sessions(id)\n // resolution_events can reference spores across sessions (e.g. a later\n // session supersedes an earlier session's spore), so we match by either\n // session_id OR spore_id-in-this-session to catch cross-session references.\n //\n // Spores can also reference prompt_batches from a different session\n // (cross-session prompt_batch_id linkage). We must delete those spores\n // BEFORE deleting prompt_batches to avoid FK violations.\n const result = db.transaction(() => {\n db.prepare(`DELETE FROM activities WHERE session_id = ?`).run(sessionId);\n const attachments = db.prepare(`DELETE FROM attachments WHERE session_id = ?`).run(sessionId);\n db.prepare(`DELETE FROM plans WHERE session_id = ?`).run(sessionId);\n db.prepare(`DELETE FROM skill_usage WHERE session_id = ?`).run(sessionId);\n const resEvents = db.prepare(\n `DELETE FROM resolution_events\n WHERE session_id = ?\n OR spore_id IN (\n SELECT id FROM spores\n WHERE session_id = ?\n OR prompt_batch_id IN (SELECT id FROM prompt_batches WHERE session_id = ?)\n )`,\n ).run(sessionId, sessionId, sessionId);\n const edges = db.prepare(`DELETE FROM graph_edges WHERE session_id = ?`).run(sessionId);\n const spores = db.prepare(\n `DELETE FROM spores\n WHERE session_id = ?\n OR prompt_batch_id IN (SELECT id FROM prompt_batches WHERE session_id = ?)`,\n ).run(sessionId, sessionId);\n const prompts = db.prepare(`DELETE FROM prompt_batches WHERE session_id = ?`).run(sessionId);\n const session = db.prepare(`DELETE FROM sessions WHERE id = ?`).run(sessionId);\n\n return {\n deleted: session.changes > 0,\n counts: {\n prompts: 
prompts.changes,\n spores: spores.changes,\n attachments: attachments.changes,\n graphEdges: edges.changes,\n resolutionEvents: resEvents.changes,\n },\n };\n })();\n\n return {\n ...result,\n deletedSporeIds: sporeIds,\n deletedAttachmentPaths: attachmentPaths,\n };\n}\n"],"mappings":";;;;;;;;;AAcA,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AASb,SAAS,gBAAgB,SAAkB,WAAyB;AACzE,oBAAkB;AAClB,kBAAgB;AAClB;AAOO,SAAS,oBAA6B;AAC3C,SAAO;AACT;AAKO,SAAS,mBAA2B;AACzC,SAAO;AACT;;;ACzBA,IAAM,mBAAmB;AAGzB,IAAM,yBAAyB;AAGxB,IAAM,qBAAqB;AAGlC,IAAM,gBAAgB;AAkCtB,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,eAAe,KAAK,IAAI;AAO/C,SAAS,YAAY,KAAyC;AAC5D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,QAAQ,IAAI;AAAA,IACZ,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,YAAY,IAAI;AAAA,IAChB,YAAY,IAAI;AAAA,IAChB,SAAU,IAAI,WAAsB;AAAA,IACpC,aAAc,IAAI,eAA0B;AAAA,IAC5C,iBAAkB,IAAI,mBAA8B;AAAA,EACtD;AACF;AAYO,SAAS,QAAQ,WAAmB,KAAyD;AAClG,MAAI,CAAC,kBAAkB,EAAG;AAC1B,gBAAc;AAAA,IACZ,YAAY;AAAA,IACZ,QAAQ,OAAO,IAAI,EAAE;AAAA,IACrB,SAAS,KAAK,UAAU,GAAG;AAAA,IAC3B,YAAY,iBAAiB;AAAA,IAC7B,YAAY,IAAI,cAAc,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAAA,EAC5D,CAAC;AACH;AAWO,SAAS,cAAc,MAA+B;AAC3D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,aAAa;AAAA,IAClB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAEA,QAAM,KAAK,OAAO,KAAK,eAAe;AAEtC,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AACF;AASO,SAAS,YAAY,OAA6B;AACvD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA,EAK1B,EAAE,IAAI,oBAAoB,SAAS,gBAAgB;AAEnD,SAAO,KAAK,IAAI,WAAW;AAC7B;AAKO,SAAS,SAAS,KAAe,QAAsB;AAC5D,MAAI,IAAI,WAAW,EAAG;AAEtB,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,QAAQ,GAAG,GAAG;AACtB;AAyBO,SAAS,oBAAoB,KAAe,WAA6B;AAC9E,MAAI,IAAI,WAAW,EAAG,QAAO,CAAC;AAE9B,QAAM,KAAK,YAAY;AACvB,QAAM
,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,WAAW,GAAG,GAAG;AAGvB,QAAM,eAAe,GAAG;AAAA,IACtB;AAAA,oBACgB,YAAY;AAAA,EAC9B,EAAE,IAAI,GAAG,KAAK,kBAAkB;AAEhC,SAAO,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE;AACrC;AAOO,SAAS,oBAA4B;AAC1C,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB;AAExB,SAAO,KAAK;AACd;AAKO,SAAS,oBAA4B;AAC1C,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,kBAAkB;AAExB,SAAO,IAAI;AACb;AASO,SAAS,WAAmB;AACjC,QAAM,KAAK,YAAY;AACvB,QAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa,IAAI;AAExD,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,EAEF,EAAE,IAAI,MAAM;AAEZ,SAAO,KAAK;AACd;AAKO,SAAS,eAAuB;AACrC,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,kBAAkB;AAExB,SAAO,IAAI;AACb;AAOA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAIA,IAAM,qBAAqB,IAAI,IAAY,eAAe;AAUnD,SAAS,qBAAqB,SAAsB,UAAwB;AACjF,QAAM,KAAK,YAAY;AAGvB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,mBAAmB,IAAI,IAAI,UAAU,EAAG;AAC7C,UAAM,MAAM,QAAQ,IAAI,IAAI,UAAU,KAAK,CAAC;AAC5C,QAAI,KAAK,IAAI,MAAM;AACnB,YAAQ,IAAI,IAAI,YAAY,GAAG;AAAA,EACjC;AAEA,aAAW,CAAC,OAAO,GAAG,KAAK,SAAS;AAClC,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACjD,OAAG;AAAA,MACD,UAAU,KAAK,mCAAmC,YAAY;AAAA,IAChE,EAAE,IAAI,UAAU,GAAG,GAAG;AAAA,EACxB;AACF;AAeO,SAAS,iBAAiB,WAA2B;AAC1D,QAAM,KAAK,YAAY;AACvB,MAAI,QAAQ;AAEZ,QAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa;AAGjD,aAAW,SAAS,iBAAiB;AACnC,UAAM,OAAO,GAAG;AAAA,MACd,iBAAiB,KAAK;AAAA;AAAA;AAAA;AAAA,0EAI8C,KAAK;AAAA;AAAA,IAE3E,EAAE,IAAI,KAAK;AAEX,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,cAAc,GAAG,YAAY,CAAC,cAAyC;AAC3E,YAAM,OAAO,GAAG;AAAA,QACd;AAAA;AAAA,MAEF;AACA,iBAAW,OAAO,WAAW;AAC3B,aAAK,IAAI,OAAO,OAAO,IAAI,EAAE,GAAG,KAAK,UAAU,GAAG,GAAG,WAAW,GAAG;AAAA,MACrE;AAAA,IACF,CAAC;AAED,gBAAY,IAAI;AAChB,aAAS,KAAK;AAAA,EAChB;AAEA,SAAO;AACT;;;ACtXA,IAAM,qBAAqB;AAG3B,IAAM,mBAAmB;AAGzB,IAAM,iBAAiB;AAGvB,IAAM,uBAAuB;AAG7B,IAAM,qBAAqB;AAG3
B,IAAM,oBAAoB;AAoG1B,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMA,kBAAiB,gBAAgB,KAAK,IAAI;AAWhD,SAAS,aAAa,KAA0C;AAC9D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAO,IAAI,QAAmB;AAAA,IAC9B,cAAe,IAAI,gBAA2B;AAAA,IAC9C,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAY,IAAI;AAAA,IAChB,UAAW,IAAI,YAAuB;AAAA,IACtC,QAAQ,IAAI;AAAA,IACZ,cAAc,IAAI;AAAA,IAClB,YAAY,IAAI;AAAA,IAChB,OAAQ,IAAI,SAAoB;AAAA,IAChC,SAAU,IAAI,WAAsB;AAAA,IACpC,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,mBAAoB,IAAI,qBAAgC;AAAA,IACxD,uBAAwB,IAAI,yBAAoC;AAAA,IAChE,WAAW,IAAI;AAAA,IACf,cAAe,IAAI,gBAA2B;AAAA,IAC9C,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAYO,SAAS,cAAc,MAAiC;AAC7D,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,QAAQ;AAAA,IACb,KAAK,gBAAgB;AAAA,IACrB,KAAK,UAAU;AAAA,IACf,KAAK;AAAA,IACL,KAAK,YAAY;AAAA,IACjB,KAAK,UAAU;AAAA,IACf,KAAK,gBAAgB;AAAA,IACrB,KAAK,cAAc;AAAA,IACnB,KAAK,SAAS;AAAA,IACd,KAAK,WAAW;AAAA,IAChB,KAAK,mBAAmB;AAAA,IACxB,KAAK,qBAAqB;AAAA,IAC1B,KAAK,yBAAyB;AAAA,IAC9B,KAAK,aAAa;AAAA,IAClB,KAAK,gBAAgB;AAAA,IACrB,KAAK;AAAA,IACL,KAAK,cAAc,iBAAiB;AAAA,IACpC,KAAK,iBAAiB,SAAY,IAAI;AAAA,IACtC,KAAK,eAAe,SAAY,IAAI;AAAA,EACtC;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUA,eAAc,6BAA6B,EAAE,IAAI,KAAK,EAAE;AAAA,EAC/E;AAEA,UAAQ,YAAY,GAAG;AAEvB,SAAO;AACT;AAOO,SAAS,WAAW,IAA+B;AACxD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,aAAa,GAAG;AACzB;AAGA,SAAS,mBACP,SACsC;AACtC,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,MAAI,QAAQ,UAAU,QA
AW;AAC/B,eAAW,KAAK,WAAW;AAC3B,WAAO,KAAK,QAAQ,KAAK;AAAA,EAC3B;AAEA,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,MAAI,QAAQ,SAAS,QAAW;AAC9B,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,IAAI;AAAA,EAC1B;AAEA,MAAI,QAAQ,OAAO,QAAW;AAC5B,eAAW,KAAK,QAAQ;AACxB,WAAO,KAAK,QAAQ,EAAE;AAAA,EACxB;AAEA,MAAI,QAAQ,WAAW,UAAa,QAAQ,OAAO,SAAS,GAAG;AAC7D,eAAW,KAAK,6BAA6B;AAC7C,UAAM,UAAU,IAAI,QAAQ,MAAM;AAClC,WAAO,KAAK,SAAS,OAAO;AAAA,EAC9B;AACA,MAAI,QAAQ,UAAU,QAAW;AAC/B,eAAW,KAAK,gBAAgB;AAChC,WAAO,KAAK,QAAQ,KAAK;AAAA,EAC3B;AAKA,MAAI,QAAQ,kBAAkB,SAAS,QAAQ,WAAW,QAAW;AACnE,eAAW,KAAK,oBAAoB;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,OAAO,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAAA,IACrE;AAAA,EACF;AACF;AAKO,SAAS,aACd,UAA+B,CAAC,GAClB;AACd,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,mBAAmB,OAAO;AACpD,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,QAAQ,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,YAAY;AAC9B;AAKO,SAAS,cACd,UAAyD,CAAC,GAClD;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,mBAAmB,OAAO;AAEpD,QAAM,MAAM,GAAG;AAAA,IACb,0CAA0C,KAAK;AAAA,EACjD,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,IAAI;AACb;AAUO,SAAS,sBAAmC;AACjD,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI;AACN,SAAO,IAAI,IAAI,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AACtC;AAeO,SAAS,6BAA6B,IAAqB;AAChE,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI,EAAE;AACR,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AACR,MAAI,IAAK,SAAQ,YAAY,aAAa,GAAG,CAAC;AAE9C,SAAO;AACT;AAOO,SAAS,cACd,IACA,SACmB;AACnB,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,QAAM,WAAmC;AAAA,IACvC,OAAO;AAAA,IACP,MAAM;AAAA,IACN,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,OAAO;AAAA,IACP,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,uBAAuB;AAAA,IACvB,WAAW;AAAA,IACX,cAAc;AAAA,EAChB;AAEA,aAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAC
pD,QAAI,OAAO,SAAS;AAClB,iBAAW,KAAK,GAAG,MAAM,MAAM;AAC/B,aAAO,KAAM,QAAoC,GAAG,KAAK,IAAI;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,WAAW,EAAE;AAEjD,SAAO,KAAK,EAAE;AAEd,KAAG;AAAA,IACD;AAAA,WACO,WAAW,KAAK,IAAI,CAAC;AAAA;AAAA,EAE9B,EAAE,IAAI,GAAG,MAAM;AAEf,QAAM,UAAU,WAAW,EAAE;AAE7B,MAAI,QAAS,SAAQ,YAAY,OAAO;AAExC,SAAO;AACT;AAOO,SAAS,0BAA0B,IAAkB;AAC1D,QAAM,KAAK,YAAY;AACvB,KAAG;AAAA,IACD;AAAA,EACF,EAAE,IAAI,EAAE;AACV;AAOO,SAAS,aACd,IACA,SACmB;AACnB,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB,SAAS,EAAE;AAEnC,QAAM,SAAS,WAAW,EAAE;AAE5B,MAAI,OAAQ,SAAQ,YAAY,MAAM;AAEtC,SAAO;AACT;AAkDO,SAAS,iBAAiB,WAAkC;AACjE,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,WAAW,WAAW,WAAW,SAAS;AAEhD,SAAO;AACT;AAQO,SAAS,qBAAqB,WAAwC;AAC3E,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAkC;AAAA,IACtC,SAAS;AAAA,IACT,QAAQ,EAAE,SAAS,GAAG,QAAQ,GAAG,aAAa,GAAG,YAAY,GAAG,kBAAkB,EAAE;AAAA,IACpF,iBAAiB,CAAC;AAAA,IAClB,wBAAwB,CAAC;AAAA,EAC3B;AAGA,QAAM,SAAS,GAAG,QAAQ,sCAAsC,EAAE,IAAI,SAAS;AAC/E,MAAI,CAAC,OAAQ,QAAO;AAKpB,QAAM,WAAY,GAAG;AAAA,IACnB;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,WAAW,SAAS,EAAuB,IAAI,CAAC,MAAM,EAAE,EAAE;AAEhE,QAAM,kBAAmB,GAAG;AAAA,IAC1B;AAAA,EACF,EAAE,IAAI,SAAS,EAA8B,IAAI,CAAC,MAAM,EAAE,SAAS;AAkBnE,QAAM,SAAS,GAAG,YAAY,MAAM;AAClC,OAAG,QAAQ,6CAA6C,EAAE,IAAI,SAAS;AACvE,UAAM,cAAc,GAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AAC5F,OAAG,QAAQ,wCAAwC,EAAE,IAAI,SAAS;AAClE,OAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AACxE,UAAM,YAAY,GAAG;AAAA,MACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOF,EAAE,IAAI,WAAW,WAAW,SAAS;AACrC,UAAM,QAAQ,GAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AACtF,UAAM,SAAS,GAAG;AAAA,MAChB;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,WAAW,SAAS;AAC1B,UAAM,UAAU,GAAG,QAAQ,iDAAiD,EAAE,IAAI,SAAS;AAC3F,UAAM,UAAU,GAAG,QAAQ,mCAAmC,EAAE,IAAI,SAAS;AAE7E,WAAO;AAAA,MACL,SAAS,QAAQ,UAAU;AAAA,MAC3B,QAAQ;AAAA,QACN,SAAS,QAAQ;AAAA,QACjB,QAAQ,OAAO;AAAA,QACf,aAAa,YAAY;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,kBAAkB,UAAU;AAAA,MAC9B;AAAA,IACF;AAAA,EACF,CAAC,EAAE;AAEH,SAAO;AAAA,IACL,GAAG;AAAA,IACH,iBAAiB;AAAA,IACjB,wBAAwB;A
AAA,EAC1B;AACF;","names":["SELECT_COLUMNS"]}
@@ -36,6 +36,7 @@ var CONTENT_HASH_ALGORITHM = "sha256";
36
36
  var PROMPT_PREVIEW_CHARS = 300;
37
37
  var CONTENT_SNIPPET_CHARS = 120;
38
38
  var TOOL_OUTPUT_PREVIEW_CHARS = 200;
39
+ var SESSION_SUMMARY_PREVIEW_CHARS = 300;
39
40
  var SEARCH_PREVIEW_CHARS = 300;
40
41
  var LOG_PROMPT_PREVIEW_CHARS = 50;
41
42
  var LOG_MESSAGE_PREVIEW_CHARS = 80;
@@ -125,6 +126,7 @@ export {
125
126
  PROMPT_PREVIEW_CHARS,
126
127
  CONTENT_SNIPPET_CHARS,
127
128
  TOOL_OUTPUT_PREVIEW_CHARS,
129
+ SESSION_SUMMARY_PREVIEW_CHARS,
128
130
  SEARCH_PREVIEW_CHARS,
129
131
  LOG_PROMPT_PREVIEW_CHARS,
130
132
  LOG_MESSAGE_PREVIEW_CHARS,
@@ -189,4 +191,4 @@ export {
189
191
  WRANGLER_COMMAND_TIMEOUT_MS,
190
192
  RESTART_RESPONSE_FLUSH_MS
191
193
  };
192
- //# sourceMappingURL=chunk-I54KLC6H.js.map
194
+ //# sourceMappingURL=chunk-FLLBJLHM.js.map