@goondocks/myco 0.12.10 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -0
- package/dist/{agent-run-GUHXRNZB.js → agent-run-3WLMSLMJ.js} +8 -7
- package/dist/{agent-run-GUHXRNZB.js.map → agent-run-3WLMSLMJ.js.map} +1 -1
- package/dist/{agent-tasks-GWZI5WSB.js → agent-tasks-BQE2GEVS.js} +8 -7
- package/dist/{agent-tasks-GWZI5WSB.js.map → agent-tasks-BQE2GEVS.js.map} +1 -1
- package/dist/{chunk-PW5QVY44.js → chunk-4VF6KQ2Z.js} +371 -6
- package/dist/chunk-4VF6KQ2Z.js.map +1 -0
- package/dist/{chunk-QL2RBFIC.js → chunk-5YQ6VOFZ.js} +2 -2
- package/dist/{chunk-MCARJFBA.js → chunk-745SWTQX.js} +4 -4
- package/dist/{chunk-D4M2AV65.js → chunk-DTE3SHYK.js} +46 -3
- package/dist/chunk-DTE3SHYK.js.map +1 -0
- package/dist/{chunk-HLGPGHT7.js → chunk-EVPG26CR.js} +3 -3
- package/dist/{chunk-YRUJ5KGV.js → chunk-GDCSPMH4.js} +3 -3
- package/dist/{chunk-LUQBT2Y4.js → chunk-HHZ3RTEI.js} +2 -2
- package/dist/{chunk-TCGOSLW6.js → chunk-JJXVDCEX.js} +613 -98
- package/dist/chunk-JJXVDCEX.js.map +1 -0
- package/dist/{chunk-D4ESHOOJ.js → chunk-KNTJOMWY.js} +3 -3
- package/dist/{chunk-JHLALJPB.js → chunk-LD6U3L6O.js} +8 -8
- package/dist/{chunk-7NBDELZB.js → chunk-NVCGF2DS.js} +26 -8
- package/dist/{chunk-7NBDELZB.js.map → chunk-NVCGF2DS.js.map} +1 -1
- package/dist/{chunk-SW62AX75.js → chunk-OKCSSDFC.js} +2 -2
- package/dist/{chunk-RR75ZKEV.js → chunk-OQVKLTQY.js} +4 -4
- package/dist/{chunk-OAGY5APE.js → chunk-PSYLKCWQ.js} +83 -7
- package/dist/chunk-PSYLKCWQ.js.map +1 -0
- package/dist/{chunk-Q2AYS2QE.js → chunk-PX5KIOKY.js} +5 -7
- package/dist/chunk-PX5KIOKY.js.map +1 -0
- package/dist/chunk-QLCD77AN.js +93 -0
- package/dist/chunk-QLCD77AN.js.map +1 -0
- package/dist/{chunk-J3L2RTYK.js → chunk-RBFECYNA.js} +2 -2
- package/dist/{chunk-4VSNNMEU.js → chunk-S66YG6QK.js} +26 -3
- package/dist/{chunk-4VSNNMEU.js.map → chunk-S66YG6QK.js.map} +1 -1
- package/dist/chunk-SBVDG5JP.js +112 -0
- package/dist/chunk-SBVDG5JP.js.map +1 -0
- package/dist/{chunk-M5XWW7UI.js → chunk-SODRR3HE.js} +8 -1
- package/dist/chunk-SODRR3HE.js.map +1 -0
- package/dist/{chunk-SFC4GXEN.js → chunk-TNCBMGWB.js} +39 -104
- package/dist/chunk-TNCBMGWB.js.map +1 -0
- package/dist/{chunk-PIRWYDOH.js → chunk-TRA3R4EC.js} +7 -1
- package/dist/chunk-TRA3R4EC.js.map +1 -0
- package/dist/{chunk-LGPBVBFY.js → chunk-TVV6PZOC.js} +5 -7
- package/dist/chunk-TVV6PZOC.js.map +1 -0
- package/dist/{chunk-SVQAPEYH.js → chunk-UWXJCLCK.js} +22 -5
- package/dist/chunk-UWXJCLCK.js.map +1 -0
- package/dist/{chunk-JROOQQH6.js → chunk-X34OFKYU.js} +12 -7
- package/dist/chunk-X34OFKYU.js.map +1 -0
- package/dist/{chunk-FYGFMIS6.js → chunk-ZKXW46HZ.js} +2 -2
- package/dist/{cli-CFOIDXOY.js → cli-SKCINMJI.js} +41 -40
- package/dist/{cli-CFOIDXOY.js.map → cli-SKCINMJI.js.map} +1 -1
- package/dist/{client-ZBCGODTS.js → client-KZGZHHXT.js} +5 -5
- package/dist/{config-6RQ7FAEV.js → config-H657SF6B.js} +5 -3
- package/dist/{config-6RQ7FAEV.js.map → config-H657SF6B.js.map} +1 -1
- package/dist/{detect-providers-JFE3QLJI.js → detect-providers-PAVE2X6O.js} +4 -4
- package/dist/{doctor-W3I7RVU4.js → doctor-6FKSHJRU.js} +13 -11
- package/dist/{doctor-W3I7RVU4.js.map → doctor-6FKSHJRU.js.map} +1 -1
- package/dist/{executor-LKDWMGC7.js → executor-W5MKZH7B.js} +407 -46
- package/dist/executor-W5MKZH7B.js.map +1 -0
- package/dist/{init-M3HYJGHE.js → init-5QHCXBLF.js} +16 -15
- package/dist/{init-M3HYJGHE.js.map → init-5QHCXBLF.js.map} +1 -1
- package/dist/{init-wizard-C4WQA47U.js → init-wizard-HEY4HMG3.js} +12 -12
- package/dist/installer-25TSX4SR.js +13 -0
- package/dist/{llm-O46QYWEM.js → llm-T3QVHC3Y.js} +7 -7
- package/dist/{loader-4FMGOVWF.js → loader-JQLO6K44.js} +4 -2
- package/dist/{loader-BQ4X4K3F.js → loader-WQKVWL5D.js} +4 -4
- package/dist/{main-CMWNMCW2.js → main-IZ277SHB.js} +689 -124
- package/dist/main-IZ277SHB.js.map +1 -0
- package/dist/{open-4N7T37XV.js → open-S7YUWON4.js} +8 -7
- package/dist/{open-4N7T37XV.js.map → open-S7YUWON4.js.map} +1 -1
- package/dist/{openai-embeddings-HWAKOGUM.js → openai-embeddings-5T5ZP7LO.js} +4 -4
- package/dist/{openrouter-GXZK7JXR.js → openrouter-RD2COFC7.js} +4 -4
- package/dist/{post-compact-BPICHUPV.js → post-compact-EFKFT7TM.js} +7 -7
- package/dist/{post-tool-use-OHJ2EH7I.js → post-tool-use-624YC6ZH.js} +7 -7
- package/dist/{post-tool-use-failure-CBPY2TSN.js → post-tool-use-failure-QCHZAWQH.js} +7 -7
- package/dist/{pre-compact-ULAA4XIB.js → pre-compact-7DWH2EM3.js} +7 -7
- package/dist/{provider-check-CKZW3GQX.js → provider-check-QN7OGXZA.js} +4 -4
- package/dist/{registry-ZHUVXGPO.js → registry-2XQMCPA6.js} +5 -5
- package/dist/{remove-52PTVOCJ.js → remove-ESVIET5C.js} +10 -8
- package/dist/{remove-52PTVOCJ.js.map → remove-ESVIET5C.js.map} +1 -1
- package/dist/{resolution-events-WZHPQQMN.js → resolution-events-5EVUEWHS.js} +4 -4
- package/dist/{restart-O37BUPLH.js → restart-AZHV6OKM.js} +9 -8
- package/dist/{restart-O37BUPLH.js.map → restart-AZHV6OKM.js.map} +1 -1
- package/dist/{search-52YK2ZWU.js → search-JS4HXYGS.js} +9 -8
- package/dist/{search-52YK2ZWU.js.map → search-JS4HXYGS.js.map} +1 -1
- package/dist/{server-7OKRAJCM.js → server-KT5GW333.js} +115 -14
- package/dist/server-KT5GW333.js.map +1 -0
- package/dist/{session-57IAZYRK.js → session-JSI67FEM.js} +10 -9
- package/dist/{session-57IAZYRK.js.map → session-JSI67FEM.js.map} +1 -1
- package/dist/{session-end-WRKDJEWM.js → session-end-4CM462MC.js} +6 -6
- package/dist/{session-start-7VWGEVOX.js → session-start-ZGF7F6DE.js} +12 -12
- package/dist/{setup-llm-IDQPX22O.js → setup-llm-S2UYJYIS.js} +10 -9
- package/dist/{setup-llm-IDQPX22O.js.map → setup-llm-S2UYJYIS.js.map} +1 -1
- package/dist/src/agent/definitions/agent.yaml +2 -0
- package/dist/src/agent/definitions/tasks/digest-only.yaml +1 -0
- package/dist/src/agent/definitions/tasks/extract-only.yaml +1 -0
- package/dist/src/agent/definitions/tasks/full-intelligence.yaml +8 -0
- package/dist/src/agent/definitions/tasks/graph-maintenance.yaml +1 -0
- package/dist/src/agent/definitions/tasks/review-session.yaml +1 -0
- package/dist/src/agent/definitions/tasks/skill-evolve.yaml +155 -0
- package/dist/src/agent/definitions/tasks/skill-generate.yaml +210 -0
- package/dist/src/agent/definitions/tasks/skill-survey.yaml +149 -0
- package/dist/src/agent/definitions/tasks/supersession-sweep.yaml +1 -0
- package/dist/src/agent/definitions/tasks/title-summary.yaml +1 -0
- package/dist/src/agent/prompts/agent.md +29 -0
- package/dist/src/cli.js +1 -1
- package/dist/src/daemon/main.js +1 -1
- package/dist/src/hooks/post-tool-use.js +1 -1
- package/dist/src/hooks/session-end.js +1 -1
- package/dist/src/hooks/session-start.js +1 -1
- package/dist/src/hooks/stop.js +1 -1
- package/dist/src/hooks/user-prompt-submit.js +1 -1
- package/dist/src/mcp/server.js +1 -1
- package/dist/src/worker/src/index.ts +3 -0
- package/dist/src/worker/src/schema.ts +56 -0
- package/dist/{stats-D7U5HQ3L.js → stats-D2FM6ZXO.js} +10 -9
- package/dist/{stats-D7U5HQ3L.js.map → stats-D2FM6ZXO.js.map} +1 -1
- package/dist/{stop-VJU4AAOQ.js → stop-DQEKVNST.js} +6 -6
- package/dist/{stop-failure-ILPHO26U.js → stop-failure-EHH7AN5E.js} +7 -7
- package/dist/{subagent-start-77MY4UMP.js → subagent-start-6R52PAFA.js} +7 -7
- package/dist/{subagent-stop-DABERMXZ.js → subagent-stop-CLDFJKYQ.js} +7 -7
- package/dist/{task-completed-TBWBOAJ6.js → task-completed-V47JA3OV.js} +7 -7
- package/dist/{team-K6H4B3ZD.js → team-SJPDXELY.js} +45 -19
- package/dist/team-SJPDXELY.js.map +1 -0
- package/dist/turns-3ZQAF6HF.js +16 -0
- package/dist/turns-3ZQAF6HF.js.map +1 -0
- package/dist/ui/assets/index-BmsHIwjl.css +1 -0
- package/dist/ui/assets/index-Cn6cQwJy.js +842 -0
- package/dist/ui/index.html +2 -2
- package/dist/{update-GW774ZMW.js → update-ZNIYDQHJ.js} +8 -7
- package/dist/{update-GW774ZMW.js.map → update-ZNIYDQHJ.js.map} +1 -1
- package/dist/{user-prompt-submit-C47Y5Y5I.js → user-prompt-submit-6TX6VECI.js} +6 -6
- package/dist/{verify-MQAANTUR.js → verify-JHIMXTY5.js} +8 -8
- package/dist/{version-42DQW43N.js → version-UMEN7OJU.js} +2 -2
- package/dist/version-UMEN7OJU.js.map +1 -0
- package/package.json +6 -6
- package/dist/chunk-D4M2AV65.js.map +0 -1
- package/dist/chunk-JROOQQH6.js.map +0 -1
- package/dist/chunk-LGPBVBFY.js.map +0 -1
- package/dist/chunk-M5XWW7UI.js.map +0 -1
- package/dist/chunk-OAGY5APE.js.map +0 -1
- package/dist/chunk-PIRWYDOH.js.map +0 -1
- package/dist/chunk-PW5QVY44.js.map +0 -1
- package/dist/chunk-Q2AYS2QE.js.map +0 -1
- package/dist/chunk-SFC4GXEN.js.map +0 -1
- package/dist/chunk-SVQAPEYH.js.map +0 -1
- package/dist/chunk-TCGOSLW6.js.map +0 -1
- package/dist/executor-LKDWMGC7.js.map +0 -1
- package/dist/main-CMWNMCW2.js.map +0 -1
- package/dist/server-7OKRAJCM.js.map +0 -1
- package/dist/team-K6H4B3ZD.js.map +0 -1
- package/dist/ui/assets/index-BGbil7f1.css +0 -1
- package/dist/ui/assets/index-ZSGlKT25.js +0 -804
- /package/dist/{chunk-QL2RBFIC.js.map → chunk-5YQ6VOFZ.js.map} +0 -0
- /package/dist/{chunk-MCARJFBA.js.map → chunk-745SWTQX.js.map} +0 -0
- /package/dist/{chunk-HLGPGHT7.js.map → chunk-EVPG26CR.js.map} +0 -0
- /package/dist/{chunk-YRUJ5KGV.js.map → chunk-GDCSPMH4.js.map} +0 -0
- /package/dist/{chunk-LUQBT2Y4.js.map → chunk-HHZ3RTEI.js.map} +0 -0
- /package/dist/{chunk-D4ESHOOJ.js.map → chunk-KNTJOMWY.js.map} +0 -0
- /package/dist/{chunk-JHLALJPB.js.map → chunk-LD6U3L6O.js.map} +0 -0
- /package/dist/{chunk-SW62AX75.js.map → chunk-OKCSSDFC.js.map} +0 -0
- /package/dist/{chunk-RR75ZKEV.js.map → chunk-OQVKLTQY.js.map} +0 -0
- /package/dist/{chunk-J3L2RTYK.js.map → chunk-RBFECYNA.js.map} +0 -0
- /package/dist/{chunk-FYGFMIS6.js.map → chunk-ZKXW46HZ.js.map} +0 -0
- /package/dist/{client-ZBCGODTS.js.map → client-KZGZHHXT.js.map} +0 -0
- /package/dist/{detect-providers-JFE3QLJI.js.map → detect-providers-PAVE2X6O.js.map} +0 -0
- /package/dist/{init-wizard-C4WQA47U.js.map → init-wizard-HEY4HMG3.js.map} +0 -0
- /package/dist/{llm-O46QYWEM.js.map → installer-25TSX4SR.js.map} +0 -0
- /package/dist/{loader-4FMGOVWF.js.map → llm-T3QVHC3Y.js.map} +0 -0
- /package/dist/{loader-BQ4X4K3F.js.map → loader-JQLO6K44.js.map} +0 -0
- /package/dist/{openai-embeddings-HWAKOGUM.js.map → loader-WQKVWL5D.js.map} +0 -0
- /package/dist/{openrouter-GXZK7JXR.js.map → openai-embeddings-5T5ZP7LO.js.map} +0 -0
- /package/dist/{provider-check-CKZW3GQX.js.map → openrouter-RD2COFC7.js.map} +0 -0
- /package/dist/{post-compact-BPICHUPV.js.map → post-compact-EFKFT7TM.js.map} +0 -0
- /package/dist/{post-tool-use-OHJ2EH7I.js.map → post-tool-use-624YC6ZH.js.map} +0 -0
- /package/dist/{post-tool-use-failure-CBPY2TSN.js.map → post-tool-use-failure-QCHZAWQH.js.map} +0 -0
- /package/dist/{pre-compact-ULAA4XIB.js.map → pre-compact-7DWH2EM3.js.map} +0 -0
- /package/dist/{registry-ZHUVXGPO.js.map → provider-check-QN7OGXZA.js.map} +0 -0
- /package/dist/{resolution-events-WZHPQQMN.js.map → registry-2XQMCPA6.js.map} +0 -0
- /package/dist/{version-42DQW43N.js.map → resolution-events-5EVUEWHS.js.map} +0 -0
- /package/dist/{session-end-WRKDJEWM.js.map → session-end-4CM462MC.js.map} +0 -0
- /package/dist/{session-start-7VWGEVOX.js.map → session-start-ZGF7F6DE.js.map} +0 -0
- /package/dist/{stop-VJU4AAOQ.js.map → stop-DQEKVNST.js.map} +0 -0
- /package/dist/{stop-failure-ILPHO26U.js.map → stop-failure-EHH7AN5E.js.map} +0 -0
- /package/dist/{subagent-start-77MY4UMP.js.map → subagent-start-6R52PAFA.js.map} +0 -0
- /package/dist/{subagent-stop-DABERMXZ.js.map → subagent-stop-CLDFJKYQ.js.map} +0 -0
- /package/dist/{task-completed-TBWBOAJ6.js.map → task-completed-V47JA3OV.js.map} +0 -0
- /package/dist/{user-prompt-submit-C47Y5Y5I.js.map → user-prompt-submit-6TX6VECI.js.map} +0 -0
- /package/dist/{verify-MQAANTUR.js.map → verify-JHIMXTY5.js.map} +0 -0
package/dist/ui/index.html
CHANGED
@@ -5,8 +5,8 @@
     <link rel="icon" type="image/svg+xml" href="/favicon.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Myco</title>
-    <script type="module" crossorigin src="/assets/index-ZSGlKT25.js"></script>
-    <link rel="stylesheet" crossorigin href="/assets/index-BGbil7f1.css">
+    <script type="module" crossorigin src="/assets/index-Cn6cQwJy.js"></script>
+    <link rel="stylesheet" crossorigin href="/assets/index-BmsHIwjl.css">
   </head>
   <body>
     <div id="root"></div>
package/dist/{update-GW774ZMW.js → update-ZNIYDQHJ.js}
CHANGED
@@ -2,19 +2,20 @@ import { createRequire as __cr } from 'node:module'; const require = __cr(import
 import {
   VAULT_GITIGNORE,
   registerSymbionts
-} from "./chunk-
+} from "./chunk-SBVDG5JP.js";
 import "./chunk-SAKJMNSR.js";
-import "./chunk-
+import "./chunk-TNCBMGWB.js";
+import "./chunk-HHZ3RTEI.js";
 import "./chunk-MYX5NCRH.js";
-import "./chunk-
-import "./chunk-
-import "./chunk-S6I62FAH.js";
-import "./chunk-FYGFMIS6.js";
+import "./chunk-UWXJCLCK.js";
+import "./chunk-ZKXW46HZ.js";
 import {
   loadManifests,
   resolvePackageRoot
 } from "./chunk-QFMBZ72S.js";
 import "./chunk-LPUQPDC2.js";
+import "./chunk-TRA3R4EC.js";
+import "./chunk-S6I62FAH.js";
 import "./chunk-D7TYRPRM.js";
 import "./chunk-E4VLWIJC.js";
 import "./chunk-KH64DHOY.js";
@@ -72,4 +73,4 @@ async function run(args) {
 export {
   run
 };
-//# sourceMappingURL=update-
+//# sourceMappingURL=update-ZNIYDQHJ.js.map
package/dist/{update-GW774ZMW.js.map → update-ZNIYDQHJ.js.map}
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../src/cli/update.ts"],"sourcesContent":["…"],"mappings":"
+{"version":3,"sources":["../src/cli/update.ts"],"sourcesContent":["…"],"mappings":";;;…;","names":[]}
package/dist/{user-prompt-submit-C47Y5Y5I.js → user-prompt-submit-6TX6VECI.js}
CHANGED
@@ -5,15 +5,15 @@ import {
 import {
   normalizeHookInput,
   readStdin
-} from "./chunk-
+} from "./chunk-OQVKLTQY.js";
 import {
   DaemonClient
-} from "./chunk-
-import "./chunk-
-import "./chunk-S6I62FAH.js";
-import "./chunk-FYGFMIS6.js";
+} from "./chunk-UWXJCLCK.js";
+import "./chunk-ZKXW46HZ.js";
 import "./chunk-QFMBZ72S.js";
 import "./chunk-LPUQPDC2.js";
+import "./chunk-TRA3R4EC.js";
+import "./chunk-S6I62FAH.js";
 import "./chunk-D7TYRPRM.js";
 import "./chunk-E4VLWIJC.js";
 import "./chunk-KH64DHOY.js";
@@ -63,4 +63,4 @@ ${sessionLine}` : sessionLine;
 export {
   main
 };
-//# sourceMappingURL=user-prompt-submit-
+//# sourceMappingURL=user-prompt-submit-6TX6VECI.js.map
package/dist/{verify-MQAANTUR.js → verify-JHIMXTY5.js}
CHANGED
@@ -1,15 +1,15 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
   createEmbeddingProvider
-} from "./chunk-
-import "./chunk-
-import "./chunk-
-import "./chunk-
-import "./chunk-
+} from "./chunk-LD6U3L6O.js";
+import "./chunk-HHZ3RTEI.js";
+import "./chunk-RBFECYNA.js";
+import "./chunk-OKCSSDFC.js";
+import "./chunk-5YQ6VOFZ.js";
 import {
   loadConfig
-} from "./chunk-
-import "./chunk-
+} from "./chunk-PSYLKCWQ.js";
+import "./chunk-TRA3R4EC.js";
 import "./chunk-S6I62FAH.js";
 import "./chunk-D7TYRPRM.js";
 import "./chunk-E4VLWIJC.js";
@@ -42,4 +42,4 @@ async function run(_args, vaultDir) {
 export {
   run
 };
-//# sourceMappingURL=verify-
+//# sourceMappingURL=verify-JHIMXTY5.js.map
package/dist/{version-42DQW43N.js → version-UMEN7OJU.js}
CHANGED
@@ -1,10 +1,10 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
   getPluginVersion
-} from "./chunk-
+} from "./chunk-ZKXW46HZ.js";
 import "./chunk-LPUQPDC2.js";
 import "./chunk-PZUWP5VK.js";
 export {
   getPluginVersion
 };
-//# sourceMappingURL=version-
+//# sourceMappingURL=version-UMEN7OJU.js.map
package/dist/version-UMEN7OJU.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@goondocks/myco",
-  "version": "0.12.10",
+  "version": "0.14.0",
   "description": "Collective agent intelligence — Claude Code plugin",
   "type": "module",
   "main": "dist/index.js",
@@ -34,15 +34,15 @@
     "url": "https://github.com/goondocks-co/myco.git"
   },
   "dependencies": {
-    "@anthropic-ai/claude-agent-sdk": "^0.2.
+    "@anthropic-ai/claude-agent-sdk": "^0.2.87",
     "@anthropic-ai/sdk": "^0.80.0",
     "@inquirer/prompts": "^8.3.2",
-    "@modelcontextprotocol/sdk": "^1.
+    "@modelcontextprotocol/sdk": "^1.28.0",
     "better-sqlite3": "^12.8.0",
     "chokidar": "^5.0.0",
     "gray-matter": "^4.0.3",
     "semver": "^7.7.4",
-    "sqlite-vec": "^0.1.
+    "sqlite-vec": "^0.1.8",
     "yaml": "^2.4.0",
     "zod": "^4.3.6"
   },
@@ -52,7 +52,7 @@
     "@types/semver": "^7.7.1",
     "tsup": "^8.0.0",
     "tsx": "^4.0.0",
-    "typescript": "^
-    "vitest": "^4.1.
+    "typescript": "^6.0.2",
+    "vitest": "^4.1.2"
   }
 }
@@ -1 +0,0 @@
-{"version":3,"sources":["../src/db/queries/search.ts"],"sourcesContent":["…"],"mappings":"…","names":[]}
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/db/queries/sessions.ts"],"sourcesContent":["/**\n * Session CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_MACHINE_ID } from '@myco/constants.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of sessions returned by listSessions when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Session status value when a session is closed normally. */\nconst STATUS_COMPLETED = 'completed';\n\n/** Default session status for new sessions. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default prompt count for new sessions. */\nconst DEFAULT_PROMPT_COUNT = 0;\n\n/** Default tool count for new sessions. */\nconst DEFAULT_TOOL_COUNT = 0;\n\n/** Default processed flag for new sessions. */\nconst DEFAULT_PROCESSED = 0;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting/upserting a session. */\nexport interface SessionInsert {\n id: string;\n agent: string;\n started_at: number;\n created_at: number;\n user?: string | null;\n project_root?: string | null;\n branch?: string | null;\n ended_at?: number | null;\n status?: string;\n prompt_count?: number;\n tool_count?: number;\n title?: string | null;\n summary?: string | null;\n transcript_path?: string | null;\n parent_session_id?: string | null;\n parent_session_reason?: string | null;\n processed?: number;\n content_hash?: string | null;\n machine_id?: string;\n}\n\n/** Row shape returned from session queries (all columns). */\nexport interface SessionRow {\n id: string;\n agent: string;\n user: string | null;\n project_root: string | null;\n branch: string | null;\n started_at: number;\n ended_at: number | null;\n status: string;\n prompt_count: number;\n tool_count: number;\n title: string | null;\n summary: string | null;\n transcript_path: string | null;\n parent_session_id: string | null;\n parent_session_reason: string | null;\n processed: number;\n content_hash: string | null;\n embedded: number;\n created_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Updatable fields for `updateSession`. */\nexport interface SessionUpdate {\n agent?: string;\n user?: string | null;\n project_root?: string | null;\n branch?: string | null;\n ended_at?: number | null;\n status?: string;\n prompt_count?: number;\n tool_count?: number;\n title?: string | null;\n summary?: string | null;\n transcript_path?: string | null;\n parent_session_id?: string | null;\n parent_session_reason?: string | null;\n processed?: number;\n content_hash?: string | null;\n}\n\n/** Filter options for `listSessions`. 
*/\nexport interface ListSessionsOptions {\n limit?: number;\n offset?: number;\n status?: string;\n agent?: string;\n search?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst SESSION_COLUMNS = [\n 'id',\n 'agent',\n '\"user\"',\n 'project_root',\n 'branch',\n 'started_at',\n 'ended_at',\n 'status',\n 'prompt_count',\n 'tool_count',\n 'title',\n 'summary',\n 'transcript_path',\n 'parent_session_id',\n 'parent_session_reason',\n 'processed',\n 'content_hash',\n 'embedded',\n 'created_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = SESSION_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Normalize a SQLite result row into a typed SessionRow.\n *\n * The quoted \"user\" column comes back as `user` in the result object.\n */\nfunction toSessionRow(row: Record<string, unknown>): SessionRow {\n return {\n id: row.id as string,\n agent: row.agent as string,\n user: (row.user as string) ?? null,\n project_root: (row.project_root as string) ?? null,\n branch: (row.branch as string) ?? null,\n started_at: row.started_at as number,\n ended_at: (row.ended_at as number) ?? null,\n status: row.status as string,\n prompt_count: row.prompt_count as number,\n tool_count: row.tool_count as number,\n title: (row.title as string) ?? null,\n summary: (row.summary as string) ?? null,\n transcript_path: (row.transcript_path as string) ?? null,\n parent_session_id: (row.parent_session_id as string) ?? null,\n parent_session_reason: (row.parent_session_reason as string) ?? null,\n processed: row.processed as number,\n content_hash: (row.content_hash as string) ?? null,\n embedded: (row.embedded as number) ?? 0,\n created_at: row.created_at as number,\n machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a session or update it if the id already exists.\n *\n * On conflict the row is updated with the values from `data`, preserving\n * any columns not supplied via COALESCE with EXCLUDED values.\n */\nexport function upsertSession(data: SessionInsert): SessionRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO sessions (\n id, agent, \"user\", project_root, branch,\n started_at, ended_at, status, prompt_count, tool_count,\n title, summary, transcript_path,\n parent_session_id, parent_session_reason,\n processed, content_hash, created_at, machine_id\n ) VALUES (\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?, ?,\n ?, ?, ?,\n ?, ?,\n ?, ?, ?, ?\n )\n ON CONFLICT (id) DO UPDATE SET\n agent = EXCLUDED.agent,\n \"user\" = EXCLUDED.\"user\",\n project_root = EXCLUDED.project_root,\n branch = EXCLUDED.branch,\n started_at = EXCLUDED.started_at,\n ended_at = COALESCE(EXCLUDED.ended_at, sessions.ended_at),\n status = COALESCE(EXCLUDED.status, sessions.status),\n prompt_count = CASE WHEN ? THEN EXCLUDED.prompt_count ELSE sessions.prompt_count END,\n tool_count = CASE WHEN ? 
THEN EXCLUDED.tool_count ELSE sessions.tool_count END,\n title = COALESCE(EXCLUDED.title, sessions.title),\n summary = COALESCE(EXCLUDED.summary, sessions.summary),\n transcript_path = COALESCE(EXCLUDED.transcript_path, sessions.transcript_path),\n parent_session_id = EXCLUDED.parent_session_id,\n parent_session_reason = EXCLUDED.parent_session_reason,\n processed = COALESCE(EXCLUDED.processed, sessions.processed),\n content_hash = EXCLUDED.content_hash`,\n ).run(\n data.id,\n data.agent,\n data.user ?? null,\n data.project_root ?? null,\n data.branch ?? null,\n data.started_at,\n data.ended_at ?? null,\n data.status ?? DEFAULT_STATUS,\n data.prompt_count ?? DEFAULT_PROMPT_COUNT,\n data.tool_count ?? DEFAULT_TOOL_COUNT,\n data.title ?? null,\n data.summary ?? null,\n data.transcript_path ?? null,\n data.parent_session_id ?? null,\n data.parent_session_reason ?? null,\n data.processed ?? DEFAULT_PROCESSED,\n data.content_hash ?? null,\n data.created_at,\n data.machine_id ?? DEFAULT_MACHINE_ID,\n data.prompt_count !== undefined ? 1 : 0,\n data.tool_count !== undefined ? 1 : 0,\n );\n\n const row = toSessionRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n syncRow('sessions', row);\n\n return row;\n}\n\n/**\n * Retrieve a single session by id.\n *\n * @returns the session row, or null if not found.\n */\nexport function getSession(id: string): SessionRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM sessions WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toSessionRow(row);\n}\n\n/** Build WHERE clause and bound params from session filter options. */\nfunction buildSessionsWhere(\n options: Omit<ListSessionsOptions, 'limit' | 'offset'>,\n): { where: string; params: unknown[] } {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n\n if (options.agent !== undefined) {\n conditions.push(`agent = ?`);\n params.push(options.agent);\n }\n\n if (options.search !== undefined && options.search.length > 0) {\n conditions.push(`(title LIKE ? OR id LIKE ?)`);\n const pattern = `%${options.search}%`;\n params.push(pattern, pattern);\n }\n\n return {\n where: conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '',\n params,\n };\n}\n\n/**\n * List sessions with optional filters, ordered by created_at DESC.\n */\nexport function listSessions(\n options: ListSessionsOptions = {},\n): SessionRow[] {\n const db = getDatabase();\n const { where, params } = buildSessionsWhere(options);\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n const offset = options.offset ?? 
0;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM sessions\n ${where}\n ORDER BY created_at DESC\n LIMIT ?\n OFFSET ?`,\n ).all(...params, limit, offset) as Record<string, unknown>[];\n\n return rows.map(toSessionRow);\n}\n\n/**\n * Count sessions matching optional filters (for pagination totals).\n */\nexport function countSessions(\n options: Omit<ListSessionsOptions, 'limit' | 'offset'> = {},\n): number {\n const db = getDatabase();\n const { where, params } = buildSessionsWhere(options);\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM sessions ${where}`,\n ).get(...params) as { count: number };\n\n return row.count;\n}\n\n/**\n * Update specific fields on an existing session.\n *\n * @returns the updated row, or null if the session does not exist.\n */\nexport function updateSession(\n id: string,\n updates: SessionUpdate,\n): SessionRow | null {\n const db = getDatabase();\n\n const setClauses: string[] = [];\n const params: unknown[] = [];\n\n const fieldMap: Record<string, string> = {\n agent: 'agent',\n user: '\"user\"',\n project_root: 'project_root',\n branch: 'branch',\n ended_at: 'ended_at',\n status: 'status',\n prompt_count: 'prompt_count',\n tool_count: 'tool_count',\n title: 'title',\n summary: 'summary',\n transcript_path: 'transcript_path',\n parent_session_id: 'parent_session_id',\n parent_session_reason: 'parent_session_reason',\n processed: 'processed',\n content_hash: 'content_hash',\n };\n\n for (const [key, column] of Object.entries(fieldMap)) {\n if (key in updates) {\n setClauses.push(`${column} = ?`);\n params.push((updates as Record<string, unknown>)[key] ?? null);\n }\n }\n\n if (setClauses.length === 0) return getSession(id);\n\n params.push(id);\n\n db.prepare(\n `UPDATE sessions\n SET ${setClauses.join(', ')}\n WHERE id = ?`,\n ).run(...params);\n\n const updated = getSession(id);\n\n if (updated) syncRow('sessions', updated);\n\n return updated;\n}\n\n/**\n * Close a session — set status to 'completed' and record the end time.\n *\n * @returns the updated row, or null if the session does not exist.\n */\nexport function closeSession(\n id: string,\n endedAt: number,\n): SessionRow | null {\n const db = getDatabase();\n\n db.prepare(\n `UPDATE sessions\n SET status = ?, ended_at = ?\n WHERE id = ?`,\n ).run(STATUS_COMPLETED, endedAt, id);\n\n const closed = getSession(id);\n\n if (closed) syncRow('sessions', closed);\n\n return closed;\n}\n\n/**\n * Delete a session and all its child rows (batches, activities, attachments).\n *\n * No ON DELETE CASCADE in the schema, so we delete children first.\n * Returns true if the session existed and was deleted.\n */\nexport function deleteSession(id: string): boolean {\n const db = getDatabase();\n\n db.prepare(`DELETE FROM activities WHERE session_id = ?`).run(id);\n db.prepare(`DELETE FROM attachments WHERE session_id = ?`).run(id);\n db.prepare(`DELETE FROM prompt_batches WHERE session_id = ?`).run(id);\n const info = db.prepare(`DELETE FROM sessions WHERE id = ?`).run(id);\n\n return info.changes > 0;\n}\n\n// ---------------------------------------------------------------------------\n// Cascade delete + impact query\n// ---------------------------------------------------------------------------\n\n/** Counts of related data that would be affected by a session delete. */\nexport interface SessionImpact {\n promptCount: number;\n sporeCount: number;\n attachmentCount: number;\n graphEdgeCount: number;\n}\n\n/** Result of a cascade delete operation. 
*/\nexport interface DeleteCascadeResult {\n deleted: boolean;\n counts: {\n prompts: number;\n spores: number;\n attachments: number;\n graphEdges: number;\n resolutionEvents: number;\n };\n /** Spore IDs that were deleted (needed for vault file + vector cleanup). */\n deletedSporeIds: string[];\n /** Attachment file paths that were deleted from DB (needed for disk cleanup). */\n deletedAttachmentPaths: string[];\n}\n\n/**\n * Get counts of all data related to a session, for pre-delete impact display.\n */\nexport function getSessionImpact(sessionId: string): SessionImpact {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT\n (SELECT COUNT(*) FROM prompt_batches WHERE session_id = ?) AS promptCount,\n (SELECT COUNT(*) FROM spores WHERE session_id = ?) AS sporeCount,\n (SELECT COUNT(*) FROM attachments WHERE session_id = ?) AS attachmentCount,\n (SELECT COUNT(*) FROM graph_edges WHERE session_id = ?) AS graphEdgeCount`,\n ).get(sessionId, sessionId, sessionId, sessionId) as SessionImpact;\n\n return row;\n}\n\n/**\n * Delete a session and ALL related data in a single transaction.\n *\n * Returns counts of deleted rows and IDs needed for post-transaction\n * cleanup (vault files, embedding vectors).\n */\nexport function deleteSessionCascade(sessionId: string): DeleteCascadeResult {\n const db = getDatabase();\n\n const zeroCounts: DeleteCascadeResult = {\n deleted: false,\n counts: { prompts: 0, spores: 0, attachments: 0, graphEdges: 0, resolutionEvents: 0 },\n deletedSporeIds: [],\n deletedAttachmentPaths: [],\n };\n\n // Check session exists first\n const exists = db.prepare(`SELECT id FROM sessions WHERE id = ?`).get(sessionId);\n if (!exists) return zeroCounts;\n\n // Collect IDs/paths needed for post-transaction cleanup before deleting\n const sporeIds = (db.prepare(\n `SELECT id FROM spores WHERE session_id = ?`,\n ).all(sessionId) as { id: string }[]).map((r) => r.id);\n\n const attachmentPaths = (db.prepare(\n `SELECT file_path FROM attachments WHERE session_id = ?`,\n ).all(sessionId) as { file_path: string }[]).map((r) => r.file_path);\n\n // Run all deletes in a single transaction\n const result = db.transaction(() => {\n db.prepare(`DELETE FROM activities WHERE session_id = ?`).run(sessionId);\n const attachments = db.prepare(`DELETE FROM attachments WHERE session_id = ?`).run(sessionId);\n const prompts = db.prepare(`DELETE FROM prompt_batches WHERE session_id = ?`).run(sessionId);\n const resEvents = db.prepare(`DELETE FROM resolution_events WHERE session_id = ?`).run(sessionId);\n const edges = db.prepare(`DELETE FROM graph_edges WHERE session_id = ?`).run(sessionId);\n const spores = db.prepare(`DELETE FROM spores WHERE session_id = ?`).run(sessionId);\n const session = db.prepare(`DELETE FROM sessions WHERE id = ?`).run(sessionId);\n\n return {\n deleted: session.changes > 0,\n counts: {\n prompts: prompts.changes,\n spores: spores.changes,\n attachments: attachments.changes,\n graphEdges: edges.changes,\n resolutionEvents: resEvents.changes,\n },\n };\n })();\n\n return {\n ...result,\n deletedSporeIds: sporeIds,\n deletedAttachmentPaths: attachmentPaths,\n 
};\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,IAAM,qBAAqB;AAG3B,IAAM,mBAAmB;AAGzB,IAAM,iBAAiB;AAGvB,IAAM,uBAAuB;AAG7B,IAAM,qBAAqB;AAG3B,IAAM,oBAAoB;AAsF1B,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,gBAAgB,KAAK,IAAI;AAWhD,SAAS,aAAa,KAA0C;AAC9D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAO,IAAI,QAAmB;AAAA,IAC9B,cAAe,IAAI,gBAA2B;AAAA,IAC9C,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAY,IAAI;AAAA,IAChB,UAAW,IAAI,YAAuB;AAAA,IACtC,QAAQ,IAAI;AAAA,IACZ,cAAc,IAAI;AAAA,IAClB,YAAY,IAAI;AAAA,IAChB,OAAQ,IAAI,SAAoB;AAAA,IAChC,SAAU,IAAI,WAAsB;AAAA,IACpC,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,mBAAoB,IAAI,qBAAgC;AAAA,IACxD,uBAAwB,IAAI,yBAAoC;AAAA,IAChE,WAAW,IAAI;AAAA,IACf,cAAe,IAAI,gBAA2B;AAAA,IAC9C,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAYO,SAAS,cAAc,MAAiC;AAC7D,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,QAAQ;AAAA,IACb,KAAK,gBAAgB;AAAA,IACrB,KAAK,UAAU;AAAA,IACf,KAAK;AAAA,IACL,KAAK,YAAY;AAAA,IACjB,KAAK,UAAU;AAAA,IACf,KAAK,gBAAgB;AAAA,IACrB,KAAK,cAAc;AAAA,IACnB,KAAK,SAAS;AAAA,IACd,KAAK,WAAW;AAAA,IAChB,KAAK,mBAAmB;AAAA,IACxB,KAAK,qBAAqB;AAAA,IAC1B,KAAK,yBAAyB;AAAA,IAC9B,KAAK,aAAa;AAAA,IAClB,KAAK,gBAAgB;AAAA,IACrB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,iBAAiB,SAAY,IAAI;AAAA,IACtC,KAAK,eAAe,SAAY,IAAI;AAAA,EACtC;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAU,cAAc,6BAA6B,EAAE,IAAI,KAAK,EAAE;AAAA,EAC/E;AAEA,UAAQ,YAAY,GAAG;AAEvB,SAAO;AACT;AAOO,SAAS,WAAW,IAA+B;AACxD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAU,cAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,aAAa,GAAG;AACzB;AAGA,SAAS,mBACP,SACsC;AACtC,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,MAAI,QAAQ,UAAU,QAAW;AAC/B,eAAW,KAAK,WAAW;AAC3B,WAAO,KAAK,QAAQ,KAAK;AAAA,EAC3B;AAEA,MAAI,QAAQ,WAAW,UAAa,QAAQ,OAAO,SAAS,GAAG;AAC7D,eAAW,KAAK,6BAA6B;AAC7C,UAAM,UAAU,IAAI,QAAQ,MAAM;AAClC,WAAO,KAAK,SAAS,OAAO;AAAA,EAC9B;AAEA,SAAO;AAAA,IACL,OAAO,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAAA,IACrE;AAAA,EACF;AACF;AAKO,SAAS,aACd,UAA+B,CAAC,GAClB;AACd,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,mBAAmB,OAAO;AACpD,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,QAAQ,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,YAAY;AAC9B;AAKO,SAAS,cACd,UAAyD,CAAC,GAClD;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,mBAAmB,OAAO;AAEpD,QAAM,MAAM,GAAG;AAAA,IACb,0CAA0C,KAAK;AAAA,EACjD,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,IAAI;AACb;AAOO,SAAS,cACd,IACA,SACmB;AACnB,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,QAAM,WAAmC;AAAA,IACvC,OAAO;AAAA,IACP,MAAM;AAAA,IACN,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,OAAO;AAAA,IACP,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,uBAAuB;AAAA,IACvB,WAAW;AAAA,IACX,cAAc;AAAA,EAChB;AAEA,aAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACpD,QAAI,OAAO,SAAS;AAClB,iBAAW,KAAK,GAAG,MAAM,MAAM;AAC/B,aAAO,KAAM,QAAoC,GAAG,KAAK,IAAI;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,WAAW,EAAE;AAEjD,SAAO,KAAK,EAAE;AAEd,KAAG;AAAA,IACD;AAAA,WACO,WAAW,KAAK,IAAI,CAAC;AAAA;AAAA,EAE9B,EAAE,IAAI,GAAG,MA
AM;AAEf,QAAM,UAAU,WAAW,EAAE;AAE7B,MAAI,QAAS,SAAQ,YAAY,OAAO;AAExC,SAAO;AACT;AAOO,SAAS,aACd,IACA,SACmB;AACnB,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB,SAAS,EAAE;AAEnC,QAAM,SAAS,WAAW,EAAE;AAE5B,MAAI,OAAQ,SAAQ,YAAY,MAAM;AAEtC,SAAO;AACT;AAkDO,SAAS,iBAAiB,WAAkC;AACjE,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,WAAW,WAAW,WAAW,SAAS;AAEhD,SAAO;AACT;AAQO,SAAS,qBAAqB,WAAwC;AAC3E,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAkC;AAAA,IACtC,SAAS;AAAA,IACT,QAAQ,EAAE,SAAS,GAAG,QAAQ,GAAG,aAAa,GAAG,YAAY,GAAG,kBAAkB,EAAE;AAAA,IACpF,iBAAiB,CAAC;AAAA,IAClB,wBAAwB,CAAC;AAAA,EAC3B;AAGA,QAAM,SAAS,GAAG,QAAQ,sCAAsC,EAAE,IAAI,SAAS;AAC/E,MAAI,CAAC,OAAQ,QAAO;AAGpB,QAAM,WAAY,GAAG;AAAA,IACnB;AAAA,EACF,EAAE,IAAI,SAAS,EAAuB,IAAI,CAAC,MAAM,EAAE,EAAE;AAErD,QAAM,kBAAmB,GAAG;AAAA,IAC1B;AAAA,EACF,EAAE,IAAI,SAAS,EAA8B,IAAI,CAAC,MAAM,EAAE,SAAS;AAGnE,QAAM,SAAS,GAAG,YAAY,MAAM;AAClC,OAAG,QAAQ,6CAA6C,EAAE,IAAI,SAAS;AACvE,UAAM,cAAc,GAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AAC5F,UAAM,UAAU,GAAG,QAAQ,iDAAiD,EAAE,IAAI,SAAS;AAC3F,UAAM,YAAY,GAAG,QAAQ,oDAAoD,EAAE,IAAI,SAAS;AAChG,UAAM,QAAQ,GAAG,QAAQ,8CAA8C,EAAE,IAAI,SAAS;AACtF,UAAM,SAAS,GAAG,QAAQ,yCAAyC,EAAE,IAAI,SAAS;AAClF,UAAM,UAAU,GAAG,QAAQ,mCAAmC,EAAE,IAAI,SAAS;AAE7E,WAAO;AAAA,MACL,SAAS,QAAQ,UAAU;AAAA,MAC3B,QAAQ;AAAA,QACN,SAAS,QAAQ;AAAA,QACjB,QAAQ,OAAO;AAAA,QACf,aAAa,YAAY;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,kBAAkB,UAAU;AAAA,MAC9B;AAAA,IACF;AAAA,EACF,CAAC,EAAE;AAEH,SAAO;AAAA,IACL,GAAG;AAAA,IACH,iBAAiB;AAAA,IACjB,wBAAwB;AAAA,EAC1B;AACF;","names":[]}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/db/queries/resolution-events.ts"],"sourcesContent":["/**\n * Resolution event CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_MACHINE_ID } from '@myco/constants.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of events returned by listResolutionEvents when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a resolution event. */\nexport interface ResolutionEventInsert {\n id: string;\n agent_id: string;\n spore_id: string;\n action: string;\n created_at: number;\n new_spore_id?: string | null;\n reason?: string | null;\n session_id?: string | null;\n machine_id?: string;\n}\n\n/** Row shape returned from resolution_events queries (all columns). */\nexport interface ResolutionEventRow {\n id: string;\n agent_id: string;\n spore_id: string;\n action: string;\n new_spore_id: string | null;\n reason: string | null;\n session_id: string | null;\n created_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Filter options for `listResolutionEvents`. */\nexport interface ListResolutionEventsOptions {\n agent_id?: string;\n spore_id?: string;\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst EVENT_COLUMNS = [\n 'id',\n 'agent_id',\n 'spore_id',\n 'action',\n 'new_spore_id',\n 'reason',\n 'session_id',\n 'created_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = EVENT_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed ResolutionEventRow. */\nfunction toResolutionEventRow(row: Record<string, unknown>): ResolutionEventRow {\n return {\n id: row.id as string,\n agent_id: row.agent_id as string,\n spore_id: row.spore_id as string,\n action: row.action as string,\n new_spore_id: (row.new_spore_id as string) ?? null,\n reason: (row.reason as string) ?? null,\n session_id: (row.session_id as string) ?? null,\n created_at: row.created_at as number,\n machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n synced_at: (row.synced_at as number) ?? 
null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new resolution event.\n */\nexport function insertResolutionEvent(\n data: ResolutionEventInsert,\n): ResolutionEventRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO resolution_events (\n id, agent_id, spore_id, action, new_spore_id, reason, session_id, created_at, machine_id\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n ).run(\n data.id,\n data.agent_id,\n data.spore_id,\n data.action,\n data.new_spore_id ?? null,\n data.reason ?? null,\n data.session_id ?? null,\n data.created_at,\n data.machine_id ?? DEFAULT_MACHINE_ID,\n );\n\n const row = toResolutionEventRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM resolution_events WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n syncRow('resolution_events', row);\n\n return row;\n}\n\n/**\n * List resolution events with optional filters, ordered by created_at DESC.\n */\nexport function listResolutionEvents(\n options: ListResolutionEventsOptions = {},\n): ResolutionEventRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.agent_id !== undefined) {\n conditions.push(`agent_id = ?`);\n params.push(options.agent_id);\n }\n\n if (options.spore_id !== undefined) {\n conditions.push(`spore_id = ?`);\n params.push(options.spore_id);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n\n params.push(limit);\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM resolution_events\n ${where}\n ORDER BY created_at DESC\n LIMIT ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toResolutionEventRow);\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,IAAM,qBAAqB;AA4C3B,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,cAAc,KAAK,IAAI;AAO9C,SAAS,qBAAqB,KAAkD;AAC9E,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,UAAU,IAAI;AAAA,IACd,QAAQ,IAAI;AAAA,IACZ,cAAe,IAAI,gBAA2B;AAAA,IAC9C,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AASO,SAAS,sBACd,MACoB;AACpB,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,gBAAgB;AAAA,IACrB,KAAK,UAAU;AAAA,IACf,KAAK,cAAc;AAAA,IACnB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,EACrB;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAU,cAAc,sCAAsC,EAAE,IAAI,KAAK,EAAE;AAAA,EACxF;AAEA,UAAQ,qBAAqB,GAAG;AAEhC,SAAO;AACT;AAKO,SAAS,qBACd,UAAuC,CAAC,GAClB;AACtB,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AAEA,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AAEA,QAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAC5E,QAAM,QAAQ,QAAQ,SAAS;AAE/B,SAAO,KAAK,KAAK;AAEjB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA,EAGV,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,oBAAoB;AACtC;","names":[]}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/config/updates.ts"],"sourcesContent":["import type { MycoConfig, EmbeddingProviderConfig, ContextConfig, TaskProviderOverride, PhaseOverride } from './schema.js';\n\n/**\n * Set a value at a dot-separated path, returning a new config object.\n * Creates intermediate objects along the path as needed.\n */\nexport function withValue(config: MycoConfig, dotPath: string, value: unknown): MycoConfig {\n const clone = structuredClone(config) as Record<string, unknown>;\n const segments = dotPath.split('.');\n let current: Record<string, unknown> = clone;\n\n for (let i = 0; i < segments.length - 1; i++) {\n const segment = segments[i];\n if (current[segment] === undefined || current[segment] === null || typeof current[segment] !== 'object') {\n current[segment] = {};\n }\n current = current[segment] as Record<string, unknown>;\n }\n\n current[segments[segments.length - 1]] = value;\n\n return clone as unknown as MycoConfig;\n}\n\n/** Provider override shape used in task config updates. Null means delete. */\ninterface ProviderInput {\n type: 'cloud' | 'ollama' | 'lmstudio';\n model?: string;\n base_url?: string;\n context_length?: number;\n}\n\n/** Phase override input. Null fields mean delete. */\ninterface PhaseInput {\n provider?: ProviderInput | null;\n model?: string | null;\n maxTurns?: number | null;\n}\n\n/** Input shape for task config updates. Null values mean \"delete this field\". */\nexport interface TaskConfigUpdate {\n provider?: ProviderInput | null;\n model?: string | null;\n maxTurns?: number | null;\n timeoutSeconds?: number | null;\n phases?: Record<string, PhaseInput | null> | null;\n}\n\n/**\n * Apply partial task config updates, returning a new config object.\n * Null values delete fields. Empty task entries and phase maps are cleaned up.\n */\nexport function withTaskConfig(\n config: MycoConfig,\n taskId: string,\n update: TaskConfigUpdate,\n): MycoConfig {\n const tasks = { ...(config.agent.tasks ?? {}) };\n const entry: TaskProviderOverride = { ...(tasks[taskId] ?? {}) };\n\n // Apply top-level fields\n if ('provider' in update) {\n if (update.provider === null) {\n delete entry.provider;\n } else if (update.provider !== undefined) {\n entry.provider = { ...update.provider };\n }\n }\n\n if ('model' in update) {\n if (update.model === null) delete entry.model;\n else if (update.model !== undefined) entry.model = update.model;\n }\n\n if ('maxTurns' in update) {\n if (update.maxTurns === null) delete entry.maxTurns;\n else if (update.maxTurns !== undefined) entry.maxTurns = update.maxTurns;\n }\n\n if ('timeoutSeconds' in update) {\n if (update.timeoutSeconds === null) delete entry.timeoutSeconds;\n else if (update.timeoutSeconds !== undefined) entry.timeoutSeconds = update.timeoutSeconds;\n }\n\n // Apply phase overrides\n if ('phases' in update) {\n if (update.phases === null) {\n delete entry.phases;\n } else if (update.phases !== undefined) {\n const phases: Record<string, PhaseOverride> = { ...(entry.phases ?? {}) };\n\n for (const [phaseName, phaseValue] of Object.entries(update.phases)) {\n if (phaseValue === null) {\n delete phases[phaseName];\n } else {\n const pe: PhaseOverride = { ...(phases[phaseName] ?? 
{}) };\n if ('provider' in phaseValue) {\n if (phaseValue.provider === null) delete pe.provider;\n else if (phaseValue.provider !== undefined) pe.provider = { ...phaseValue.provider };\n }\n if ('model' in phaseValue) {\n if (phaseValue.model === null) delete pe.model;\n else if (phaseValue.model !== undefined) pe.model = phaseValue.model;\n }\n if ('maxTurns' in phaseValue) {\n if (phaseValue.maxTurns === null) delete pe.maxTurns;\n else if (phaseValue.maxTurns !== undefined) pe.maxTurns = phaseValue.maxTurns;\n }\n phases[phaseName] = pe;\n }\n }\n\n // Clean up empty phases map\n if (Object.keys(phases).length === 0) {\n delete entry.phases;\n } else {\n entry.phases = phases;\n }\n }\n }\n\n // Clean up empty task entry\n if (Object.keys(entry).length === 0) {\n delete tasks[taskId];\n } else {\n tasks[taskId] = entry;\n }\n\n return {\n ...config,\n agent: {\n ...config.agent,\n tasks: Object.keys(tasks).length > 0 ? tasks : undefined,\n },\n };\n}\n\n/**\n * Merge partial embedding updates into config, returning a new config object.\n */\nexport function withEmbedding(\n config: MycoConfig,\n updates: Partial<EmbeddingProviderConfig>,\n): MycoConfig {\n return {\n ...config,\n embedding: { ...config.embedding, ...updates },\n };\n}\n\n/**\n * Merge partial context injection updates into config, returning a new config object.\n */\nexport function withContext(\n config: MycoConfig,\n updates: Partial<ContextConfig>,\n): MycoConfig {\n return {\n ...config,\n context: { ...config.context, ...updates },\n };\n}\n"],"mappings":";;;AAMO,SAAS,UAAU,QAAoB,SAAiB,OAA4B;AACzF,QAAM,QAAQ,gBAAgB,MAAM;AACpC,QAAM,WAAW,QAAQ,MAAM,GAAG;AAClC,MAAI,UAAmC;AAEvC,WAAS,IAAI,GAAG,IAAI,SAAS,SAAS,GAAG,KAAK;AAC5C,UAAM,UAAU,SAAS,CAAC;AAC1B,QAAI,QAAQ,OAAO,MAAM,UAAa,QAAQ,OAAO,MAAM,QAAQ,OAAO,QAAQ,OAAO,MAAM,UAAU;AACvG,cAAQ,OAAO,IAAI,CAAC;AAAA,IACtB;AACA,cAAU,QAAQ,OAAO;AAAA,EAC3B;AAEA,UAAQ,SAAS,SAAS,SAAS,CAAC,CAAC,IAAI;AAEzC,SAAO;AACT;AA8BO,SAAS,eACd,QACA,QACA,QACY;AACZ,QAAM,QAAQ,EAAE,GAAI,OAAO,MAAM,SAAS,CAAC,EAAG;AAC9C,QAAM,QAA8B,EAAE,GAAI,MAAM,MAAM,KAAK,CAAC,EAAG;AAG/D,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,MAAM;AAC5B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,aAAa,QAAW;AACxC,YAAM,WAAW,EAAE,GAAG,OAAO,SAAS;AAAA,IACxC;AAAA,EACF;AAEA,MAAI,WAAW,QAAQ;AACrB,QAAI,OAAO,UAAU,KAAM,QAAO,MAAM;AAAA,aAC/B,OAAO,UAAU,OAAW,OAAM,QAAQ,OAAO;AAAA,EAC5D;AAEA,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,KAAM,QAAO,MAAM;AAAA,aAClC,OAAO,aAAa,OAAW,OAAM,WAAW,OAAO;AAAA,EAClE;AAEA,MAAI,oBAAoB,QAAQ;AAC9B,QAAI,OAAO,mBAAmB,KAAM,QAAO,MAAM;AAAA,aACxC,OAAO,mBAAmB,OAAW,OAAM,iBAAiB,OAAO;AAAA,EAC9E;AAGA,MAAI,YAAY,QAAQ;AACtB,QAAI,OAAO,WAAW,MAAM;AAC1B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,WAAW,QAAW;AACtC,YAAM,SAAwC,EAAE,GAAI,MAAM,UAAU,CAAC,EAAG;AAExE,iBAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,OAAO,MAAM,GAAG;AACnE,YAAI,eAAe,MAAM;AACvB,iBAAO,OAAO,SAAS;AAAA,QACzB,OAAO;AACL,gBAAM,KAAoB,EAAE,GAAI,OAAO,SAAS,KAAK,CAAC,EAAG;AACzD,cAAI,cAAc,YAAY;AAC5B,gBAAI,WAAW,aAAa,KAAM,QAAO,GAAG;AAAA,qBACnC,WAAW,aAAa,OAAW,IAAG,WAAW,EAAE,GAAG,WAAW,SAAS;AAAA,UACrF;AACA,cAAI,WAAW,YAAY;AACzB,gBAAI,WAAW,UAAU,KAAM,QAAO,GAAG;AAAA,qBAChC,WAAW,UAAU,OAAW,IAAG,QAAQ,WAAW;AAAA,UACjE;AACA,cAAI,cAAc,YAAY;AAC5B,gBAAI,WAAW,aAAa,KAAM,QAAO,GAAG;AAAA,qBACnC,WAAW,aAAa,OAAW,IAAG,WAAW,WAAW;AAAA,UACvE;AACA,iBAAO,SAAS,IAAI;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AACpC,eAAO,MAAM;AAAA,MACf,OAAO;AACL,cAAM,SAAS;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,KAAK,KAAK,EAAE,WAAW,GAAG;AACnC,WAAO,MAAM,MAAM;AAAA,EACrB,OAAO;AACL,UAAM,MAAM,IAAI;AAAA,EAClB;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,OAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV,OAAO,OAAO,
KAAK,KAAK,EAAE,SAAS,IAAI,QAAQ;AAAA,IACjD;AAAA,EACF;AACF;AAKO,SAAS,cACd,QACA,SACY;AACZ,SAAO;AAAA,IACL,GAAG;AAAA,IACH,WAAW,EAAE,GAAG,OAAO,WAAW,GAAG,QAAQ;AAAA,EAC/C;AACF;","names":[]}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/config/loader.ts","../src/config/schema.ts","../src/config/migrations.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport YAML from 'yaml';\nimport { MycoConfigSchema, type MycoConfig, type BackupConfig, type TeamConfig } from './schema.js';\nimport { runMigrations, CURRENT_MIGRATION_VERSION } from './migrations.js';\n\nexport const CONFIG_FILENAME = 'myco.yaml';\n\nexport function loadConfig(vaultDir: string): MycoConfig {\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n\n if (!fs.existsSync(configPath)) {\n throw new Error(`myco.yaml not found in ${vaultDir}`);\n }\n\n const raw = fs.readFileSync(configPath, 'utf-8');\n const parsed = YAML.parse(raw) as Record<string, unknown>;\n\n // Detect v1 config and guide migration\n if (parsed.version === 1 || (parsed.intelligence as Record<string, unknown>)?.backend) {\n throw new Error(\n 'Myco config uses v1 format. Run /myco:setup-llm to reconfigure for v2.',\n );\n }\n\n // --- v2 → v3 migration ---\n let v2Migrated = false;\n if (parsed.version === 2) {\n // Extract intelligence.embedding to top-level embedding\n const intel = parsed.intelligence as Record<string, unknown> | undefined;\n const embeddingConfig = intel?.embedding as Record<string, unknown> | undefined;\n if (embeddingConfig && !parsed.embedding) {\n // Map v2 'lm-studio' to v3 'openai-compatible' for embedding provider\n if (embeddingConfig.provider === 'lm-studio') {\n embeddingConfig.provider = 'openai-compatible';\n }\n parsed.embedding = embeddingConfig;\n }\n\n // Keep daemon.port and daemon.log_level, drop grace_period and max_log_size\n const daemon = parsed.daemon as Record<string, unknown> | undefined;\n if (daemon) {\n const { port, log_level } = daemon;\n parsed.daemon = { port: port ?? null, log_level: log_level ?? 'info' };\n }\n\n // Keep capture basics, drop token-related fields; migrate artifact_watch → plan_dirs\n const capture = parsed.capture as Record<string, unknown> | undefined;\n if (capture) {\n const { transcript_paths, artifact_watch, plan_dirs, artifact_extensions, buffer_max_events } = capture;\n parsed.capture = {\n transcript_paths,\n plan_dirs: plan_dirs ?? artifact_watch,\n artifact_extensions,\n buffer_max_events,\n };\n }\n\n // Drop removed top-level sections\n delete parsed.intelligence;\n delete parsed.context;\n delete parsed.team;\n delete parsed.digest;\n delete parsed.pipeline;\n\n // Set version to 3\n parsed.version = 3;\n v2Migrated = true;\n\n process.stderr.write('[myco migration] Migrated config from v2 to v3\\n');\n }\n\n // Run numbered migrations (for v3+ forward migrations)\n const migrationsRan = runMigrations(parsed, vaultDir, (msg) => {\n process.stderr.write(`[myco migration] ${msg}\\n`);\n });\n\n // Parse with Zod to fill in defaults for new config sections\n const config = MycoConfigSchema.parse(parsed);\n\n // Write back if v2→v3 migration ran, numbered migrations ran, or new defaults were added\n const needsWrite = v2Migrated\n || migrationsRan\n || (parsed.config_version as number ?? 
0) < CURRENT_MIGRATION_VERSION\n || parsed.version !== config.version;\n\n if (needsWrite) {\n const fullConfig = JSON.parse(JSON.stringify(config)) as Record<string, unknown>;\n fs.writeFileSync(configPath, YAML.stringify(fullConfig), 'utf-8');\n }\n\n return config;\n}\n\nexport function saveConfig(vaultDir: string, config: MycoConfig): void {\n // Validate before writing — OAK lesson: validate on write, not just read\n const validated = MycoConfigSchema.parse(config);\n\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n fs.mkdirSync(vaultDir, { recursive: true });\n fs.writeFileSync(configPath, YAML.stringify(validated), 'utf-8');\n}\n\nexport function updateConfig(\n vaultDir: string,\n fn: (config: MycoConfig) => MycoConfig,\n): MycoConfig {\n const current = loadConfig(vaultDir);\n const updated = fn(current);\n saveConfig(vaultDir, updated);\n return updated;\n}\n\nexport function updateBackupConfig(\n vaultDir: string,\n backup: Partial<BackupConfig>,\n): MycoConfig {\n return updateConfig(vaultDir, (config) => ({\n ...config,\n backup: { ...config.backup, ...backup },\n }));\n}\n\nexport function updateTeamConfig(\n vaultDir: string,\n team: Partial<TeamConfig>,\n): MycoConfig {\n return updateConfig(vaultDir, (config) => ({\n ...config,\n team: { ...config.team, ...team },\n }));\n}\n","import { z } from 'zod';\n\nconst EmbeddingProviderSchema = z.object({\n provider: z.enum(['ollama', 'openai-compatible', 'openrouter', 'openai']).default('ollama'),\n model: z.string().default('bge-m3'),\n base_url: z.string().url().optional(),\n});\n\nconst DaemonSchema = z.object({\n port: z.number().int().min(1024).max(65535).nullable().default(null),\n log_level: z.enum(['debug', 'info', 'warn', 'error']).default('info'),\n log_retention_days: z.number().int().min(1).max(365).default(30),\n});\n\nconst CaptureSchema = z.object({\n transcript_paths: z.array(z.string()).default([]),\n plan_dirs: z.array(z.string()).default([]),\n artifact_extensions: z.array(z.string()).default(['.md']),\n buffer_max_events: z.number().int().positive().default(500),\n});\n\n/** Provider config shape used in both task-level and phase-level overrides. */\nconst ProviderOverrideSchema = z.object({\n type: z.enum(['cloud', 'ollama', 'lmstudio']),\n base_url: z.string().optional(),\n model: z.string().optional(),\n /** Context window size for local models (Ollama num_ctx, LM Studio context_length). */\n context_length: z.number().int().positive().optional(),\n});\n\n/** Per-phase overrides within a task — keyed by phase name. */\nconst PhaseOverrideSchema = z.object({\n provider: ProviderOverrideSchema.optional(),\n model: z.string().optional(),\n maxTurns: z.number().int().positive().optional(),\n});\n\n/** Per-task config override — stored in myco.yaml under agent.tasks. */\nconst TaskProviderOverrideSchema = z.object({\n provider: ProviderOverrideSchema.optional(),\n model: z.string().optional(),\n maxTurns: z.number().int().positive().optional(),\n timeoutSeconds: z.number().int().positive().optional(),\n phases: z.record(z.string(), PhaseOverrideSchema).optional(),\n});\n\nconst ContextSchema = z.object({\n /** Which digest tier to inject at session start. */\n digest_tier: z.number().int().default(5000),\n /** Enable semantic spore search on each user prompt. */\n prompt_search: z.boolean().default(true),\n /** Max spores to inject per prompt (0-10). 
*/\n prompt_max_spores: z.number().int().min(0).max(10).default(3),\n});\n\nconst AgentSchema = z.object({\n /** Whether the daemon automatically runs the agent on unprocessed batches. */\n auto_run: z.boolean().default(true),\n /** Seconds between agent timer checks. */\n interval_seconds: z.number().int().positive().default(300),\n /** Number of batches between event-driven summary triggers (0 to disable). */\n summary_batch_interval: z.number().int().min(0).default(5),\n /** Global default provider — applies to all tasks unless overridden per-task. */\n provider: ProviderOverrideSchema.optional(),\n /** Global default model — applies to all tasks unless overridden per-task. */\n model: z.string().optional(),\n /** Per-task overrides keyed by task name. */\n tasks: z.record(z.string(), TaskProviderOverrideSchema).optional(),\n});\n\nconst BackupSchema = z.object({\n /** Override directory for backup files. Supports ~ for home directory. When unset, defaults to .myco/backups. */\n dir: z.string().optional(),\n});\n\nconst TeamSchema = z.object({\n /** Whether team sync is enabled. */\n enabled: z.boolean().default(false),\n /** Cloudflare Worker URL for team sync. */\n worker_url: z.string().url().optional(),\n /** Team identifier for sync grouping. */\n team_id: z.string().optional(),\n /** Sync interval in minutes. */\n interval_minutes: z.number().int().min(1).max(1440).default(15),\n});\n\nexport const MycoConfigSchema = z.preprocess(\n (raw: unknown) => {\n if (raw && typeof raw === 'object' && 'curation' in raw && !('agent' in raw)) {\n const { curation, ...rest } = raw as Record<string, unknown>;\n return { ...rest, agent: curation };\n }\n return raw;\n },\n z.object({\n version: z.literal(3),\n config_version: z.number().int().nonnegative().default(0),\n embedding: EmbeddingProviderSchema.default(() => EmbeddingProviderSchema.parse({})),\n daemon: DaemonSchema.default(() => DaemonSchema.parse({})),\n capture: CaptureSchema.default(() => CaptureSchema.parse({})),\n agent: AgentSchema.default(() => AgentSchema.parse({})),\n context: ContextSchema.default(() => ContextSchema.parse({})),\n backup: BackupSchema.default(() => BackupSchema.parse({})),\n team: TeamSchema.default(() => TeamSchema.parse({})),\n }),\n);\n\nexport type MycoConfig = z.output<typeof MycoConfigSchema>;\nexport type EmbeddingProviderConfig = z.infer<typeof EmbeddingProviderSchema>;\nexport type TaskProviderOverride = z.infer<typeof TaskProviderOverrideSchema>;\nexport type PhaseOverride = z.infer<typeof PhaseOverrideSchema>;\nexport type ContextConfig = z.infer<typeof ContextSchema>;\nexport type BackupConfig = z.infer<typeof BackupSchema>;\nexport type TeamConfig = z.infer<typeof TeamSchema>;\n","/**\n * Config and vault migrations — run once per version, tracked by config_version.\n *\n * Each migration has a version number, a name, and a function that receives\n * the raw parsed YAML doc and the vault directory. Migrations run in order\n * and are skipped if config_version is already past them.\n *\n * To add a new migration:\n * 1. Add an entry to MIGRATIONS with the next version number\n * 2. Write the migrate function — it receives the mutable doc and vaultDir\n * 3. 
The framework handles version tracking and writing the config back\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nexport interface Migration {\n version: number;\n name: string;\n migrate: (doc: Record<string, unknown>, vaultDir: string) => void;\n}\n\n/** Regex matching both quoted and unquoted YAML: type: memory, type: \"memory\", type: 'memory' */\nconst MEMORY_TYPE_PATTERN = /type:\\s*[\"']?memory[\"']?/g;\n\nexport const MIGRATIONS: Migration[] = [\n {\n version: 1,\n name: 'rename-memories-to-spores',\n migrate: (doc, vaultDir) => {\n // Config: rename context.layers.memories → context.layers.spores\n const context = doc.context as Record<string, unknown> | undefined;\n const layers = context?.layers as Record<string, unknown> | undefined;\n if (layers && 'memories' in layers && !('spores' in layers)) {\n layers.spores = layers.memories;\n delete layers.memories;\n }\n\n // Vault: rename memories/ directory → spores/\n const memoriesDir = path.join(vaultDir, 'memories');\n const sporesDir = path.join(vaultDir, 'spores');\n\n if (!fs.existsSync(memoriesDir)) return;\n\n if (fs.existsSync(sporesDir)) {\n // Both exist (interrupted migration) — merge remaining files\n const moveRemaining = (srcDir: string, destDir: string): void => {\n for (const entry of fs.readdirSync(srcDir, { withFileTypes: true })) {\n const srcPath = path.join(srcDir, entry.name);\n const destPath = path.join(destDir, entry.name);\n if (entry.isDirectory()) {\n if (!fs.existsSync(destPath)) fs.mkdirSync(destPath, { recursive: true });\n moveRemaining(srcPath, destPath);\n } else if (!fs.existsSync(destPath)) {\n fs.renameSync(srcPath, destPath);\n }\n }\n };\n moveRemaining(memoriesDir, sporesDir);\n fs.rmSync(memoriesDir, { recursive: true, force: true });\n } else {\n fs.renameSync(memoriesDir, sporesDir);\n }\n\n // Update frontmatter type: memory → type: spore (handles quoted and unquoted)\n const walkUpdate = (dir: string): void => {\n for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) { walkUpdate(fullPath); continue; }\n if (!entry.name.endsWith('.md')) continue;\n const content = fs.readFileSync(fullPath, 'utf-8');\n MEMORY_TYPE_PATTERN.lastIndex = 0;\n if (MEMORY_TYPE_PATTERN.test(content)) {\n MEMORY_TYPE_PATTERN.lastIndex = 0;\n fs.writeFileSync(fullPath, content.replace(MEMORY_TYPE_PATTERN, 'type: spore'));\n }\n }\n };\n walkUpdate(sporesDir);\n\n // Legacy: update wikilink references in Markdown files (pre-SQLite migration): [[memories/...]] → [[spores/...]]\n const walkLinks = (dir: string): void => {\n for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) { walkLinks(fullPath); continue; }\n if (!entry.name.endsWith('.md')) continue;\n const content = fs.readFileSync(fullPath, 'utf-8');\n if (content.includes('memories/')) {\n fs.writeFileSync(fullPath, content.replace(/memories\\//g, 'spores/'));\n }\n }\n };\n walkLinks(vaultDir);\n },\n },\n {\n version: 2,\n name: 'consolidation-boolean-to-object',\n migrate: (doc) => {\n const digest = doc.digest as Record<string, unknown> | undefined;\n if (!digest) return;\n\n const consolidation = digest.consolidation;\n if (typeof consolidation === 'boolean') {\n digest.consolidation = { enabled: consolidation, max_tokens: 2048 };\n }\n },\n },\n];\n\n/** Current migration version — the highest version in MIGRATIONS. 
*/\nexport const CURRENT_MIGRATION_VERSION = MIGRATIONS[MIGRATIONS.length - 1]?.version ?? 0;\n\n/**\n * Run all pending migrations on the raw config doc.\n * Returns true if any migrations ran (caller should reindex).\n */\nexport function runMigrations(\n doc: Record<string, unknown>,\n vaultDir: string,\n log?: (message: string) => void,\n): boolean {\n const currentVersion = (doc.config_version as number) ?? 0;\n let ran = false;\n\n for (const migration of MIGRATIONS) {\n if (migration.version <= currentVersion) continue;\n\n migration.migrate(doc, vaultDir);\n doc.config_version = migration.version;\n ran = true;\n }\n\n if (ran) {\n const from = currentVersion;\n const to = (doc.config_version as number) ?? 0;\n log?.(`Migrated config from v${from} to v${to}`);\n }\n\n return ran;\n}\n"],"mappings":";;;;;;;;;;;;AAEA,kBAAiB;AAFjB,OAAOA,SAAQ;AACf,OAAOC,WAAU;;;ACCjB,IAAM,0BAA0B,iBAAE,OAAO;AAAA,EACvC,UAAU,iBAAE,KAAK,CAAC,UAAU,qBAAqB,cAAc,QAAQ,CAAC,EAAE,QAAQ,QAAQ;AAAA,EAC1F,OAAO,iBAAE,OAAO,EAAE,QAAQ,QAAQ;AAAA,EAClC,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AACtC,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA,EAC5B,MAAM,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,IAAI,EAAE,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA,EACnE,WAAW,iBAAE,KAAK,CAAC,SAAS,QAAQ,QAAQ,OAAO,CAAC,EAAE,QAAQ,MAAM;AAAA,EACpE,oBAAoB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG,EAAE,QAAQ,EAAE;AACjE,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA,EAC7B,kBAAkB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EAChD,WAAW,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EACzC,qBAAqB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,KAAK,CAAC;AAAA,EACxD,mBAAmB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAG;AAC5D,CAAC;AAGD,IAAM,yBAAyB,iBAAE,OAAO;AAAA,EACtC,MAAM,iBAAE,KAAK,CAAC,SAAS,UAAU,UAAU,CAAC;AAAA,EAC5C,UAAU,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AACvD,CAAC;AAGD,IAAM,sBAAsB,iBAAE,OAAO;AAAA,EACnC,UAAU,uBAAuB,SAAS;AAAA,EAC1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AACjD,CAAC;AAGD,IAAM,6BAA6B,iBAAE,OAAO;AAAA,EAC1C,UAAU,uBAAuB,SAAS;AAAA,EAC1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EAC/C,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EACrD,QAAQ,iBAAE,OAAO,iBAAE,OAAO,GAAG,mBAAmB,EAAE,SAAS;AAC7D,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA;AAAA,EAE7B,aAAa,iBAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,GAAI;AAAA;AAAA,EAE1C,eAAe,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAEvC,mBAAmB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,EAAE,QAAQ,CAAC;AAC9D,CAAC;AAED,IAAM,cAAc,iBAAE,OAAO;AAAA;AAAA,EAE3B,UAAU,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAElC,kBAAkB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAG;AAAA;AAAA,EAEzD,wBAAwB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,QAAQ,CAAC;AAAA;AAAA,EAEzD,UAAU,uBAAuB,SAAS;AAAA;AAAA,EAE1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,OAAO,iBAAE,OAAO,iBAAE,OAAO,GAAG,0BAA0B,EAAE,SAAS;AACnE,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA;AAAA,EAE5B,KAAK,iBAAE,OAAO,EAAE,SAAS;AAC3B,CAAC;AAED,IAAM,aAAa,iBAAE,OAAO;AAAA;AAAA,EAE1B,SAAS,iBAAE,QAAQ,EAAE,QAAQ,KAAK;AAAA;AAAA,EAElC,YAAY,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,EAEtC,SAAS,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE7B,kBAAkB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,QAAQ,EAAE;AAChE,CAAC;AAEM,IAAM,mBAAmB,iBAAE;AAAA,EAChC,CAAC,QAAiB;AAChB,QAAI,OAAO,OAAO,QAAQ,YAAY,cAAc,OAAO,EAAE,WAAW,MAAM;AAC5E,YAAM,EAAE,UAAU,GAAG,KAAK,IAAI;AAC9B,aAAO,EAAE,GAAG,MAAM,OAAO,SAAS;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EACA,iBAAE,OAAO;AAAA,IACP,SAAS,iBAAE,QAAQ,CAAC;AAAA,IACpB,gBAAgB,
iBAAE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,CAAC;AAAA,IACxD,WAAW,wBAAwB,QAAQ,MAAM,wBAAwB,MAAM,CAAC,CAAC,CAAC;AAAA,IAClF,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,SAAS,cAAc,QAAQ,MAAM,cAAc,MAAM,CAAC,CAAC,CAAC;AAAA,IAC5D,OAAO,YAAY,QAAQ,MAAM,YAAY,MAAM,CAAC,CAAC,CAAC;AAAA,IACtD,SAAS,cAAc,QAAQ,MAAM,cAAc,MAAM,CAAC,CAAC,CAAC;AAAA,IAC5D,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,MAAM,WAAW,QAAQ,MAAM,WAAW,MAAM,CAAC,CAAC,CAAC;AAAA,EACrD,CAAC;AACH;;;AC5FA,OAAO,QAAQ;AACf,OAAO,UAAU;AASjB,IAAM,sBAAsB;AAErB,IAAM,aAA0B;AAAA,EACrC;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,SAAS,CAAC,KAAK,aAAa;AAE1B,YAAM,UAAU,IAAI;AACpB,YAAM,SAAS,SAAS;AACxB,UAAI,UAAU,cAAc,UAAU,EAAE,YAAY,SAAS;AAC3D,eAAO,SAAS,OAAO;AACvB,eAAO,OAAO;AAAA,MAChB;AAGA,YAAM,cAAc,KAAK,KAAK,UAAU,UAAU;AAClD,YAAM,YAAY,KAAK,KAAK,UAAU,QAAQ;AAE9C,UAAI,CAAC,GAAG,WAAW,WAAW,EAAG;AAEjC,UAAI,GAAG,WAAW,SAAS,GAAG;AAE5B,cAAM,gBAAgB,CAAC,QAAgB,YAA0B;AAC/D,qBAAW,SAAS,GAAG,YAAY,QAAQ,EAAE,eAAe,KAAK,CAAC,GAAG;AACnE,kBAAM,UAAU,KAAK,KAAK,QAAQ,MAAM,IAAI;AAC5C,kBAAM,WAAW,KAAK,KAAK,SAAS,MAAM,IAAI;AAC9C,gBAAI,MAAM,YAAY,GAAG;AACvB,kBAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,IAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AACxE,4BAAc,SAAS,QAAQ;AAAA,YACjC,WAAW,CAAC,GAAG,WAAW,QAAQ,GAAG;AACnC,iBAAG,WAAW,SAAS,QAAQ;AAAA,YACjC;AAAA,UACF;AAAA,QACF;AACA,sBAAc,aAAa,SAAS;AACpC,WAAG,OAAO,aAAa,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,MACzD,OAAO;AACL,WAAG,WAAW,aAAa,SAAS;AAAA,MACtC;AAGA,YAAM,aAAa,CAAC,QAAsB;AACxC,mBAAW,SAAS,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC,GAAG;AAChE,gBAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAI,MAAM,YAAY,GAAG;AAAE,uBAAW,QAAQ;AAAG;AAAA,UAAU;AAC3D,cAAI,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACjC,gBAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,8BAAoB,YAAY;AAChC,cAAI,oBAAoB,KAAK,OAAO,GAAG;AACrC,gCAAoB,YAAY;AAChC,eAAG,cAAc,UAAU,QAAQ,QAAQ,qBAAqB,aAAa,CAAC;AAAA,UAChF;AAAA,QACF;AAAA,MACF;AACA,iBAAW,SAAS;AAGpB,YAAM,YAAY,CAAC,QAAsB;AACvC,mBAAW,SAAS,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC,GAAG;AAChE,gBAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAI,MAAM,YAAY,GAAG;AAAE,sBAAU,QAAQ;AAAG;AAAA,UAAU;AAC1D,cAAI,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACjC,gBAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,cAAI,QAAQ,SAAS,WAAW,GAAG;AACjC,eAAG,cAAc,UAAU,QAAQ,QAAQ,eAAe,SAAS,CAAC;AAAA,UACtE;AAAA,QACF;AAAA,MACF;AACA,gBAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,SAAS,CAAC,QAAQ;AAChB,YAAM,SAAS,IAAI;AACnB,UAAI,CAAC,OAAQ;AAEb,YAAM,gBAAgB,OAAO;AAC7B,UAAI,OAAO,kBAAkB,WAAW;AACtC,eAAO,gBAAgB,EAAE,SAAS,eAAe,YAAY,KAAK;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AACF;AAGO,IAAM,4BAA4B,WAAW,WAAW,SAAS,CAAC,GAAG,WAAW;AAMhF,SAAS,cACd,KACA,UACA,KACS;AACT,QAAM,iBAAkB,IAAI,kBAA6B;AACzD,MAAI,MAAM;AAEV,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,WAAW,eAAgB;AAEzC,cAAU,QAAQ,KAAK,QAAQ;AAC/B,QAAI,iBAAiB,UAAU;AAC/B,UAAM;AAAA,EACR;AAEA,MAAI,KAAK;AACP,UAAM,OAAO;AACb,UAAM,KAAM,IAAI,kBAA6B;AAC7C,UAAM,yBAAyB,IAAI,QAAQ,EAAE,EAAE;AAAA,EACjD;AAEA,SAAO;AACT;;;AFtIO,IAAM,kBAAkB;AAExB,SAAS,WAAW,UAA8B;AACvD,QAAM,aAAaC,MAAK,KAAK,UAAU,eAAe;AAEtD,MAAI,CAACC,IAAG,WAAW,UAAU,GAAG;AAC9B,UAAM,IAAI,MAAM,0BAA0B,QAAQ,EAAE;AAAA,EACtD;AAEA,QAAM,MAAMA,IAAG,aAAa,YAAY,OAAO;AAC/C,QAAM,SAAS,YAAAC,QAAK,MAAM,GAAG;AAG7B,MAAI,OAAO,YAAY,KAAM,OAAO,cAA0C,SAAS;AACrF,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAGA,MAAI,aAAa;AACjB,MAAI,OAAO,YAAY,GAAG;AAExB,UAAM,QAAQ,OAAO;AACrB,UAAM,kBAAkB,OAAO;AAC/B,QAAI,mBAAmB,CAAC,OAAO,WAAW;AAExC,UAAI,gBAAgB,aAAa,aAAa;AAC5C,wBAAgB,WAAW;AAAA,MAC7B;AACA,aAAO,YAAY;AAAA,IACrB;AAGA,UAAM,SAAS,OAAO;AACtB,QAAI,QAAQ;AACV,YAAM,EAAE,MAAM,UAAU,IAAI;AAC5B,aAAO,SAAS,EAAE,MAAM,QAAQ,MAAM,WAAW,aAAa,OAAO;AAAA,IACvE;AAGA,UAAM,UAAU,OAAO;AACvB,QAAI,SAAS;AACX,YAAM,EAAE,kBAAkB,gBAAgB,WAAW,qBAAqB,kBAAk
B,IAAI;AAChG,aAAO,UAAU;AAAA,QACf;AAAA,QACA,WAAW,aAAa;AAAA,QACxB;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AAGd,WAAO,UAAU;AACjB,iBAAa;AAEb,YAAQ,OAAO,MAAM,kDAAkD;AAAA,EACzE;AAGA,QAAM,gBAAgB,cAAc,QAAQ,UAAU,CAAC,QAAQ;AAC7D,YAAQ,OAAO,MAAM,oBAAoB,GAAG;AAAA,CAAI;AAAA,EAClD,CAAC;AAGD,QAAM,SAAS,iBAAiB,MAAM,MAAM;AAG5C,QAAM,aAAa,cACd,kBACC,OAAO,kBAA4B,KAAK,6BACzC,OAAO,YAAY,OAAO;AAE/B,MAAI,YAAY;AACd,UAAM,aAAa,KAAK,MAAM,KAAK,UAAU,MAAM,CAAC;AACpD,IAAAD,IAAG,cAAc,YAAY,YAAAC,QAAK,UAAU,UAAU,GAAG,OAAO;AAAA,EAClE;AAEA,SAAO;AACT;AAEO,SAAS,WAAW,UAAkB,QAA0B;AAErE,QAAM,YAAY,iBAAiB,MAAM,MAAM;AAE/C,QAAM,aAAaF,MAAK,KAAK,UAAU,eAAe;AACtD,EAAAC,IAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAC1C,EAAAA,IAAG,cAAc,YAAY,YAAAC,QAAK,UAAU,SAAS,GAAG,OAAO;AACjE;AAEO,SAAS,aACd,UACA,IACY;AACZ,QAAM,UAAU,WAAW,QAAQ;AACnC,QAAM,UAAU,GAAG,OAAO;AAC1B,aAAW,UAAU,OAAO;AAC5B,SAAO;AACT;AAEO,SAAS,mBACd,UACA,QACY;AACZ,SAAO,aAAa,UAAU,CAAC,YAAY;AAAA,IACzC,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,OAAO,QAAQ,GAAG,OAAO;AAAA,EACxC,EAAE;AACJ;AAEO,SAAS,iBACd,UACA,MACY;AACZ,SAAO,aAAa,UAAU,CAAC,YAAY;AAAA,IACzC,GAAG;AAAA,IACH,MAAM,EAAE,GAAG,OAAO,MAAM,GAAG,KAAK;AAAA,EAClC,EAAE;AACJ;","names":["fs","path","path","fs","YAML"]}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/constants/update.ts","../src/constants.ts"],"sourcesContent":["import path from 'node:path';\nimport os from 'node:os';\n\n/** npm registry URL for the Myco package. */\nexport const NPM_REGISTRY_URL = 'https://registry.npmjs.org/@goondocks/myco';\n\n/** Global Myco directory for machine-wide state. */\nexport const MYCO_GLOBAL_DIR = path.join(os.homedir(), '.myco');\n\n/** Path to the cached update check result. */\nexport const UPDATE_CHECK_CACHE_PATH = path.join(MYCO_GLOBAL_DIR, 'last-update-check.json');\n\n/** Path to the update configuration file (channel, interval). */\nexport const UPDATE_CONFIG_PATH = path.join(MYCO_GLOBAL_DIR, 'update.yaml');\n\n/** Path to the update error file (written by update script on failure). */\nexport const UPDATE_ERROR_PATH = path.join(MYCO_GLOBAL_DIR, 'update-error.json');\n\n/** Default check interval in hours. */\nexport const UPDATE_CHECK_INTERVAL_HOURS = 6;\n\n/** Milliseconds per hour. */\nexport const MS_PER_HOUR = 3_600_000;\n\n/** npm package name. */\nexport const NPM_PACKAGE_NAME = '@goondocks/myco';\n\n/** Delay in seconds before update script starts (allows daemon to exit). */\nexport const UPDATE_SCRIPT_DELAY_SECONDS = 2;\n\n/** Valid release channels. */\nexport const RELEASE_CHANNELS = ['stable', 'beta'] as const;\nexport type ReleaseChannel = (typeof RELEASE_CHANNELS)[number];\n\n/** Default release channel. */\nexport const DEFAULT_RELEASE_CHANNEL: ReleaseChannel = 'stable';\n","/**\n * Shared constants for the Myco codebase.\n * Per CLAUDE.md: \"No Magic Literals — Numeric and string constants\n * MUST NOT appear inline in logic.\"\n */\n\nexport { LOG_KINDS, type LogKind, kindToComponent } from './constants/log-kinds.js';\n\n// --- Agent phase prompt composition ---\n/**\n * Maximum chars per phase summary passed to subsequent phases.\n * Set to 4000 to ensure the digest-assess phase findings pass\n * untruncated to parallel tier phases.\n */\nexport const PHASE_SUMMARY_MAX_CHARS = 4000;\n\n// --- Token estimation ---\n/** Approximate characters per token for the chars/4 heuristic. */\nexport const CHARS_PER_TOKEN = 4;\n\n/** Estimate token count from character length using the CHARS_PER_TOKEN heuristic. */\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / CHARS_PER_TOKEN);\n}\n\n// --- Time (primitives — must precede derived constants) ---\n/** Milliseconds per second. */\nexport const MS_PER_SECOND = 1000;\n\n// --- Embedding ---\n/** Max characters of text sent to the embedding model. */\nexport const EMBEDDING_INPUT_LIMIT = 8000;\n\n/** Max rows per embedding worker cycle. */\nexport const EMBEDDING_BATCH_SIZE = 10;\n\n/** Content hash algorithm for staleness detection. */\nexport const CONTENT_HASH_ALGORITHM = 'sha256';\n\n// --- Truncation limits (display/preview) ---\n/** Max chars for a user prompt preview in event summaries. */\nexport const PROMPT_PREVIEW_CHARS = 300;\n/** Max chars for an AI response preview in event summaries. */\nexport const AI_RESPONSE_PREVIEW_CHARS = 500;\n/** Max chars for a command string preview. */\nexport const COMMAND_PREVIEW_CHARS = 80;\n/** Max chars for a content snippet in search results. */\nexport const CONTENT_SNIPPET_CHARS = 120;\n/** Max chars for a tool output preview in hooks. */\nexport const TOOL_OUTPUT_PREVIEW_CHARS = 200;\n/** Max chars for a session summary preview in MCP tools. */\nexport const SESSION_SUMMARY_PREVIEW_CHARS = 300;\n/** Max chars for a recall summary preview. 
*/\nexport const RECALL_SUMMARY_PREVIEW_CHARS = 200;\n/** Max chars for search result and hydrated context previews. */\nexport const SEARCH_PREVIEW_CHARS = 300;\n\n// --- Log preview limits (short previews for structured log fields) ---\n/** Max chars for a user prompt preview in log entries. */\nexport const LOG_PROMPT_PREVIEW_CHARS = 50;\n/** Max chars for an assistant message preview in log entries. */\nexport const LOG_MESSAGE_PREVIEW_CHARS = 80;\n/** Max chars for injected context preview in log entries. */\nexport const LOG_CONTEXT_PREVIEW_CHARS = 200;\n\n// --- Context injection layer budgets (chars, not tokens — used with .slice()) ---\nexport const CONTEXT_SESSION_PREVIEW_CHARS = 80;\nexport const CONTEXT_SPORE_PREVIEW_CHARS = 80;\n\n// --- Processor maxTokens budgets ---\n/** Response token budget for observation extraction. */\nexport const EXTRACTION_MAX_TOKENS = 2048;\n/** Response token budget for session summary. */\nexport const SUMMARY_MAX_TOKENS = 512;\n/** Response token budget for session title generation. */\nexport const TITLE_MAX_TOKENS = 32;\n\n// --- Timeouts ---\n/** Daemon client HTTP request timeout (ms). */\nexport const DAEMON_CLIENT_TIMEOUT_MS = 2000;\n/** Health check timeout (ms) — fail fast if daemon isn't responding. */\nexport const DAEMON_HEALTH_CHECK_TIMEOUT_MS = 500;\n/** LLM request timeout (ms). All LLM calls are background daemon work — no need to be aggressive. */\nexport const LLM_REQUEST_TIMEOUT_MS = 180_000;\n/** Embedding request timeout (ms). Embeddings run in background batch processing — generous timeout. */\nexport const EMBEDDING_REQUEST_TIMEOUT_MS = 60_000;\n/** Digest LLM request timeout (ms). Digest cycles use large context windows and may need model loading time. */\nexport const DIGEST_LLM_REQUEST_TIMEOUT_MS = 600_000;\n/** Stdin read timeout for hooks (ms). */\nexport const STDIN_TIMEOUT_MS = 100;\n/** Provider detection timeout for detect-providers CLI command (ms). */\nexport const PROVIDER_DETECT_TIMEOUT_MS = 3000;\n\n// --- Time ---\n/** Milliseconds in one day. */\nexport const MS_PER_DAY = 24 * 60 * 60 * 1000;\n\n/** Current Unix epoch in seconds. */\nexport function epochSeconds(): number {\n return Math.floor(Date.now() / MS_PER_SECOND);\n}\n\n// --- Buffer cleanup ---\n/** Max age for stale buffer files before cleanup (ms). */\nexport const STALE_BUFFER_MAX_AGE_MS = 1 * MS_PER_DAY;\n\n// --- Retry backoff ---\n/** Retry delays for daemon health check (ms). */\nexport const DAEMON_HEALTH_RETRY_DELAYS = [100, 200, 400, 800, 1500];\n\n/** Grace period after daemon.json is written before stale checks can trigger a restart (ms).\n * Prevents rapid restart loops from concurrent hooks or session reloads. */\nexport const DAEMON_STALE_GRACE_PERIOD_MS = 60_000;\n\n/** Grace period for SIGTERM before escalating to SIGKILL (ms).\n * Gives the old daemon a chance to shut down cleanly, but force-kills\n * to guarantee the configured port is reclaimed. */\nexport const DAEMON_EVICT_TIMEOUT_MS = 3000;\n/** Poll interval when waiting for an evicted daemon to die (ms). */\nexport const DAEMON_EVICT_POLL_MS = 100;\n\n// --- Slug limits ---\n/** Max length for slugified artifact IDs. */\n\n// --- Turn rendering ---\n/** Max file paths displayed per turn in session notes. */\nexport const TURN_MAX_FILES_DISPLAYED = 10;\n\n// --- Transcript mining ---\n/** Minimum content length to consider a transcript entry meaningful. 
*/\nexport const MIN_TRANSCRIPT_CONTENT_LENGTH = 10;\n\n// --- Graph edge types (lineage — auto-created by daemon) ---\n/** Spore was extracted during this session. */\nexport const EDGE_TYPE_FROM_SESSION = 'FROM_SESSION';\n/** Spore was extracted from this prompt batch. */\nexport const EDGE_TYPE_EXTRACTED_FROM = 'EXTRACTED_FROM';\n/** Wisdom spore was derived from (consolidated) this source spore. */\nexport const EDGE_TYPE_DERIVED_FROM = 'DERIVED_FROM';\n/** Session contains this prompt batch. */\nexport const EDGE_TYPE_HAS_BATCH = 'HAS_BATCH';\n// --- Query defaults ---\n/** Default row limit for query module list operations. */\nexport const QUERY_DEFAULT_LIST_LIMIT = 100;\n/** Default confidence score for graph edges. */\nexport const GRAPH_EDGE_DEFAULT_CONFIDENCE = 1.0;\n\n// --- Query limits ---\n/** Max recent sessions to check for lineage heuristics. */\nexport const LINEAGE_RECENT_SESSIONS_LIMIT = 5;\n/** Max related spores to query for session notes. */\nexport const RELATED_SPORES_LIMIT = 50;\n\n// --- Context injection ---\n/** Max spores to inject per prompt. */\nexport const PROMPT_CONTEXT_MAX_SPORES = 3;\n/** Minimum similarity score for prompt context injection (0-1). */\nexport const PROMPT_CONTEXT_MIN_SIMILARITY = 0.3;\n/** Max token budget for session-start context injection. */\nexport const SESSION_CONTEXT_MAX_TOKENS = 500;\n/** Max token budget for per-prompt context injection. */\nexport const PROMPT_CONTEXT_MAX_TOKENS = 300;\n/** Minimum prompt length to trigger context search. */\nexport const PROMPT_CONTEXT_MIN_LENGTH = 10;\n\n/** Over-fetch multiplier for vector search to compensate for post-filtering. */\nexport const PROMPT_VECTOR_OVER_FETCH = 2;\n\n// --- Spore status filtering ---\n/** Spore statuses excluded from search results and context injection. */\nexport const EXCLUDED_SPORE_STATUSES = new Set(['superseded', 'archived']);\n\n// --- Agent identity ---\n/** Default agent ID for the built-in intelligence agent. */\nexport const DEFAULT_AGENT_ID = 'myco-agent';\n/** Agent ID for user-initiated MCP operations. */\nexport const USER_AGENT_ID = 'user';\n/** Agent name for user-initiated MCP operations. */\nexport const USER_AGENT_NAME = 'User (MCP)';\n\n// --- MCP tool defaults ---\n/** Default result limit for myco_search. */\nexport const MCP_SEARCH_DEFAULT_LIMIT = 10;\n/** Default result limit for myco_sessions. */\nexport const MCP_SESSIONS_DEFAULT_LIMIT = 20;\n/** Default result limit for myco_logs. */\nexport const MCP_LOGS_DEFAULT_LIMIT = 50;\n\n// --- Feed ---\n/** Default number of entries returned by the activity feed. */\nexport const FEED_DEFAULT_LIMIT = 50;\n\n// --- Digest — Tiers ---\n/** Available token-budget tiers for digest synthesis. */\nexport const DIGEST_TIERS = [1500, 5000, 10000] as const;\nexport type DigestTier = (typeof DIGEST_TIERS)[number];\n\n// --- Digest — Context window minimums per tier ---\n/** Minimum context window (tokens) required to run a digest at a given tier. */\nexport const DIGEST_TIER_MIN_CONTEXT: Record<number, number> = {\n 1500: 6500,\n 5000: 18500,\n 10000: 30500,\n};\n\n// --- Digest — Substrate ---\n/** Default minimum substrate notes required before a digest cycle runs. */\nexport const DIGEST_MIN_NOTES_FOR_CYCLE = 10;\n\n/** Scoring weights by note type when selecting substrate for synthesis. 
*/\nexport const DIGEST_SUBSTRATE_TYPE_WEIGHTS: Record<string, number> = {\n session: 3,\n spore: 3,\n plan: 2,\n artifact: 1,\n team: 1,\n};\n\n// --- LLM reasoning control ---\n/** Reasoning mode for all Myco LLM calls. Suppresses chain-of-thought tokens from reasoning models. */\nexport const LLM_REASONING_MODE = 'off' as const;\n\n// --- Digest — System prompt overhead estimate ---\n\n// --- Vault intelligence ---\n/** Max candidate spores after post-filtering for supersession check. */\nexport const SUPERSESSION_CANDIDATE_LIMIT = 5;\n\n/** Over-fetch from vector index before post-filtering by status/type. */\nexport const SUPERSESSION_VECTOR_FETCH_LIMIT = 20;\n\n/** Max output tokens for supersession LLM evaluation. */\nexport const SUPERSESSION_MAX_TOKENS = 256;\n\n/** Similarity threshold for clustering related spores in batch agent processing. */\nexport const AGENT_CLUSTER_SIMILARITY = 0.75;\n\n// --- Search ---\n/** Default number of results returned by vector search and fullTextSearch. */\nexport const SEARCH_RESULTS_DEFAULT_LIMIT = 20;\n/** Minimum cosine similarity score for semantic search results (0-1). */\nexport const SEARCH_SIMILARITY_THRESHOLD = 0.3;\n\n// --- Pipeline processing ---\n/** Default page size for pipeline items API listing. */\nexport const PIPELINE_ITEMS_DEFAULT_LIMIT = 50;\n\n// --- Pipeline retry ---\n/** Max retries for parse (structural) pipeline failures — fail fast. */\nexport const PIPELINE_PARSE_MAX_RETRIES = 1;\n/** Exponential backoff multiplier for successive pipeline retries. */\nexport const PIPELINE_BACKOFF_MULTIPLIER = 4;\n\n// --- Pipeline stages (ordered) ---\nexport const PIPELINE_STAGES = ['capture', 'extraction', 'embedding', 'consolidation', 'digest'] as const;\nexport type PipelineStage = typeof PIPELINE_STAGES[number];\n\n// --- Pipeline statuses ---\nexport const PIPELINE_STATUSES = ['pending', 'processing', 'succeeded', 'failed', 'blocked', 'skipped', 'poisoned'] as const;\nexport type PipelineStatus = typeof PIPELINE_STATUSES[number];\n\n// --- Provider roles for circuit breakers ---\nexport const PIPELINE_PROVIDER_ROLES = ['llm', 'embedding', 'digest-llm'] as const;\nexport type PipelineProviderRole = typeof PIPELINE_PROVIDER_ROLES[number];\n\n// --- Stage to provider role mapping ---\nexport const STAGE_PROVIDER_MAP: Record<PipelineStage, PipelineProviderRole | null> = {\n capture: null,\n extraction: 'llm',\n embedding: 'embedding',\n consolidation: 'digest-llm',\n digest: 'digest-llm',\n};\n\n/**\n * Stages processed by the pipeline tick timer.\n * Capture is handled at registration time, digest is gated by the metabolism timer.\n */\nexport const PIPELINE_TICK_STAGES: PipelineStage[] = ['extraction', 'embedding', 'consolidation'];\n\n// --- Item type to applicable stages ---\n// Sessions skip consolidation — consolidation applies to the spores\n// extracted FROM sessions, not the session work item itself.\n// Lineage detection stays outside the pipeline (fire-and-forget, non-critical).\nexport const ITEM_STAGE_MAP: Record<string, PipelineStage[]> = {\n session: ['capture', 'extraction', 'embedding', 'digest'],\n spore: ['capture', 'embedding', 'consolidation', 'digest'],\n artifact: ['capture', 'embedding', 'digest'],\n};\n\n// --- User task registry ---\n/** Subdirectory within the vault for user-created task YAML files. */\nexport const USER_TASKS_DIR = 'tasks';\n\n/** Source label for user-created tasks. */\nexport const USER_TASK_SOURCE = 'user';\n\n/** Source label for built-in tasks shipped with the package. 
*/\nexport const BUILT_IN_SOURCE = 'built-in';\n\n/** Task name validation pattern (lowercase, hyphens, digits). */\nexport const TASK_NAME_PATTERN = /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/;\n\n/** Maximum length for task names. */\nexport const MAX_TASK_NAME_LENGTH = 50;\n\n// --- Automatic consolidation ---\n/** Minimum cluster size required before asking LLM to consolidate. */\nexport const CONSOLIDATION_MIN_CLUSTER_SIZE = 3;\n\n/** Over-fetch from vector index before post-filtering by status/type. */\nexport const CONSOLIDATION_VECTOR_FETCH_LIMIT = 20;\n\n/** Max output tokens for consolidation LLM synthesis.\n * Must be large enough for the full JSON response including content field. */\nexport const CONSOLIDATION_MAX_TOKENS = 2048;\n\n// --- Power management ---\n/** Time without activity before transitioning to idle (ms). */\nexport const POWER_IDLE_THRESHOLD_MS = 5 * 60 * MS_PER_SECOND;\n/** Time without activity before transitioning to sleep (ms). */\nexport const POWER_SLEEP_THRESHOLD_MS = 30 * 60 * MS_PER_SECOND;\n/** Time without activity before transitioning to deep sleep (ms). */\nexport const POWER_DEEP_SLEEP_THRESHOLD_MS = 90 * 60 * MS_PER_SECOND;\n/** Job cycle interval during active/idle states (ms). */\nexport const POWER_ACTIVE_INTERVAL_MS = 60 * MS_PER_SECOND;\n/** Job cycle interval during sleep state (ms). */\nexport const POWER_SLEEP_INTERVAL_MS = 5 * 60 * MS_PER_SECOND;\n\n// --- Session maintenance ---\n/** Time without new prompts before an active session is auto-completed (ms). */\nexport const STALE_SESSION_THRESHOLD_MS = 60 * 60 * MS_PER_SECOND;\n/** Max prompt count for a session to be considered dead and auto-deleted. */\nexport const DEAD_SESSION_MAX_PROMPTS = 1;\n\n// --- Init wizard ---\n/** Minimum Node.js major version required by Myco. */\nexport const MIN_NODE_MAJOR_VERSION = 22;\n\n/** Recommended context window for local intelligence models. */\nexport const RECOMMENDED_LOCAL_CONTEXT_WINDOW = 8192;\n\n/** Default Ollama embedding model recommended during init. */\nexport const DEFAULT_OLLAMA_EMBEDDING_MODEL = 'bge-m3';\n\n/** Default OpenAI embedding model recommended during init. */\nexport const DEFAULT_OPENAI_EMBEDDING_MODEL = 'text-embedding-3-small';\n\n// --- Sync protocol ---\n/** Protocol version for backup and team sync wire format. */\nexport const SYNC_PROTOCOL_VERSION = 1;\n\n// --- Team sync ---\n/** Default machine ID for rows created before multi-machine support. */\nexport const DEFAULT_MACHINE_ID = 'local';\n/** Prefix for team search result source attribution. */\nexport const TEAM_SOURCE_PREFIX = 'team:';\n/** Timeout for team search requests (ms). */\nexport const TEAM_SEARCH_TIMEOUT_MS = 3000;\n/** Timeout for team health check requests (ms). */\nexport const TEAM_HEALTH_TIMEOUT_MS = 5000;\n/** Secrets key for the team API key in secrets.env. */\nexport const TEAM_API_KEY_SECRET = 'MYCO_TEAM_API_KEY';\n/** Timeout for wrangler CLI commands (ms). */\nexport const WRANGLER_COMMAND_TIMEOUT_MS = 60_000;\n\n// --- HTTP response flush ---\n/** Delay before initiating shutdown — allows the HTTP response to flush. 
*/\nexport const RESTART_RESPONSE_FLUSH_MS = 500;\n\n// --- Self-update ---\nexport {\n NPM_REGISTRY_URL,\n MYCO_GLOBAL_DIR,\n UPDATE_CHECK_CACHE_PATH,\n UPDATE_CONFIG_PATH,\n UPDATE_ERROR_PATH,\n UPDATE_CHECK_INTERVAL_HOURS,\n MS_PER_HOUR,\n NPM_PACKAGE_NAME,\n UPDATE_SCRIPT_DELAY_SECONDS,\n RELEASE_CHANNELS,\n DEFAULT_RELEASE_CHANNEL,\n type ReleaseChannel,\n} from './constants/update.js';\n"],"mappings":";;;AAAA,OAAO,UAAU;AACjB,OAAO,QAAQ;AAGR,IAAM,mBAAmB;AAGzB,IAAM,kBAAkB,KAAK,KAAK,GAAG,QAAQ,GAAG,OAAO;AAGvD,IAAM,0BAA0B,KAAK,KAAK,iBAAiB,wBAAwB;AAGnF,IAAM,qBAAqB,KAAK,KAAK,iBAAiB,aAAa;AAGnE,IAAM,oBAAoB,KAAK,KAAK,iBAAiB,mBAAmB;AAGxE,IAAM,8BAA8B;AAGpC,IAAM,cAAc;AAGpB,IAAM,mBAAmB;AAGzB,IAAM,8BAA8B;AAGpC,IAAM,mBAAmB,CAAC,UAAU,MAAM;AAI1C,IAAM,0BAA0C;;;ACrBhD,IAAM,0BAA0B;AAIhC,IAAM,kBAAkB;AAGxB,SAAS,eAAe,MAAsB;AACnD,SAAO,KAAK,KAAK,KAAK,SAAS,eAAe;AAChD;AAIO,IAAM,gBAAgB;AAOtB,IAAM,uBAAuB;AAG7B,IAAM,yBAAyB;AAI/B,IAAM,uBAAuB;AAM7B,IAAM,wBAAwB;AAE9B,IAAM,4BAA4B;AAMlC,IAAM,uBAAuB;AAI7B,IAAM,2BAA2B;AAEjC,IAAM,4BAA4B;AAElC,IAAM,4BAA4B;AAGlC,IAAM,gCAAgC;AACtC,IAAM,8BAA8B;AAYpC,IAAM,2BAA2B;AAEjC,IAAM,iCAAiC;AAEvC,IAAM,yBAAyB;AAE/B,IAAM,+BAA+B;AAIrC,IAAM,mBAAmB;AAEzB,IAAM,6BAA6B;AAInC,IAAM,aAAa,KAAK,KAAK,KAAK;AAGlC,SAAS,eAAuB;AACrC,SAAO,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa;AAC9C;AAIO,IAAM,0BAA0B,IAAI;AAIpC,IAAM,6BAA6B,CAAC,KAAK,KAAK,KAAK,KAAK,IAAI;AAI5D,IAAM,+BAA+B;AAKrC,IAAM,0BAA0B;AAEhC,IAAM,uBAAuB;AAe7B,IAAM,yBAAyB;AAE/B,IAAM,2BAA2B;AAEjC,IAAM,yBAAyB;AAE/B,IAAM,sBAAsB;AAG5B,IAAM,2BAA2B;AAEjC,IAAM,gCAAgC;AAYtC,IAAM,gCAAgC;AAItC,IAAM,4BAA4B;AAElC,IAAM,4BAA4B;AAGlC,IAAM,2BAA2B;AAIjC,IAAM,0BAA0B,oBAAI,IAAI,CAAC,cAAc,UAAU,CAAC;AAIlE,IAAM,mBAAmB;AAEzB,IAAM,gBAAgB;AAEtB,IAAM,kBAAkB;AAIxB,IAAM,2BAA2B;AAEjC,IAAM,6BAA6B;AAMnC,IAAM,qBAAqB;AAI3B,IAAM,eAAe,CAAC,MAAM,KAAM,GAAK;AA6CvC,IAAM,+BAA+B;AAErC,IAAM,8BAA8B;AAmDpC,IAAM,iBAAiB;AAGvB,IAAM,mBAAmB;AAGzB,IAAM,kBAAkB;AAGxB,IAAM,oBAAoB;AAG1B,IAAM,uBAAuB;AAe7B,IAAM,0BAA0B,IAAI,KAAK;AAEzC,IAAM,2BAA2B,KAAK,KAAK;AAE3C,IAAM,gCAAgC,KAAK,KAAK;AAEhD,IAAM,2BAA2B,KAAK;AAEtC,IAAM,0BAA0B,IAAI,KAAK;AAIzC,IAAM,6BAA6B,KAAK,KAAK;AAE7C,IAAM,2BAA2B;AAOjC,IAAM,mCAAmC;AAGzC,IAAM,iCAAiC;AAGvC,IAAM,iCAAiC;AAIvC,IAAM,wBAAwB;AAI9B,IAAM,qBAAqB;AAE3B,IAAM,qBAAqB;AAE3B,IAAM,yBAAyB;AAE/B,IAAM,yBAAyB;AAE/B,IAAM,sBAAsB;AAE5B,IAAM,8BAA8B;AAIpC,IAAM,4BAA4B;","names":[]}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/db/schema.ts"],"sourcesContent":["/**\n * SQLite database schema -- all capture, intelligence, and agent state tables.\n *\n * Uses `CREATE TABLE IF NOT EXISTS` and `CREATE INDEX IF NOT EXISTS` throughout\n * for idempotency. Running `createSchema()` multiple times is always safe.\n *\n * Timestamp convention: all timestamps are INTEGER (Unix epoch seconds).\n * Content hashing: all `content_hash` columns are TEXT with UNIQUE constraint.\n * Embedding dimensions: 1024 (bge-m3 default) -- used by external sqlite-vec store.\n *\n * Vector columns live in a separate sqlite-vec virtual table, not inline.\n * Tables that participate in vector search carry an `embedded INTEGER DEFAULT 0`\n * flag so the embedder knows which rows still need vectors.\n */\n\nimport type { Database } from 'better-sqlite3';\nimport { epochSeconds, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n/** Current schema version -- fresh start for the SQLite era. */\nexport const SCHEMA_VERSION = 4;\n\n// Re-export for backwards compat (other modules import from schema.ts)\nexport { DEFAULT_MACHINE_ID };\n\n/** Embedding vector dimensions (bge-m3 default). */\nexport const EMBEDDING_DIMENSIONS = 1024;\n\n// ---------------------------------------------------------------------------\n// DDL statements\n// ---------------------------------------------------------------------------\n\nconst SCHEMA_VERSION_TABLE = `\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at INTEGER NOT NULL\n )`;\n\n// -- Capture Layer ----------------------------------------------------------\n\nconst SESSIONS_TABLE = `\n CREATE TABLE IF NOT EXISTS sessions (\n id TEXT PRIMARY KEY,\n agent TEXT NOT NULL,\n \"user\" TEXT,\n project_root TEXT,\n branch TEXT,\n started_at INTEGER NOT NULL,\n ended_at INTEGER,\n status TEXT DEFAULT 'active',\n prompt_count INTEGER DEFAULT 0,\n tool_count INTEGER DEFAULT 0,\n title TEXT,\n summary TEXT,\n transcript_path TEXT,\n parent_session_id TEXT,\n parent_session_reason TEXT,\n processed INTEGER DEFAULT 0,\n content_hash TEXT UNIQUE,\n created_at INTEGER NOT NULL,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst PROMPT_BATCHES_TABLE = `\n CREATE TABLE IF NOT EXISTS prompt_batches (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL REFERENCES sessions(id),\n prompt_number INTEGER,\n user_prompt TEXT,\n response_summary TEXT,\n classification TEXT,\n started_at INTEGER,\n ended_at INTEGER,\n status TEXT DEFAULT 'active',\n activity_count INTEGER DEFAULT 0,\n processed INTEGER DEFAULT 0,\n content_hash TEXT UNIQUE,\n created_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ACTIVITIES_TABLE = `\n CREATE TABLE IF NOT EXISTS activities (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL REFERENCES sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n tool_name TEXT NOT NULL,\n tool_input TEXT,\n tool_output_summary TEXT,\n file_path TEXT,\n files_affected TEXT,\n duration_ms INTEGER,\n success INTEGER DEFAULT 1,\n error_message TEXT,\n timestamp INTEGER NOT NULL,\n processed INTEGER DEFAULT 0,\n content_hash TEXT UNIQUE,\n created_at INTEGER NOT NULL\n )`;\n\nconst PLANS_TABLE = `\n CREATE TABLE IF NOT EXISTS plans (\n id TEXT PRIMARY KEY,\n status TEXT DEFAULT 'active',\n author TEXT,\n title TEXT,\n content TEXT,\n source_path TEXT,\n tags TEXT,\n session_id TEXT REFERENCES 
sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n content_hash TEXT,\n processed INTEGER DEFAULT 0,\n created_at INTEGER NOT NULL,\n updated_at INTEGER,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ARTIFACTS_TABLE = `\n CREATE TABLE IF NOT EXISTS artifacts (\n id TEXT PRIMARY KEY,\n artifact_type TEXT,\n source_path TEXT NOT NULL,\n title TEXT NOT NULL,\n content TEXT,\n last_captured_by TEXT,\n tags TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst TEAM_MEMBERS_TABLE = `\n CREATE TABLE IF NOT EXISTS team_members (\n id TEXT PRIMARY KEY,\n \"user\" TEXT NOT NULL,\n role TEXT,\n joined TEXT,\n tags TEXT,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ATTACHMENTS_TABLE = `\n CREATE TABLE IF NOT EXISTS attachments (\n id TEXT PRIMARY KEY,\n session_id TEXT REFERENCES sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n file_path TEXT NOT NULL,\n media_type TEXT,\n description TEXT,\n data BLOB,\n content_hash TEXT,\n created_at INTEGER NOT NULL\n )`;\n\n// -- Intelligence Layer -----------------------------------------------------\n\nconst AGENTS_TABLE = `\n CREATE TABLE IF NOT EXISTS agents (\n id TEXT PRIMARY KEY,\n name TEXT NOT NULL,\n provider TEXT,\n model TEXT,\n system_prompt_hash TEXT,\n config TEXT,\n source TEXT NOT NULL DEFAULT 'built-in',\n system_prompt TEXT,\n max_turns INTEGER,\n timeout_seconds INTEGER,\n tool_access TEXT,\n enabled INTEGER NOT NULL DEFAULT 1,\n created_at INTEGER NOT NULL,\n updated_at INTEGER\n )`;\n\nconst SPORES_TABLE = `\n CREATE TABLE IF NOT EXISTS spores (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n session_id TEXT REFERENCES sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n observation_type TEXT NOT NULL,\n status TEXT DEFAULT 'active',\n content TEXT NOT NULL,\n context TEXT,\n importance INTEGER DEFAULT 5,\n file_path TEXT,\n tags TEXT,\n content_hash TEXT UNIQUE,\n properties TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ENTITIES_TABLE = `\n CREATE TABLE IF NOT EXISTS entities (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n properties TEXT,\n first_seen INTEGER NOT NULL,\n last_seen INTEGER NOT NULL,\n status TEXT DEFAULT 'active',\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER,\n UNIQUE (agent_id, type, name)\n )`;\n\nconst GRAPH_EDGES_TABLE = `\n CREATE TABLE IF NOT EXISTS graph_edges (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n source_id TEXT NOT NULL,\n source_type TEXT NOT NULL,\n target_id TEXT NOT NULL,\n target_type TEXT NOT NULL,\n type TEXT NOT NULL,\n session_id TEXT,\n confidence REAL DEFAULT 1.0,\n properties TEXT,\n created_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ENTITY_MENTIONS_TABLE = `\n CREATE TABLE IF NOT EXISTS entity_mentions (\n entity_id TEXT NOT NULL REFERENCES entities(id),\n note_id TEXT NOT NULL,\n note_type TEXT NOT NULL,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER,\n UNIQUE (entity_id, note_id, note_type, agent_id)\n )`;\n\nconst RESOLUTION_EVENTS_TABLE = 
`\n CREATE TABLE IF NOT EXISTS resolution_events (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n spore_id TEXT NOT NULL REFERENCES spores(id),\n action TEXT NOT NULL,\n new_spore_id TEXT,\n reason TEXT,\n session_id TEXT,\n created_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst DIGEST_EXTRACTS_TABLE = `\n CREATE TABLE IF NOT EXISTS digest_extracts (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n tier INTEGER NOT NULL,\n content TEXT NOT NULL,\n substrate_hash TEXT,\n generated_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER,\n UNIQUE (agent_id, tier)\n )`;\n\n// -- Agent State Layer ------------------------------------------------------\n\nconst AGENT_RUNS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_runs (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n task TEXT,\n instruction TEXT,\n status TEXT DEFAULT 'pending',\n started_at INTEGER,\n completed_at INTEGER,\n tokens_used INTEGER,\n cost_usd REAL,\n actions_taken TEXT,\n error TEXT\n )`;\n\nconst AGENT_REPORTS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_reports (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n run_id TEXT NOT NULL REFERENCES agent_runs(id),\n agent_id TEXT NOT NULL REFERENCES agents(id),\n action TEXT NOT NULL,\n summary TEXT NOT NULL,\n details TEXT,\n created_at INTEGER NOT NULL\n )`;\n\nconst AGENT_TURNS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_turns (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n run_id TEXT NOT NULL REFERENCES agent_runs(id),\n agent_id TEXT NOT NULL REFERENCES agents(id),\n turn_number INTEGER NOT NULL,\n tool_name TEXT NOT NULL,\n tool_input TEXT,\n tool_output_summary TEXT,\n started_at INTEGER,\n completed_at INTEGER\n )`;\n\nconst AGENT_TASKS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_tasks (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n source TEXT NOT NULL DEFAULT 'built-in',\n display_name TEXT,\n description TEXT,\n prompt TEXT NOT NULL,\n is_default INTEGER DEFAULT 0,\n tool_overrides TEXT,\n model TEXT,\n config TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER\n )`;\n\nconst AGENT_STATE_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_state (\n agent_id TEXT NOT NULL REFERENCES agents(id),\n key TEXT NOT NULL,\n value TEXT NOT NULL,\n updated_at INTEGER NOT NULL,\n PRIMARY KEY (agent_id, key)\n )`;\n\n// -- Sync Layer -------------------------------------------------------------\n\nconst TEAM_OUTBOX_TABLE = `\n CREATE TABLE IF NOT EXISTS team_outbox (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n table_name TEXT NOT NULL,\n row_id TEXT NOT NULL,\n operation TEXT NOT NULL DEFAULT 'upsert',\n payload TEXT NOT NULL,\n machine_id TEXT NOT NULL,\n created_at INTEGER NOT NULL,\n sent_at INTEGER\n )`;\n\n// -- Logging Layer ----------------------------------------------------------\n\nconst LOG_ENTRIES_TABLE = `\n CREATE TABLE IF NOT EXISTS log_entries (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n timestamp TEXT NOT NULL,\n level TEXT NOT NULL,\n component TEXT NOT NULL,\n kind TEXT NOT NULL,\n message TEXT NOT NULL,\n data TEXT,\n session_id TEXT\n )`;\n\n// -- FTS5 Virtual Tables ----------------------------------------------------\n\nconst FTS_TABLES = [\n `CREATE VIRTUAL TABLE IF NOT EXISTS prompt_batches_fts\n USING fts5(user_prompt, content='prompt_batches', content_rowid='id')`,\n\n `CREATE VIRTUAL TABLE IF NOT EXISTS activities_fts\n USING fts5(tool_name, tool_input, file_path, 
content='activities', content_rowid='id')`,\n\n `CREATE VIRTUAL TABLE IF NOT EXISTS log_entries_fts\n USING fts5(message, content='log_entries', content_rowid='id')`,\n\n // FTS5 sync triggers for log_entries (external-content table)\n `CREATE TRIGGER IF NOT EXISTS log_entries_ai AFTER INSERT ON log_entries BEGIN\n INSERT INTO log_entries_fts(rowid, message) VALUES (new.id, new.message);\n END`,\n\n `CREATE TRIGGER IF NOT EXISTS log_entries_ad AFTER DELETE ON log_entries BEGIN\n INSERT INTO log_entries_fts(log_entries_fts, rowid, message) VALUES('delete', old.id, old.message);\n END`,\n];\n\n// -- Indexes ----------------------------------------------------------------\n\nconst SECONDARY_INDEXES = [\n // Sessions\n 'CREATE INDEX IF NOT EXISTS idx_sessions_status ON sessions (status)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_processed ON sessions (processed)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_started_at ON sessions (started_at)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_agent ON sessions (agent)',\n\n // Prompt batches\n 'CREATE INDEX IF NOT EXISTS idx_prompt_batches_session_id ON prompt_batches (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_prompt_batches_processed ON prompt_batches (processed)',\n 'CREATE INDEX IF NOT EXISTS idx_prompt_batches_status ON prompt_batches (status)',\n\n // Activities\n 'CREATE INDEX IF NOT EXISTS idx_activities_session_id ON activities (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_prompt_batch_id ON activities (prompt_batch_id)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_tool_name ON activities (tool_name)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_timestamp ON activities (timestamp)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_processed ON activities (processed)',\n\n // Spores\n 'CREATE INDEX IF NOT EXISTS idx_spores_agent_id ON spores (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_session_id ON spores (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_status ON spores (status)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_observation_type ON spores (observation_type)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_created_at ON spores (created_at)',\n\n // Entities\n 'CREATE INDEX IF NOT EXISTS idx_entities_agent_id ON entities (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_entities_type ON entities (type)',\n\n // Graph edges\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges (source_id, source_type)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges (target_id, target_type)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_type ON graph_edges (type)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_agent ON graph_edges (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_source_type ON graph_edges (source_id, type)',\n\n // Entity mentions\n 'CREATE INDEX IF NOT EXISTS idx_entity_mentions_entity_id ON entity_mentions (entity_id)',\n 'CREATE INDEX IF NOT EXISTS idx_entity_mentions_agent_id ON entity_mentions (agent_id)',\n\n // Resolution events\n 'CREATE INDEX IF NOT EXISTS idx_resolution_events_spore_id ON resolution_events (spore_id)',\n 'CREATE INDEX IF NOT EXISTS idx_resolution_events_agent_id ON resolution_events (agent_id)',\n\n // Digest extracts\n 'CREATE INDEX IF NOT EXISTS idx_digest_extracts_agent_id ON digest_extracts (agent_id)',\n\n // Agent runs\n 'CREATE INDEX IF NOT EXISTS idx_agent_runs_agent_id ON agent_runs (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_agent_runs_status ON agent_runs (status)',\n 'CREATE INDEX IF NOT EXISTS idx_agent_runs_agent_status 
ON agent_runs (agent_id, status)',\n\n // Agent reports\n 'CREATE INDEX IF NOT EXISTS idx_agent_reports_run_id ON agent_reports (run_id)',\n\n // Agent turns\n 'CREATE INDEX IF NOT EXISTS idx_agent_turns_run_id ON agent_turns (run_id)',\n\n // Agent tasks\n 'CREATE INDEX IF NOT EXISTS idx_agent_tasks_agent_id ON agent_tasks (agent_id)',\n\n // Plans\n 'CREATE INDEX IF NOT EXISTS idx_plans_session_id ON plans (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_source_path ON plans (source_path)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_content_hash ON plans (content_hash)',\n // Attachments\n 'CREATE INDEX IF NOT EXISTS idx_attachments_file_path ON attachments (file_path)',\n\n // Team outbox\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_pending ON team_outbox (sent_at, created_at)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_table_name ON team_outbox (table_name)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_row_lookup ON team_outbox (table_name, row_id)',\n\n // Machine ID (synced tables)\n 'CREATE INDEX IF NOT EXISTS idx_sessions_machine_id ON sessions (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_machine_id ON spores (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_machine_id ON graph_edges (machine_id)',\n\n // Log entries\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_timestamp ON log_entries (timestamp)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_level ON log_entries (level)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_component ON log_entries (component)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_kind ON log_entries (kind)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_session_id ON log_entries (session_id)',\n];\n\n// -- Ordered table creation -------------------------------------------------\n\nconst TABLE_DDLS = [\n SCHEMA_VERSION_TABLE,\n // Capture layer (order matters for FK references)\n SESSIONS_TABLE,\n PROMPT_BATCHES_TABLE,\n ACTIVITIES_TABLE,\n PLANS_TABLE,\n ARTIFACTS_TABLE,\n TEAM_MEMBERS_TABLE,\n ATTACHMENTS_TABLE,\n // Intelligence layer\n AGENTS_TABLE,\n SPORES_TABLE,\n ENTITIES_TABLE,\n GRAPH_EDGES_TABLE,\n ENTITY_MENTIONS_TABLE,\n RESOLUTION_EVENTS_TABLE,\n DIGEST_EXTRACTS_TABLE,\n // Agent state layer\n AGENT_RUNS_TABLE,\n AGENT_REPORTS_TABLE,\n AGENT_TURNS_TABLE,\n AGENT_TASKS_TABLE,\n AGENT_STATE_TABLE,\n // Sync layer\n TEAM_OUTBOX_TABLE,\n // Logging layer\n LOG_ENTRIES_TABLE,\n];\n\n// ---------------------------------------------------------------------------\n// Migrations\n// ---------------------------------------------------------------------------\n\n/**\n * Migrate a version-1 database to version-2.\n *\n * Version 2 adds:\n * - plans.session_id, plans.prompt_batch_id, plans.content_hash\n * - attachments.data, attachments.content_hash\n * - indexes: idx_plans_session_id, idx_plans_source_path, idx_plans_content_hash\n *\n * Each ALTER TABLE is wrapped in try/catch so re-running is safe -- SQLite\n * throws \"duplicate column name\" if the column already exists, which we ignore.\n */\nfunction migrateV1ToV2(db: Database): void {\n db.exec('BEGIN');\n try {\n const alterStatements = [\n 'ALTER TABLE plans ADD COLUMN session_id TEXT REFERENCES sessions(id)',\n 'ALTER TABLE plans ADD COLUMN prompt_batch_id INTEGER REFERENCES prompt_batches(id)',\n 'ALTER TABLE plans ADD COLUMN content_hash TEXT',\n 'ALTER TABLE attachments ADD COLUMN data BLOB',\n 'ALTER TABLE attachments ADD COLUMN content_hash TEXT',\n ];\n\n for (const stmt of alterStatements) {\n try {\n db.exec(stmt);\n } catch {\n // Column already exists -- 
safe to ignore on re-run\n }\n }\n\n // Indexes use IF NOT EXISTS so they are idempotent\n const newIndexes = [\n 'CREATE INDEX IF NOT EXISTS idx_plans_session_id ON plans (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_source_path ON plans (source_path)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_content_hash ON plans (content_hash)',\n 'CREATE INDEX IF NOT EXISTS idx_attachments_file_path ON attachments (file_path)',\n ];\n\n for (const idx of newIndexes) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(2, epochSeconds());\n\n db.exec('COMMIT');\n } catch (err) {\n db.exec('ROLLBACK');\n throw err;\n }\n}\n\n/**\n * Migrate a version-2 database to version-3.\n *\n * Version 3 adds:\n * - log_entries table\n * - log_entries_fts virtual table (FTS5)\n * - indexes: idx_log_entries_timestamp, _level, _component, _kind, _session_id\n *\n * Uses `CREATE ... IF NOT EXISTS` throughout for idempotency.\n */\nfunction migrateV2ToV3(db: Database): void {\n db.exec('BEGIN');\n try {\n db.exec(LOG_ENTRIES_TABLE);\n\n db.exec(\n `CREATE VIRTUAL TABLE IF NOT EXISTS log_entries_fts\n USING fts5(message, content='log_entries', content_rowid='id')`\n );\n\n // FTS5 sync triggers for log_entries\n db.exec(\n `CREATE TRIGGER IF NOT EXISTS log_entries_ai AFTER INSERT ON log_entries BEGIN\n INSERT INTO log_entries_fts(rowid, message) VALUES (new.id, new.message);\n END`\n );\n db.exec(\n `CREATE TRIGGER IF NOT EXISTS log_entries_ad AFTER DELETE ON log_entries BEGIN\n INSERT INTO log_entries_fts(log_entries_fts, rowid, message) VALUES('delete', old.id, old.message);\n END`\n );\n\n const newIndexes = [\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_timestamp ON log_entries (timestamp)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_level ON log_entries (level)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_component ON log_entries (component)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_kind ON log_entries (kind)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_session_id ON log_entries (session_id)',\n ];\n\n for (const idx of newIndexes) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(3, epochSeconds());\n\n db.exec('COMMIT');\n } catch (err) {\n db.exec('ROLLBACK');\n throw err;\n }\n}\n\n/**\n * Migrate a version-3 database to version-4.\n *\n * Version 4 adds multi-machine support:\n * - machine_id TEXT NOT NULL DEFAULT 'local' on all synced tables\n * - synced_at INTEGER on all synced tables\n * - team_outbox table + indexes\n * - machine_id indexes on high-traffic tables\n *\n * Backfills existing rows with the provided machineId.\n */\nfunction migrateV3ToV4(db: Database, machineId: string): void {\n db.exec('BEGIN');\n try {\n // Tables that need machine_id + synced_at columns\n const syncedTables = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'entity_mentions',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n 'team_members',\n ];\n\n for (const table of syncedTables) {\n try {\n db.exec(`ALTER TABLE ${table} ADD COLUMN machine_id TEXT NOT NULL DEFAULT 'local'`);\n } catch {\n // Column already exists -- safe to ignore on re-run\n }\n try {\n db.exec(`ALTER TABLE ${table} ADD COLUMN synced_at INTEGER`);\n } catch {\n // Column already exists -- safe to ignore on re-run\n }\n }\n\n // Backfill machine_id on existing rows\n for (const 
table of syncedTables) {\n db.prepare(`UPDATE ${table} SET machine_id = ? WHERE machine_id = 'local'`).run(machineId);\n }\n\n // Create team_outbox table\n db.exec(TEAM_OUTBOX_TABLE);\n\n // Create new indexes (IF NOT EXISTS for idempotency)\n const newIndexes = [\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_pending ON team_outbox (sent_at, created_at)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_table_name ON team_outbox (table_name)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_row_lookup ON team_outbox (table_name, row_id)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_machine_id ON sessions (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_machine_id ON spores (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_machine_id ON graph_edges (machine_id)',\n ];\n\n for (const idx of newIndexes) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(4, epochSeconds());\n\n db.exec('COMMIT');\n } catch (err) {\n db.exec('ROLLBACK');\n throw err;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Create all database tables, indexes, and record the schema version.\n *\n * Fully idempotent -- safe to call on every startup. Uses `IF NOT EXISTS`\n * for all DDL and `ON CONFLICT DO NOTHING` for the version row.\n *\n * @param db — better-sqlite3 Database instance.\n * @param machineId — machine identifier for backfilling existing rows during\n * v3→v4 migration. Defaults to `'local'` (tests, init).\n */\nexport function createSchema(db: Database, machineId: string = DEFAULT_MACHINE_ID): void {\n // Fast-path: skip if already at current version\n try {\n const row = db.prepare(\n 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1'\n ).get() as { version: number } | undefined;\n if (row?.version === SCHEMA_VERSION) return;\n // Migration path: version 1 → 2 (then fall through to check for 2 → 3)\n if (row?.version === 1) {\n migrateV1ToV2(db);\n }\n // Migration path: version 2 → 3\n const afterV1Migration = (db.prepare(\n 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1'\n ).get() as { version: number } | undefined)?.version ?? 0;\n if (afterV1Migration < 3) {\n migrateV2ToV3(db);\n }\n // Migration path: version 3 → 4\n const afterV2Migration = (db.prepare(\n 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1'\n ).get() as { version: number } | undefined)?.version ?? 
0;\n if (afterV2Migration < 4) {\n migrateV3ToV4(db, machineId);\n }\n return;\n } catch {\n // Table doesn't exist yet -- first run\n }\n\n for (const ddl of TABLE_DDLS) {\n db.exec(ddl);\n }\n\n for (const ddl of FTS_TABLES) {\n db.exec(ddl);\n }\n\n for (const idx of SECONDARY_INDEXES) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(SCHEMA_VERSION, epochSeconds());\n}\n"],"mappings":";;;;;;;AAmBO,IAAM,iBAAiB;AAMvB,IAAM,uBAAuB;AAMpC,IAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAQ7B,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyBvB,IAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmB7B,IAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmBzB,IAAM,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBpB,IAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgBxB,IAAM,qBAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW3B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAe1B,IAAM,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkBrB,IAAM,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBrB,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAevB,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiB1B,IAAM,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW9B,IAAM,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAchC,IAAM,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAe9B,IAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAezB,IAAM,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW5B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAa1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgB1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc1B,IAAM,aAAa;AAAA,EACjB;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA,EAIA;AAAA;AAAA;AAAA,EAIA;AAAA;AAAA;AAGF;AAIA,IAAM,oBAAoB;AAAA;AAAA,EAExB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAIA,IAAM,aAAa;AAAA,EACjB;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AACF;AAiBA,SAAS,cAAc,IAAoB;AACzC,KAAG,KAAK,OAAO;AACf,MAAI;AACF,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,QAAQ,iBAAiB;AAClC,UAAI;AACF,WAAG,KAAK,IAAI;AAAA,MACd,QAAQ;AAAA,MAER;AAAA,I
ACF;AAGA,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,OAAO,YAAY;AAC5B,SAAG,KAAK,GAAG;AAAA,IACb;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,GAAG,aAAa,CAAC;AAEvB,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,KAAK;AACZ,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAYA,SAAS,cAAc,IAAoB;AACzC,KAAG,KAAK,OAAO;AACf,MAAI;AACF,OAAG,KAAK,iBAAiB;AAEzB,OAAG;AAAA,MACD;AAAA;AAAA,IAEF;AAGA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF;AACA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF;AAEA,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,OAAO,YAAY;AAC5B,SAAG,KAAK,GAAG;AAAA,IACb;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,GAAG,aAAa,CAAC;AAEvB,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,KAAK;AACZ,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAaA,SAAS,cAAc,IAAc,WAAyB;AAC5D,KAAG,KAAK,OAAO;AACf,MAAI;AAEF,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,SAAS,cAAc;AAChC,UAAI;AACF,WAAG,KAAK,eAAe,KAAK,sDAAsD;AAAA,MACpF,QAAQ;AAAA,MAER;AACA,UAAI;AACF,WAAG,KAAK,eAAe,KAAK,+BAA+B;AAAA,MAC7D,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,eAAW,SAAS,cAAc;AAChC,SAAG,QAAQ,UAAU,KAAK,gDAAgD,EAAE,IAAI,SAAS;AAAA,IAC3F;AAGA,OAAG,KAAK,iBAAiB;AAGzB,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,OAAO,YAAY;AAC5B,SAAG,KAAK,GAAG;AAAA,IACb;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,GAAG,aAAa,CAAC;AAEvB,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,KAAK;AACZ,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAgBO,SAAS,aAAa,IAAc,YAAoB,oBAA0B;AAEvF,MAAI;AACF,UAAM,MAAM,GAAG;AAAA,MACb;AAAA,IACF,EAAE,IAAI;AACN,QAAI,KAAK,YAAY,eAAgB;AAErC,QAAI,KAAK,YAAY,GAAG;AACtB,oBAAc,EAAE;AAAA,IAClB;AAEA,UAAM,mBAAoB,GAAG;AAAA,MAC3B;AAAA,IACF,EAAE,IAAI,GAAuC,WAAW;AACxD,QAAI,mBAAmB,GAAG;AACxB,oBAAc,EAAE;AAAA,IAClB;AAEA,UAAM,mBAAoB,GAAG;AAAA,MAC3B;AAAA,IACF,EAAE,IAAI,GAAuC,WAAW;AACxD,QAAI,mBAAmB,GAAG;AACxB,oBAAc,IAAI,SAAS;AAAA,IAC7B;AACA;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,aAAW,OAAO,YAAY;AAC5B,OAAG,KAAK,GAAG;AAAA,EACb;AAEA,aAAW,OAAO,YAAY;AAC5B,OAAG,KAAK,GAAG;AAAA,EACb;AAEA,aAAW,OAAO,mBAAmB;AACnC,OAAG,KAAK,GAAG;AAAA,EACb;AAEA,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,gBAAgB,aAAa,CAAC;AACtC;","names":[]}
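
Note: the deleted map above inlines the full `src/db/schema.ts` source. Its migration chain (v1 → v2 → v3 → v4) leans on two idempotency tricks: `CREATE ... IF NOT EXISTS` for tables and indexes, and a swallowed error around each `ALTER TABLE`, because SQLite has no `ADD COLUMN IF NOT EXISTS`. A minimal sketch of that pattern, assuming only better-sqlite3 -- `addColumnIfMissing` is an illustrative name, not a package export, and where the shipped code ignores the error unconditionally, this sketch narrows it to the duplicate-column case:

```ts
import DatabaseConstructor from 'better-sqlite3';
import type { Database } from 'better-sqlite3';

// Illustrative helper (not from the package): make ALTER TABLE re-runnable
// by treating SQLite's "duplicate column name" error as success.
function addColumnIfMissing(db: Database, ddl: string): void {
  try {
    db.exec(ddl);
  } catch (err) {
    if (!/duplicate column name/i.test(String(err))) throw err;
  }
}

const db = new DatabaseConstructor(':memory:');
db.exec(`CREATE TABLE IF NOT EXISTS plans (id TEXT PRIMARY KEY)`);

// Running the same migration twice: the second pass is a no-op, not an error.
for (let i = 0; i < 2; i++) {
  addColumnIfMissing(db, `ALTER TABLE plans ADD COLUMN content_hash TEXT`);
}
```
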
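The same embedded source also shows the external-content FTS5 pattern used for `log_entries`: because `content='log_entries'` means the index keeps no copy of the text, deletes must replay the old row through the special `'delete'` insert command. A self-contained sketch of that trigger pair, again assuming better-sqlite3 (the demo row is invented):

```ts
import DatabaseConstructor from 'better-sqlite3';

const db = new DatabaseConstructor(':memory:');
db.exec(`CREATE TABLE log_entries (id INTEGER PRIMARY KEY, message TEXT NOT NULL)`);

// content='log_entries' makes this an external-content FTS table: it indexes
// the base table's text without storing a second copy of it.
db.exec(`CREATE VIRTUAL TABLE log_entries_fts
  USING fts5(message, content='log_entries', content_rowid='id')`);

// Inserts are mirrored into the index by a trigger...
db.exec(`CREATE TRIGGER log_entries_ai AFTER INSERT ON log_entries BEGIN
  INSERT INTO log_entries_fts(rowid, message) VALUES (new.id, new.message);
END`);

// ...and deletes must supply the old text via the 'delete' command,
// since the index has nothing to look it up from.
db.exec(`CREATE TRIGGER log_entries_ad AFTER DELETE ON log_entries BEGIN
  INSERT INTO log_entries_fts(log_entries_fts, rowid, message)
  VALUES ('delete', old.id, old.message);
END`);

db.prepare(`INSERT INTO log_entries (message) VALUES (?)`).run('executor started');
const hit = db.prepare(
  `SELECT rowid FROM log_entries_fts WHERE log_entries_fts MATCH ?`
).get('executor');
console.log(hit); // -> { rowid: 1 }
```
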
@@ -1 +0,0 @@
-{"version":3,"sources":["../src/db/queries/spores.ts"],"sourcesContent":["/**\n * Spore CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_MACHINE_ID } from '@myco/constants.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of spores returned by listSpores when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Default spore status for new spores. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default importance score for new spores. */\nexport const DEFAULT_IMPORTANCE = 5;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a spore. */\nexport interface SporeInsert {\n  id: string;\n  agent_id: string;\n  observation_type: string;\n  content: string;\n  created_at: number;\n  session_id?: string | null;\n  prompt_batch_id?: number | null;\n  status?: string;\n  context?: string | null;\n  importance?: number;\n  file_path?: string | null;\n  tags?: string | null;\n  content_hash?: string | null;\n  properties?: string | null;\n  updated_at?: number | null;\n  machine_id?: string;\n}\n\n/** Row shape returned from spore queries (all columns). */\nexport interface SporeRow {\n  id: string;\n  agent_id: string;\n  session_id: string | null;\n  prompt_batch_id: number | null;\n  observation_type: string;\n  status: string;\n  content: string;\n  context: string | null;\n  importance: number;\n  file_path: string | null;\n  tags: string | null;\n  content_hash: string | null;\n  properties: string | null;\n  embedded: number;\n  created_at: number;\n  updated_at: number | null;\n  machine_id: string;\n  synced_at: number | null;\n}\n\n/** Filter options for `listSpores`. */\nexport interface ListSporesOptions {\n  agent_id?: string;\n  observation_type?: string;\n  status?: string;\n  session_id?: string;\n  search?: string;\n  limit?: number;\n  offset?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst SPORE_COLUMNS = [\n  'id',\n  'agent_id',\n  'session_id',\n  'prompt_batch_id',\n  'observation_type',\n  'status',\n  'content',\n  'context',\n  'importance',\n  'file_path',\n  'tags',\n  'content_hash',\n  'properties',\n  'embedded',\n  'created_at',\n  'updated_at',\n  'machine_id',\n  'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = SPORE_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed SporeRow. */\nfunction toSporeRow(row: Record<string, unknown>): SporeRow {\n  return {\n    id: row.id as string,\n    agent_id: row.agent_id as string,\n    session_id: (row.session_id as string) ?? null,\n    prompt_batch_id: (row.prompt_batch_id as number) ?? null,\n    observation_type: row.observation_type as string,\n    status: row.status as string,\n    content: row.content as string,\n    context: (row.context as string) ?? null,\n    importance: row.importance as number,\n    file_path: (row.file_path as string) ?? null,\n    tags: (row.tags as string) ?? null,\n    content_hash: (row.content_hash as string) ?? null,\n    properties: (row.properties as string) ?? null,\n    embedded: (row.embedded as number) ?? 0,\n    created_at: row.created_at as number,\n    updated_at: (row.updated_at as number) ?? null,\n    machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n    synced_at: (row.synced_at as number) ?? null,\n  };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new spore.\n *\n * Requires a valid `agent_id` (foreign key to agents table).\n */\nexport function insertSpore(data: SporeInsert): SporeRow {\n  const db = getDatabase();\n\n  db.prepare(\n    `INSERT INTO spores (\n      id, agent_id, session_id, prompt_batch_id,\n      observation_type, status, content, context,\n      importance, file_path, tags, content_hash,\n      properties, created_at, updated_at, machine_id\n    ) VALUES (\n      ?, ?, ?, ?,\n      ?, ?, ?, ?,\n      ?, ?, ?, ?,\n      ?, ?, ?, ?\n    )`,\n  ).run(\n    data.id,\n    data.agent_id,\n    data.session_id ?? null,\n    data.prompt_batch_id ?? null,\n    data.observation_type,\n    data.status ?? DEFAULT_STATUS,\n    data.content,\n    data.context ?? null,\n    data.importance ?? DEFAULT_IMPORTANCE,\n    data.file_path ?? null,\n    data.tags ?? null,\n    data.content_hash ?? null,\n    data.properties ?? null,\n    data.created_at,\n    data.updated_at ?? null,\n    data.machine_id ?? DEFAULT_MACHINE_ID,\n  );\n\n  const row = toSporeRow(\n    db.prepare(`SELECT ${SELECT_COLUMNS} FROM spores WHERE id = ?`).get(data.id) as Record<string, unknown>,\n  );\n\n  syncRow('spores', row);\n\n  return row;\n}\n\n/**\n * Retrieve a single spore by id.\n *\n * @returns the spore row, or null if not found.\n */\nexport function getSpore(id: string): SporeRow | null {\n  const db = getDatabase();\n\n  const row = db.prepare(\n    `SELECT ${SELECT_COLUMNS} FROM spores WHERE id = ?`,\n  ).get(id) as Record<string, unknown> | undefined;\n\n  if (!row) return null;\n  return toSporeRow(row);\n}\n\n/**\n * List spores with optional filters, ordered by created_at DESC.\n */\n/** Build WHERE clause and bound params from spore filter options. */\nfunction buildSporeWhere(\n  options: Omit<ListSporesOptions, 'limit' | 'offset'>,\n): { where: string; params: unknown[] } {\n  const conditions: string[] = [];\n  const params: unknown[] = [];\n\n  if (options.agent_id !== undefined) {\n    conditions.push(`agent_id = ?`);\n    params.push(options.agent_id);\n  }\n  if (options.observation_type !== undefined) {\n    conditions.push(`observation_type = ?`);\n    params.push(options.observation_type);\n  }\n  if (options.status !== undefined) {\n    conditions.push(`status = ?`);\n    params.push(options.status);\n  }\n  if (options.session_id !== undefined) {\n    conditions.push(`session_id = ?`);\n    params.push(options.session_id);\n  }\n  if (options.search !== undefined && options.search.length > 0) {\n    conditions.push(`(content LIKE ? OR observation_type LIKE ?)`);\n    const pattern = `%${options.search}%`;\n    params.push(pattern, pattern);\n  }\n\n  return {\n    where: conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '',\n    params,\n  };\n}\n\n/**\n * List spores with optional filters, ordered by created_at DESC.\n */\nexport function listSpores(\n  options: ListSporesOptions = {},\n): SporeRow[] {\n  const db = getDatabase();\n  const { where, params } = buildSporeWhere(options);\n  const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n  const offset = options.offset ?? 0;\n\n  const rows = db.prepare(\n    `SELECT ${SELECT_COLUMNS}\n     FROM spores\n     ${where}\n     ORDER BY created_at DESC\n     LIMIT ?\n     OFFSET ?`,\n  ).all(...params, limit, offset) as Record<string, unknown>[];\n\n  return rows.map(toSporeRow);\n}\n\n/**\n * Count spores matching optional filters (for pagination totals).\n */\nexport function countSpores(\n  options: Omit<ListSporesOptions, 'limit' | 'offset'> = {},\n): number {\n  const db = getDatabase();\n  const { where, params } = buildSporeWhere(options);\n\n  const row = db.prepare(\n    `SELECT COUNT(*) as count FROM spores ${where}`,\n  ).get(...params) as { count: number };\n\n  return row.count;\n}\n\n/**\n * Update the status and updated_at timestamp of a spore.\n *\n * @returns the updated row, or null if the spore does not exist.\n */\nexport function updateSporeStatus(\n  id: string,\n  status: string,\n  updatedAt: number,\n): SporeRow | null {\n  const db = getDatabase();\n\n  const info = db.prepare(\n    `UPDATE spores\n     SET status = ?, updated_at = ?\n     WHERE id = ?`,\n  ).run(status, updatedAt, id);\n\n  if (info.changes === 0) return null;\n\n  const row = toSporeRow(\n    db.prepare(`SELECT ${SELECT_COLUMNS} FROM spores WHERE id = ?`).get(id) as Record<string, unknown>,\n  );\n\n  syncRow('spores', row);\n\n  return row;\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,IAAM,qBAAqB;AAG3B,IAAM,iBAAiB;AAGhB,IAAM,qBAAqB;AA+DlC,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,cAAc,KAAK,IAAI;AAO9C,SAAS,WAAW,KAAwC;AAC1D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,YAAa,IAAI,cAAyB;AAAA,IAC1C,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,kBAAkB,IAAI;AAAA,IACtB,QAAQ,IAAI;AAAA,IACZ,SAAS,IAAI;AAAA,IACb,SAAU,IAAI,WAAsB;AAAA,IACpC,YAAY,IAAI;AAAA,IAChB,WAAY,IAAI,aAAwB;AAAA,IACxC,MAAO,IAAI,QAAmB;AAAA,IAC9B,cAAe,IAAI,gBAA2B;AAAA,IAC9C,YAAa,IAAI,cAAyB;AAAA,IAC1C,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAWO,SAAS,YAAY,MAA6B;AACvD,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,mBAAmB;AAAA,IACxB,KAAK;AAAA,IACL,KAAK,UAAU;AAAA,IACf,KAAK;AAAA,IACL,KAAK,WAAW;AAAA,IAChB,KAAK,cAAc;AAAA,IACnB,KAAK,aAAa;AAAA,IAClB,KAAK,QAAQ;AAAA,IACb,KAAK,gBAAgB;AAAA,IACrB,KAAK,cAAc;AAAA,IACnB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,cAAc;AAAA,EACrB;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAU,cAAc,2BAA2B,EAAE,IAAI,KAAK,EAAE;AAAA,EAC7E;AAEA,UAAQ,UAAU,GAAG;AAErB,SAAO;AACT;AAOO,SAAS,SAAS,IAA6B;AACpD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAU,cAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,WAAW,GAAG;AACvB;AAMA,SAAS,gBACP,SACsC;AACtC,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AACA,MAAI,QAAQ,qBAAqB,QAAW;AAC1C,eAAW,KAAK,sBAAsB;AACtC,WAAO,KAAK,QAAQ,gBAAgB;AAAA,EACtC;AACA,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AACA,MAAI,QAAQ,eAAe,QAAW;AACpC,eAAW,KAAK,gBAAgB;AAChC,WAAO,KAAK,QAAQ,UAAU;AAAA,EAChC;AACA,MAAI,QAAQ,WAAW,UAAa,QAAQ,OAAO,SAAS,GAAG;AAC7D,eAAW,KAAK,6CAA6C;AAC7D,UAAM,UAAU,IAAI,QAAQ,MAAM;AAClC,WAAO,KAAK,SAAS,OAAO;AAAA,EAC9B;AAEA,SAAO;AAAA,IACL,OAAO,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAAA,IACrE;AAAA,EACF;AACF;AAKO,SAAS,WACd,UAA6B,CAAC,GAClB;AACZ,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,gBAAgB,OAAO;AACjD,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,QAAQ,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,UAAU;AAC5B;AAKO,SAAS,YACd,UAAuD,CAAC,GAChD;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,gBAAgB,OAAO;AAEjD,QAAM,MAAM,GAAG;AAAA,IACb,wCAAwC,KAAK;AAAA,EAC/C,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,IAAI;AACb;AAOO,SAAS,kBACd,IACA,QACA,WACiB;AACjB,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,QAAQ,WAAW,EAAE;AAE3B,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAU,cAAc,2BAA2B,EAAE,IAAI,EAAE;AAAA,EACxE;AAEA,UAAQ,UAAU,GAAG;AAErB,SAAO;AACT;","names":[]}
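
Note: this second deleted map inlines `src/db/queries/spores.ts`, whose `buildSporeWhere` helper grows a WHERE clause and a positional-parameter list in lockstep so `listSpores` and `countSpores` share one filter path. A condensed, self-contained mirror of that pattern -- the filter values below are invented for illustration:

```ts
// Condensed mirror of buildSporeWhere from the embedded spores.ts: every
// defined filter appends one SQL condition and one positional parameter,
// so the WHERE text and the bind list cannot drift out of step.
interface SporeFilters {
  agent_id?: string;
  status?: string;
  search?: string;
}

function buildWhere(f: SporeFilters): { where: string; params: unknown[] } {
  const conditions: string[] = [];
  const params: unknown[] = [];

  if (f.agent_id !== undefined) {
    conditions.push('agent_id = ?');
    params.push(f.agent_id);
  }
  if (f.status !== undefined) {
    conditions.push('status = ?');
    params.push(f.status);
  }
  if (f.search !== undefined && f.search.length > 0) {
    conditions.push('(content LIKE ? OR observation_type LIKE ?)');
    const pattern = `%${f.search}%`;
    params.push(pattern, pattern);
  }

  return {
    where: conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '',
    params,
  };
}

// The page query and the COUNT(*) total share the same builder, which is
// how listSpores and countSpores stay in agreement for pagination.
const { where, params } = buildWhere({ agent_id: 'historian', search: 'auth' });
console.log(`SELECT * FROM spores ${where} ORDER BY created_at DESC LIMIT ? OFFSET ?`);
console.log([...params, 20, 0]); // bind order matches placeholder order
```
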