@goondocks/myco 0.6.1 → 0.6.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -1
- package/.claude-plugin/plugin.json +1 -1
- package/dist/{chunk-QGJ2ZIUZ.js → chunk-25FY74AP.js} +56 -22
- package/dist/chunk-25FY74AP.js.map +1 -0
- package/dist/{chunk-2YBUL3IL.js → chunk-4WL5X7VS.js} +3 -3
- package/dist/{chunk-24DOZEUJ.js → chunk-ALBVNGCF.js} +591 -27
- package/dist/chunk-ALBVNGCF.js.map +1 -0
- package/dist/{chunk-E7OBRBCQ.js → chunk-CK24O5YQ.js} +12 -2
- package/dist/chunk-CK24O5YQ.js.map +1 -0
- package/dist/{chunk-2GSX3BK2.js → chunk-CPVXNRGW.js} +4 -4
- package/dist/{chunk-L25U7PIG.js → chunk-CQ4RKK67.js} +2 -2
- package/dist/{chunk-GDYYJTTT.js → chunk-DBMHUMG3.js} +3 -3
- package/dist/{chunk-5FNZ7AMX.js → chunk-IWBWZQK6.js} +2 -2
- package/dist/{chunk-MQSYSQ6T.js → chunk-JSK7L46L.js} +11 -6
- package/dist/{chunk-MQSYSQ6T.js.map → chunk-JSK7L46L.js.map} +1 -1
- package/dist/{chunk-KUMVJIJW.js → chunk-LDKXXKF6.js} +6 -10
- package/dist/{chunk-KUMVJIJW.js.map → chunk-LDKXXKF6.js.map} +1 -1
- package/dist/{chunk-2ZBB3MQT.js → chunk-PQWQC3RF.js} +444 -21
- package/dist/chunk-PQWQC3RF.js.map +1 -0
- package/dist/{chunk-5QWZT4AB.js → chunk-RNWALAFP.js} +2 -2
- package/dist/{chunk-3EM23DMD.js → chunk-RXJHB7W4.js} +2 -2
- package/dist/{chunk-GNR3QAER.js → chunk-RY76WEN3.js} +2 -2
- package/dist/{chunk-6BSDCZ5Q.js → chunk-WBLTISAK.js} +8 -3
- package/dist/chunk-WBLTISAK.js.map +1 -0
- package/dist/{chunk-ZMYNRTTD.js → chunk-WU4PCNIK.js} +4 -3
- package/dist/chunk-WU4PCNIK.js.map +1 -0
- package/dist/{chunk-YTANWAGE.js → chunk-XNAM6Z4O.js} +2 -2
- package/dist/{chunk-P3WO3N3I.js → chunk-YG6MLLGL.js} +19 -3
- package/dist/{chunk-P3WO3N3I.js.map → chunk-YG6MLLGL.js.map} +1 -1
- package/dist/{cli-K7SUTP7A.js → cli-EGWAINIE.js} +20 -20
- package/dist/{client-YJMNTITQ.js → client-FDKJ4BY7.js} +5 -5
- package/dist/{config-G5GGT5A6.js → config-HDUFDOQN.js} +3 -3
- package/dist/{curate-6T5NKVXK.js → curate-OHIJFBYF.js} +10 -11
- package/dist/{curate-6T5NKVXK.js.map → curate-OHIJFBYF.js.map} +1 -1
- package/dist/{detect-providers-S3M5TAMW.js → detect-providers-4U3ZPW5G.js} +3 -3
- package/dist/{digest-O35VHYFP.js → digest-I2XYCK2M.js} +11 -13
- package/dist/{digest-O35VHYFP.js.map → digest-I2XYCK2M.js.map} +1 -1
- package/dist/{init-TFLSATB3.js → init-ZO2XQT6U.js} +8 -8
- package/dist/{main-JEUQS3BY.js → main-XZ6X4BUX.js} +177 -40
- package/dist/main-XZ6X4BUX.js.map +1 -0
- package/dist/{rebuild-7SH5GSNX.js → rebuild-NAH4EW5B.js} +10 -11
- package/dist/{rebuild-7SH5GSNX.js.map → rebuild-NAH4EW5B.js.map} +1 -1
- package/dist/reprocess-6FOP37XS.js +79 -0
- package/dist/reprocess-6FOP37XS.js.map +1 -0
- package/dist/{restart-NLJLB52D.js → restart-WSA4JSE3.js} +6 -6
- package/dist/{search-2BVRF54H.js → search-QXJQUB35.js} +6 -6
- package/dist/{server-4AMZNP4F.js → server-VXN3CJ4Y.js} +14 -18
- package/dist/{server-4AMZNP4F.js.map → server-VXN3CJ4Y.js.map} +1 -1
- package/dist/{session-start-AZAF3DTE.js → session-start-KQ4KCQMZ.js} +9 -9
- package/dist/setup-digest-QNCM3PNQ.js +15 -0
- package/dist/setup-llm-EAOIUSPJ.js +15 -0
- package/dist/src/cli.js +4 -4
- package/dist/src/daemon/main.js +4 -4
- package/dist/src/hooks/post-tool-use.js +5 -5
- package/dist/src/hooks/session-end.js +5 -5
- package/dist/src/hooks/session-start.js +4 -4
- package/dist/src/hooks/stop.js +7 -7
- package/dist/src/hooks/user-prompt-submit.js +5 -5
- package/dist/src/mcp/server.js +4 -4
- package/dist/src/prompts/consolidation.md +2 -0
- package/dist/src/prompts/digest-7500.md +68 -0
- package/dist/{stats-MKDIZFIQ.js → stats-43OESUEB.js} +6 -6
- package/dist/ui/assets/index-Bk4X_8-Z.css +1 -0
- package/dist/ui/assets/index-D3SY7ZHY.js +299 -0
- package/dist/ui/index.html +2 -2
- package/dist/{verify-7DW7LAND.js → verify-IIAHBAAU.js} +6 -6
- package/dist/{version-RQLD7VBP.js → version-NKOECSVH.js} +4 -4
- package/package.json +1 -1
- package/dist/chunk-24DOZEUJ.js.map +0 -1
- package/dist/chunk-2ZBB3MQT.js.map +0 -1
- package/dist/chunk-3JCXYLHD.js +0 -33
- package/dist/chunk-3JCXYLHD.js.map +0 -1
- package/dist/chunk-6BSDCZ5Q.js.map +0 -1
- package/dist/chunk-B5UZSHQV.js +0 -250
- package/dist/chunk-B5UZSHQV.js.map +0 -1
- package/dist/chunk-E7OBRBCQ.js.map +0 -1
- package/dist/chunk-KC7ENQTN.js +0 -436
- package/dist/chunk-KC7ENQTN.js.map +0 -1
- package/dist/chunk-QGJ2ZIUZ.js.map +0 -1
- package/dist/chunk-UVGAVYWZ.js +0 -157
- package/dist/chunk-UVGAVYWZ.js.map +0 -1
- package/dist/chunk-ZMYNRTTD.js.map +0 -1
- package/dist/main-JEUQS3BY.js.map +0 -1
- package/dist/reprocess-Q4YH2ZBK.js +0 -268
- package/dist/reprocess-Q4YH2ZBK.js.map +0 -1
- package/dist/setup-digest-YLZZGSSR.js +0 -15
- package/dist/setup-llm-JOXBSLXC.js +0 -15
- package/dist/ui/assets/index-D37IoDXS.css +0 -1
- package/dist/ui/assets/index-DA61Ial2.js +0 -289
- /package/dist/{chunk-2YBUL3IL.js.map → chunk-4WL5X7VS.js.map} +0 -0
- /package/dist/{chunk-2GSX3BK2.js.map → chunk-CPVXNRGW.js.map} +0 -0
- /package/dist/{chunk-L25U7PIG.js.map → chunk-CQ4RKK67.js.map} +0 -0
- /package/dist/{chunk-GDYYJTTT.js.map → chunk-DBMHUMG3.js.map} +0 -0
- /package/dist/{chunk-5FNZ7AMX.js.map → chunk-IWBWZQK6.js.map} +0 -0
- /package/dist/{chunk-5QWZT4AB.js.map → chunk-RNWALAFP.js.map} +0 -0
- /package/dist/{chunk-3EM23DMD.js.map → chunk-RXJHB7W4.js.map} +0 -0
- /package/dist/{chunk-GNR3QAER.js.map → chunk-RY76WEN3.js.map} +0 -0
- /package/dist/{chunk-YTANWAGE.js.map → chunk-XNAM6Z4O.js.map} +0 -0
- /package/dist/{cli-K7SUTP7A.js.map → cli-EGWAINIE.js.map} +0 -0
- /package/dist/{client-YJMNTITQ.js.map → client-FDKJ4BY7.js.map} +0 -0
- /package/dist/{config-G5GGT5A6.js.map → config-HDUFDOQN.js.map} +0 -0
- /package/dist/{detect-providers-S3M5TAMW.js.map → detect-providers-4U3ZPW5G.js.map} +0 -0
- /package/dist/{init-TFLSATB3.js.map → init-ZO2XQT6U.js.map} +0 -0
- /package/dist/{restart-NLJLB52D.js.map → restart-WSA4JSE3.js.map} +0 -0
- /package/dist/{search-2BVRF54H.js.map → search-QXJQUB35.js.map} +0 -0
- /package/dist/{session-start-AZAF3DTE.js.map → session-start-KQ4KCQMZ.js.map} +0 -0
- /package/dist/{setup-digest-YLZZGSSR.js.map → setup-digest-QNCM3PNQ.js.map} +0 -0
- /package/dist/{setup-llm-JOXBSLXC.js.map → setup-llm-EAOIUSPJ.js.map} +0 -0
- /package/dist/{stats-MKDIZFIQ.js.map → stats-43OESUEB.js.map} +0 -0
- /package/dist/{verify-7DW7LAND.js.map → verify-IIAHBAAU.js.map} +0 -0
- /package/dist/{version-RQLD7VBP.js.map → version-NKOECSVH.js.map} +0 -0
|
@@ -1,29 +1,28 @@
|
|
|
1
1
|
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
|
|
2
2
|
import {
|
|
3
3
|
runCuration
|
|
4
|
-
} from "./chunk-
|
|
5
|
-
import "./chunk-3JCXYLHD.js";
|
|
4
|
+
} from "./chunk-PQWQC3RF.js";
|
|
6
5
|
import {
|
|
7
6
|
VectorIndex
|
|
8
7
|
} from "./chunk-4RMSHZE4.js";
|
|
9
|
-
import "./chunk-
|
|
10
|
-
import "./chunk-24DOZEUJ.js";
|
|
8
|
+
import "./chunk-ALBVNGCF.js";
|
|
11
9
|
import "./chunk-RGVBGTD6.js";
|
|
12
10
|
import {
|
|
13
11
|
createEmbeddingProvider,
|
|
14
12
|
createLlmProvider
|
|
15
|
-
} from "./chunk-
|
|
16
|
-
import "./chunk-
|
|
13
|
+
} from "./chunk-DBMHUMG3.js";
|
|
14
|
+
import "./chunk-RY76WEN3.js";
|
|
17
15
|
import "./chunk-6FQISQNA.js";
|
|
18
16
|
import {
|
|
19
17
|
MycoIndex
|
|
20
18
|
} from "./chunk-TWSTAVLO.js";
|
|
21
|
-
import "./chunk-
|
|
19
|
+
import "./chunk-25FY74AP.js";
|
|
22
20
|
import {
|
|
23
21
|
loadConfig
|
|
24
|
-
} from "./chunk-
|
|
25
|
-
import "./chunk-
|
|
26
|
-
import "./chunk-
|
|
22
|
+
} from "./chunk-YG6MLLGL.js";
|
|
23
|
+
import "./chunk-JSK7L46L.js";
|
|
24
|
+
import "./chunk-RNWALAFP.js";
|
|
25
|
+
import "./chunk-WBLTISAK.js";
|
|
27
26
|
import "./chunk-PZUWP5VK.js";
|
|
28
27
|
|
|
29
28
|
// src/cli/curate.ts
|
|
@@ -77,4 +76,4 @@ Curation complete:`);
|
|
|
77
76
|
export {
|
|
78
77
|
run
|
|
79
78
|
};
|
|
80
|
-
//# sourceMappingURL=curate-
|
|
79
|
+
//# sourceMappingURL=curate-OHIJFBYF.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/cli/curate.ts"],"sourcesContent":["/**\n * myco curate — scan the vault for stale spores and supersede them.\n *\n * Usage:\n * myco curate Scan and supersede stale spores\n * myco curate --dry-run Show what would be superseded without writing\n *\n * Algorithm:\n * 1. Load all active spores from the index\n * 2. Group by observation_type\n * 3. Within each group, embed spores and cluster by cosine similarity\n * 4. For each cluster with 2+ members, ask the LLM which are outdated\n * 5. Mark superseded: update frontmatter, append notice, re-index, remove vector\n */\nimport path from 'node:path';\nimport { loadConfig } from '../config/loader.js';\nimport { MycoIndex } from '../index/sqlite.js';\nimport { VectorIndex } from '../index/vectors.js';\nimport { createLlmProvider, createEmbeddingProvider } from '../intelligence/llm.js';\nimport { runCuration } from '../services/vault-ops.js';\n\nexport async function run(args: string[], vaultDir: string): Promise<void> {\n const isDryRun = args.includes('--dry-run');\n\n const config = loadConfig(vaultDir);\n const index = new MycoIndex(path.join(vaultDir, 'index.db'));\n\n const llmProvider = createLlmProvider(config.intelligence.llm);\n const embeddingProvider = createEmbeddingProvider(config.intelligence.embedding);\n\n let vectorIndex: VectorIndex | null = null;\n try {\n const testEmbed = await embeddingProvider.embed('test');\n vectorIndex = new VectorIndex(path.join(vaultDir, 'vectors.db'), testEmbed.dimensions);\n } catch (e) {\n console.error(`Vector index unavailable: ${(e as Error).message}`);\n console.error('Curate requires a working embedding provider.');\n index.close();\n process.exit(1);\n }\n\n try {\n if (isDryRun) {\n console.log('Dry run — no changes will be written.\\n');\n }\n\n const result = await runCuration(\n {\n vaultDir,\n config,\n index,\n vectorIndex,\n llmProvider,\n embeddingProvider,\n log: (_level, message) => console.log(` ${message}`),\n },\n 
isDryRun,\n );\n\n console.log(`\\nCuration complete:`);\n console.log(` Scanned: ${result.scanned} active spores`);\n console.log(` Clusters evaluated: ${result.clustersEvaluated}`);\n if (isDryRun) {\n console.log(` Would supersede: ${result.superseded}`);\n } else {\n console.log(` Superseded: ${result.superseded}`);\n }\n } finally {\n index.close();\n vectorIndex?.close();\n }\n}\n"],"mappings":"
|
|
1
|
+
{"version":3,"sources":["../src/cli/curate.ts"],"sourcesContent":["/**\n * myco curate — scan the vault for stale spores and supersede them.\n *\n * Usage:\n * myco curate Scan and supersede stale spores\n * myco curate --dry-run Show what would be superseded without writing\n *\n * Algorithm:\n * 1. Load all active spores from the index\n * 2. Group by observation_type\n * 3. Within each group, embed spores and cluster by cosine similarity\n * 4. For each cluster with 2+ members, ask the LLM which are outdated\n * 5. Mark superseded: update frontmatter, append notice, re-index, remove vector\n */\nimport path from 'node:path';\nimport { loadConfig } from '../config/loader.js';\nimport { MycoIndex } from '../index/sqlite.js';\nimport { VectorIndex } from '../index/vectors.js';\nimport { createLlmProvider, createEmbeddingProvider } from '../intelligence/llm.js';\nimport { runCuration } from '../services/vault-ops.js';\n\nexport async function run(args: string[], vaultDir: string): Promise<void> {\n const isDryRun = args.includes('--dry-run');\n\n const config = loadConfig(vaultDir);\n const index = new MycoIndex(path.join(vaultDir, 'index.db'));\n\n const llmProvider = createLlmProvider(config.intelligence.llm);\n const embeddingProvider = createEmbeddingProvider(config.intelligence.embedding);\n\n let vectorIndex: VectorIndex | null = null;\n try {\n const testEmbed = await embeddingProvider.embed('test');\n vectorIndex = new VectorIndex(path.join(vaultDir, 'vectors.db'), testEmbed.dimensions);\n } catch (e) {\n console.error(`Vector index unavailable: ${(e as Error).message}`);\n console.error('Curate requires a working embedding provider.');\n index.close();\n process.exit(1);\n }\n\n try {\n if (isDryRun) {\n console.log('Dry run — no changes will be written.\\n');\n }\n\n const result = await runCuration(\n {\n vaultDir,\n config,\n index,\n vectorIndex,\n llmProvider,\n embeddingProvider,\n log: (_level, message) => console.log(` ${message}`),\n },\n 
isDryRun,\n );\n\n console.log(`\\nCuration complete:`);\n console.log(` Scanned: ${result.scanned} active spores`);\n console.log(` Clusters evaluated: ${result.clustersEvaluated}`);\n if (isDryRun) {\n console.log(` Would supersede: ${result.superseded}`);\n } else {\n console.log(` Superseded: ${result.superseded}`);\n }\n } finally {\n index.close();\n vectorIndex?.close();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,OAAO,UAAU;AAOjB,eAAsB,IAAI,MAAgB,UAAiC;AACzE,QAAM,WAAW,KAAK,SAAS,WAAW;AAE1C,QAAM,SAAS,WAAW,QAAQ;AAClC,QAAM,QAAQ,IAAI,UAAU,KAAK,KAAK,UAAU,UAAU,CAAC;AAE3D,QAAM,cAAc,kBAAkB,OAAO,aAAa,GAAG;AAC7D,QAAM,oBAAoB,wBAAwB,OAAO,aAAa,SAAS;AAE/E,MAAI,cAAkC;AACtC,MAAI;AACF,UAAM,YAAY,MAAM,kBAAkB,MAAM,MAAM;AACtD,kBAAc,IAAI,YAAY,KAAK,KAAK,UAAU,YAAY,GAAG,UAAU,UAAU;AAAA,EACvF,SAAS,GAAG;AACV,YAAQ,MAAM,6BAA8B,EAAY,OAAO,EAAE;AACjE,YAAQ,MAAM,+CAA+C;AAC7D,UAAM,MAAM;AACZ,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AACF,QAAI,UAAU;AACZ,cAAQ,IAAI,8CAAyC;AAAA,IACvD;AAEA,UAAM,SAAS,MAAM;AAAA,MACnB;AAAA,QACE;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,KAAK,CAAC,QAAQ,YAAY,QAAQ,IAAI,KAAK,OAAO,EAAE;AAAA,MACtD;AAAA,MACA;AAAA,IACF;AAEA,YAAQ,IAAI;AAAA,mBAAsB;AAClC,YAAQ,IAAI,cAAc,OAAO,OAAO,gBAAgB;AACxD,YAAQ,IAAI,yBAAyB,OAAO,iBAAiB,EAAE;AAC/D,QAAI,UAAU;AACZ,cAAQ,IAAI,sBAAsB,OAAO,UAAU,EAAE;AAAA,IACvD,OAAO;AACL,cAAQ,IAAI,iBAAiB,OAAO,UAAU,EAAE;AAAA,IAClD;AAAA,EACF,UAAE;AACA,UAAM,MAAM;AACZ,iBAAa,MAAM;AAAA,EACrB;AACF;","names":[]}
|
|
@@ -2,10 +2,10 @@ import { createRequire as __cr } from 'node:module'; const require = __cr(import
|
|
|
2
2
|
import {
|
|
3
3
|
LmStudioBackend,
|
|
4
4
|
OllamaBackend
|
|
5
|
-
} from "./chunk-
|
|
5
|
+
} from "./chunk-25FY74AP.js";
|
|
6
6
|
import {
|
|
7
7
|
PROVIDER_DETECT_TIMEOUT_MS
|
|
8
|
-
} from "./chunk-
|
|
8
|
+
} from "./chunk-WBLTISAK.js";
|
|
9
9
|
import "./chunk-PZUWP5VK.js";
|
|
10
10
|
|
|
11
11
|
// src/cli/detect-providers.ts
|
|
@@ -32,4 +32,4 @@ async function run(_args) {
|
|
|
32
32
|
export {
|
|
33
33
|
run
|
|
34
34
|
};
|
|
35
|
-
//# sourceMappingURL=detect-providers-
|
|
35
|
+
//# sourceMappingURL=detect-providers-4U3ZPW5G.js.map
|
|
@@ -1,30 +1,28 @@
|
|
|
1
1
|
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
|
|
2
2
|
import {
|
|
3
3
|
runDigest
|
|
4
|
-
} from "./chunk-
|
|
5
|
-
import "./chunk-
|
|
6
|
-
import "./chunk-UVGAVYWZ.js";
|
|
7
|
-
import "./chunk-24DOZEUJ.js";
|
|
4
|
+
} from "./chunk-PQWQC3RF.js";
|
|
5
|
+
import "./chunk-ALBVNGCF.js";
|
|
8
6
|
import "./chunk-RGVBGTD6.js";
|
|
9
7
|
import {
|
|
10
8
|
createLlmProvider
|
|
11
|
-
} from "./chunk-
|
|
12
|
-
import "./chunk-
|
|
9
|
+
} from "./chunk-DBMHUMG3.js";
|
|
10
|
+
import "./chunk-RY76WEN3.js";
|
|
13
11
|
import "./chunk-6FQISQNA.js";
|
|
14
12
|
import {
|
|
15
13
|
MycoIndex
|
|
16
14
|
} from "./chunk-TWSTAVLO.js";
|
|
17
|
-
import "./chunk-
|
|
15
|
+
import "./chunk-4WL5X7VS.js";
|
|
18
16
|
import {
|
|
19
17
|
parseIntFlag
|
|
20
18
|
} from "./chunk-SAKJMNSR.js";
|
|
21
|
-
import "./chunk-
|
|
19
|
+
import "./chunk-25FY74AP.js";
|
|
22
20
|
import {
|
|
23
21
|
loadConfig
|
|
24
|
-
} from "./chunk-
|
|
25
|
-
import "./chunk-
|
|
26
|
-
import "./chunk-
|
|
27
|
-
import "./chunk-
|
|
22
|
+
} from "./chunk-YG6MLLGL.js";
|
|
23
|
+
import "./chunk-JSK7L46L.js";
|
|
24
|
+
import "./chunk-RNWALAFP.js";
|
|
25
|
+
import "./chunk-WBLTISAK.js";
|
|
28
26
|
import "./chunk-PZUWP5VK.js";
|
|
29
27
|
|
|
30
28
|
// src/cli/digest.ts
|
|
@@ -84,4 +82,4 @@ Digest cycle complete:`);
|
|
|
84
82
|
export {
|
|
85
83
|
run
|
|
86
84
|
};
|
|
87
|
-
//# sourceMappingURL=digest-
|
|
85
|
+
//# sourceMappingURL=digest-I2XYCK2M.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/cli/digest.ts"],"sourcesContent":["/**\n * myco digest — run a digest cycle from the CLI.\n *\n * Usage:\n * myco digest Incremental cycle (only new substrate)\n * myco digest --full Full reprocess of all tiers from clean slate\n * myco digest --tier 3000 Reprocess a specific tier from clean slate\n *\n * When --tier or --full is used, the cycle processes ALL vault notes (not just\n * new ones) and ignores previous extracts, producing a clean synthesis.\n */\nimport { loadConfig } from '../config/loader.js';\nimport { MycoIndex } from '../index/sqlite.js';\nimport { createLlmProvider } from '../intelligence/llm.js';\nimport { runDigest } from '../services/vault-ops.js';\nimport { parseIntFlag } from './shared.js';\nimport path from 'node:path';\n\nexport async function run(args: string[], vaultDir: string): Promise<void> {\n const config = loadConfig(vaultDir);\n\n if (!config.digest.enabled) {\n console.error('Digest is not enabled. Set digest.enabled: true in myco.yaml.');\n process.exit(1);\n }\n\n const tierArg = parseIntFlag(args, '--tier');\n const isFull = args.includes('--full');\n const isReprocess = isFull || tierArg !== undefined;\n\n // Resolve the digest LLM provider\n const digestLlmConfig = {\n provider: config.digest.intelligence.provider ?? config.intelligence.llm.provider,\n model: config.digest.intelligence.model ?? config.intelligence.llm.model,\n base_url: config.digest.intelligence.base_url ?? config.intelligence.llm.base_url,\n context_window: config.digest.intelligence.context_window,\n };\n const llmProvider = createLlmProvider(digestLlmConfig);\n\n const index = new MycoIndex(path.join(vaultDir, 'index.db'));\n\n if (isReprocess) {\n const tierLabel = tierArg ? 
`tier ${tierArg}` : 'all tiers';\n console.log(`Full reprocess of ${tierLabel} — clean slate, all substrate`);\n } else {\n console.log('Running incremental digest cycle');\n }\n\n try {\n const result = await runDigest(\n {\n vaultDir,\n config,\n index,\n log: (level, message, data) => {\n const prefix = level === 'warn' ? '!' : level === 'info' ? '>' : ' ';\n const suffix = data ? ` ${JSON.stringify(data)}` : '';\n console.log(`${prefix} ${message}${suffix}`);\n },\n },\n llmProvider,\n { tier: tierArg, full: isFull },\n );\n\n if (!result) {\n console.log('No substrate found — nothing to digest.');\n return;\n }\n\n console.log(`\\nDigest cycle complete:`);\n console.log(` Tiers generated: [${result.tiersGenerated.join(', ')}]`);\n console.log(` Substrate: ${Object.values(result.substrate).flat().length} notes`);\n console.log(` Duration: ${(result.durationMs / 1000).toFixed(1)}s`);\n console.log(` Model: ${result.model}`);\n } finally {\n index.close();\n }\n}\n"],"mappings":"
|
|
1
|
+
{"version":3,"sources":["../src/cli/digest.ts"],"sourcesContent":["/**\n * myco digest — run a digest cycle from the CLI.\n *\n * Usage:\n * myco digest Incremental cycle (only new substrate)\n * myco digest --full Full reprocess of all tiers from clean slate\n * myco digest --tier 3000 Reprocess a specific tier from clean slate\n *\n * When --tier or --full is used, the cycle processes ALL vault notes (not just\n * new ones) and ignores previous extracts, producing a clean synthesis.\n */\nimport { loadConfig } from '../config/loader.js';\nimport { MycoIndex } from '../index/sqlite.js';\nimport { createLlmProvider } from '../intelligence/llm.js';\nimport { runDigest } from '../services/vault-ops.js';\nimport { parseIntFlag } from './shared.js';\nimport path from 'node:path';\n\nexport async function run(args: string[], vaultDir: string): Promise<void> {\n const config = loadConfig(vaultDir);\n\n if (!config.digest.enabled) {\n console.error('Digest is not enabled. Set digest.enabled: true in myco.yaml.');\n process.exit(1);\n }\n\n const tierArg = parseIntFlag(args, '--tier');\n const isFull = args.includes('--full');\n const isReprocess = isFull || tierArg !== undefined;\n\n // Resolve the digest LLM provider\n const digestLlmConfig = {\n provider: config.digest.intelligence.provider ?? config.intelligence.llm.provider,\n model: config.digest.intelligence.model ?? config.intelligence.llm.model,\n base_url: config.digest.intelligence.base_url ?? config.intelligence.llm.base_url,\n context_window: config.digest.intelligence.context_window,\n };\n const llmProvider = createLlmProvider(digestLlmConfig);\n\n const index = new MycoIndex(path.join(vaultDir, 'index.db'));\n\n if (isReprocess) {\n const tierLabel = tierArg ? 
`tier ${tierArg}` : 'all tiers';\n console.log(`Full reprocess of ${tierLabel} — clean slate, all substrate`);\n } else {\n console.log('Running incremental digest cycle');\n }\n\n try {\n const result = await runDigest(\n {\n vaultDir,\n config,\n index,\n log: (level, message, data) => {\n const prefix = level === 'warn' ? '!' : level === 'info' ? '>' : ' ';\n const suffix = data ? ` ${JSON.stringify(data)}` : '';\n console.log(`${prefix} ${message}${suffix}`);\n },\n },\n llmProvider,\n { tier: tierArg, full: isFull },\n );\n\n if (!result) {\n console.log('No substrate found — nothing to digest.');\n return;\n }\n\n console.log(`\\nDigest cycle complete:`);\n console.log(` Tiers generated: [${result.tiersGenerated.join(', ')}]`);\n console.log(` Substrate: ${Object.values(result.substrate).flat().length} notes`);\n console.log(` Duration: ${(result.durationMs / 1000).toFixed(1)}s`);\n console.log(` Model: ${result.model}`);\n } finally {\n index.close();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAgBA,OAAO,UAAU;AAEjB,eAAsB,IAAI,MAAgB,UAAiC;AACzE,QAAM,SAAS,WAAW,QAAQ;AAElC,MAAI,CAAC,OAAO,OAAO,SAAS;AAC1B,YAAQ,MAAM,+DAA+D;AAC7E,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,aAAa,MAAM,QAAQ;AAC3C,QAAM,SAAS,KAAK,SAAS,QAAQ;AACrC,QAAM,cAAc,UAAU,YAAY;AAG1C,QAAM,kBAAkB;AAAA,IACtB,UAAU,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,IAAI;AAAA,IACzE,OAAO,OAAO,OAAO,aAAa,SAAS,OAAO,aAAa,IAAI;AAAA,IACnE,UAAU,OAAO,OAAO,aAAa,YAAY,OAAO,aAAa,IAAI;AAAA,IACzE,gBAAgB,OAAO,OAAO,aAAa;AAAA,EAC7C;AACA,QAAM,cAAc,kBAAkB,eAAe;AAErD,QAAM,QAAQ,IAAI,UAAU,KAAK,KAAK,UAAU,UAAU,CAAC;AAE3D,MAAI,aAAa;AACf,UAAM,YAAY,UAAU,QAAQ,OAAO,KAAK;AAChD,YAAQ,IAAI,qBAAqB,SAAS,oCAA+B;AAAA,EAC3E,OAAO;AACL,YAAQ,IAAI,kCAAkC;AAAA,EAChD;AAEA,MAAI;AACF,UAAM,SAAS,MAAM;AAAA,MACnB;AAAA,QACE;AAAA,QACA;AAAA,QACA;AAAA,QACA,KAAK,CAAC,OAAO,SAAS,SAAS;AAC7B,gBAAM,SAAS,UAAU,SAAS,MAAM,UAAU,SAAS,MAAM;AACjE,gBAAM,SAAS,OAAO,IAAI,KAAK,UAAU,IAAI,CAAC,KAAK;AACnD,kBAAQ,IAAI,GAAG,MAAM,IAAI,OAAO,GAAG,MAAM,EAAE;AAAA,QAC7C;AAAA,MACF;AAAA,MACA;AAAA,MACA,EAAE,M
AAM,SAAS,MAAM,OAAO;AAAA,IAChC;AAEA,QAAI,CAAC,QAAQ;AACX,cAAQ,IAAI,8CAAyC;AACrD;AAAA,IACF;AAEA,YAAQ,IAAI;AAAA,uBAA0B;AACtC,YAAQ,IAAI,uBAAuB,OAAO,eAAe,KAAK,IAAI,CAAC,GAAG;AACtE,YAAQ,IAAI,gBAAgB,OAAO,OAAO,OAAO,SAAS,EAAE,KAAK,EAAE,MAAM,QAAQ;AACjF,YAAQ,IAAI,gBAAgB,OAAO,aAAa,KAAM,QAAQ,CAAC,CAAC,GAAG;AACnE,YAAQ,IAAI,YAAY,OAAO,KAAK,EAAE;AAAA,EACxC,UAAE;AACA,UAAM,MAAM;AAAA,EACd;AACF;","names":[]}
|
|
@@ -7,27 +7,27 @@ import {
|
|
|
7
7
|
} from "./chunk-TWSTAVLO.js";
|
|
8
8
|
import {
|
|
9
9
|
run
|
|
10
|
-
} from "./chunk-
|
|
10
|
+
} from "./chunk-IWBWZQK6.js";
|
|
11
11
|
import {
|
|
12
12
|
run as run2
|
|
13
|
-
} from "./chunk-
|
|
13
|
+
} from "./chunk-XNAM6Z4O.js";
|
|
14
14
|
import {
|
|
15
15
|
VAULT_GITIGNORE,
|
|
16
16
|
configureVaultEnv
|
|
17
|
-
} from "./chunk-
|
|
17
|
+
} from "./chunk-4WL5X7VS.js";
|
|
18
18
|
import {
|
|
19
19
|
parseStringFlag
|
|
20
20
|
} from "./chunk-SAKJMNSR.js";
|
|
21
|
-
import "./chunk-
|
|
21
|
+
import "./chunk-25FY74AP.js";
|
|
22
22
|
import {
|
|
23
23
|
MycoConfigSchema,
|
|
24
24
|
require_dist
|
|
25
|
-
} from "./chunk-
|
|
25
|
+
} from "./chunk-JSK7L46L.js";
|
|
26
26
|
import {
|
|
27
27
|
resolveVaultDir
|
|
28
28
|
} from "./chunk-N33KUCFP.js";
|
|
29
|
-
import "./chunk-
|
|
30
|
-
import "./chunk-
|
|
29
|
+
import "./chunk-RNWALAFP.js";
|
|
30
|
+
import "./chunk-WBLTISAK.js";
|
|
31
31
|
import {
|
|
32
32
|
__toESM
|
|
33
33
|
} from "./chunk-PZUWP5VK.js";
|
|
@@ -106,4 +106,4 @@ async function run3(args) {
|
|
|
106
106
|
export {
|
|
107
107
|
run3 as run
|
|
108
108
|
};
|
|
109
|
-
//# sourceMappingURL=init-
|
|
109
|
+
//# sourceMappingURL=init-ZO2XQT6U.js.map
|
|
@@ -1,35 +1,26 @@
|
|
|
1
1
|
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
|
|
2
|
-
import {
|
|
3
|
-
BufferProcessor,
|
|
4
|
-
TranscriptMiner,
|
|
5
|
-
extractTurnsFromBuffer,
|
|
6
|
-
writeObservationNotes
|
|
7
|
-
} from "./chunk-B5UZSHQV.js";
|
|
8
2
|
import {
|
|
9
3
|
gatherStats
|
|
10
|
-
} from "./chunk-
|
|
4
|
+
} from "./chunk-WU4PCNIK.js";
|
|
11
5
|
import {
|
|
6
|
+
BufferProcessor,
|
|
12
7
|
DigestEngine,
|
|
13
8
|
Metabolism,
|
|
9
|
+
SUMMARIZATION_FAILED_MARKER,
|
|
10
|
+
TranscriptMiner,
|
|
14
11
|
appendTraceRecord,
|
|
12
|
+
extractTurnsFromBuffer,
|
|
15
13
|
readLastTimestamp,
|
|
16
14
|
runCuration,
|
|
17
15
|
runDigest,
|
|
18
|
-
runRebuild
|
|
19
|
-
|
|
20
|
-
|
|
16
|
+
runRebuild,
|
|
17
|
+
runReprocess,
|
|
18
|
+
writeObservationNotes
|
|
19
|
+
} from "./chunk-PQWQC3RF.js";
|
|
21
20
|
import {
|
|
22
21
|
consolidateSpores,
|
|
23
22
|
handleMycoContext
|
|
24
|
-
} from "./chunk-
|
|
25
|
-
import {
|
|
26
|
-
VaultWriter,
|
|
27
|
-
bareSessionId,
|
|
28
|
-
formatSessionBody,
|
|
29
|
-
sessionNoteId,
|
|
30
|
-
sessionRelativePath,
|
|
31
|
-
sessionWikilink
|
|
32
|
-
} from "./chunk-KC7ENQTN.js";
|
|
23
|
+
} from "./chunk-LDKXXKF6.js";
|
|
33
24
|
import {
|
|
34
25
|
DaemonLogger
|
|
35
26
|
} from "./chunk-QLUE3BUL.js";
|
|
@@ -37,59 +28,65 @@ import {
|
|
|
37
28
|
VectorIndex
|
|
38
29
|
} from "./chunk-4RMSHZE4.js";
|
|
39
30
|
import {
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
import {
|
|
31
|
+
CONVERSATION_HEADING,
|
|
32
|
+
VaultWriter,
|
|
33
|
+
bareSessionId,
|
|
44
34
|
buildSimilarityPrompt,
|
|
35
|
+
checkSupersession,
|
|
36
|
+
extractJson,
|
|
45
37
|
extractNumber,
|
|
46
38
|
formatNotesForPrompt,
|
|
39
|
+
formatSessionBody,
|
|
47
40
|
indexNote,
|
|
41
|
+
isActiveSpore,
|
|
48
42
|
loadPrompt,
|
|
49
43
|
rebuildIndex,
|
|
44
|
+
sessionNoteId,
|
|
45
|
+
sessionRelativePath,
|
|
46
|
+
sessionWikilink,
|
|
50
47
|
stripReasoningTokens
|
|
51
|
-
} from "./chunk-
|
|
48
|
+
} from "./chunk-ALBVNGCF.js";
|
|
52
49
|
import {
|
|
53
50
|
generateEmbedding
|
|
54
51
|
} from "./chunk-RGVBGTD6.js";
|
|
55
52
|
import {
|
|
56
53
|
createEmbeddingProvider,
|
|
57
54
|
createLlmProvider
|
|
58
|
-
} from "./chunk-
|
|
59
|
-
import "./chunk-
|
|
55
|
+
} from "./chunk-DBMHUMG3.js";
|
|
56
|
+
import "./chunk-RY76WEN3.js";
|
|
60
57
|
import {
|
|
61
58
|
initFts
|
|
62
59
|
} from "./chunk-6FQISQNA.js";
|
|
63
60
|
import {
|
|
64
61
|
MycoIndex
|
|
65
62
|
} from "./chunk-TWSTAVLO.js";
|
|
66
|
-
import "./chunk-
|
|
63
|
+
import "./chunk-4WL5X7VS.js";
|
|
67
64
|
import "./chunk-SAKJMNSR.js";
|
|
68
65
|
import {
|
|
69
66
|
LmStudioBackend,
|
|
70
67
|
OllamaBackend
|
|
71
|
-
} from "./chunk-
|
|
68
|
+
} from "./chunk-25FY74AP.js";
|
|
72
69
|
import {
|
|
73
70
|
CONFIG_FILENAME,
|
|
74
71
|
loadConfig,
|
|
75
72
|
saveConfig
|
|
76
|
-
} from "./chunk-
|
|
73
|
+
} from "./chunk-YG6MLLGL.js";
|
|
77
74
|
import {
|
|
78
75
|
MycoConfigSchema,
|
|
79
76
|
external_exports,
|
|
80
77
|
require_dist
|
|
81
|
-
} from "./chunk-
|
|
78
|
+
} from "./chunk-JSK7L46L.js";
|
|
82
79
|
import {
|
|
83
80
|
EventBuffer
|
|
84
81
|
} from "./chunk-HIN3UVOG.js";
|
|
85
82
|
import {
|
|
86
83
|
getPluginVersion
|
|
87
|
-
} from "./chunk-
|
|
84
|
+
} from "./chunk-CK24O5YQ.js";
|
|
88
85
|
import {
|
|
89
86
|
claudeCodeAdapter,
|
|
90
87
|
createPerProjectAdapter,
|
|
91
88
|
extensionForMimeType
|
|
92
|
-
} from "./chunk-
|
|
89
|
+
} from "./chunk-RNWALAFP.js";
|
|
93
90
|
import {
|
|
94
91
|
CANDIDATE_CONTENT_PREVIEW,
|
|
95
92
|
CONSOLIDATION_MAX_TOKENS,
|
|
@@ -97,6 +94,8 @@ import {
|
|
|
97
94
|
CONSOLIDATION_VECTOR_FETCH_LIMIT,
|
|
98
95
|
CONTENT_SNIPPET_CHARS,
|
|
99
96
|
CONTEXT_SESSION_PREVIEW_CHARS,
|
|
97
|
+
DAEMON_EVICT_POLL_MS,
|
|
98
|
+
DAEMON_EVICT_TIMEOUT_MS,
|
|
100
99
|
EMBEDDING_INPUT_LIMIT,
|
|
101
100
|
FILE_WATCH_STABILITY_MS,
|
|
102
101
|
LINEAGE_RECENT_SESSIONS_LIMIT,
|
|
@@ -108,7 +107,7 @@ import {
|
|
|
108
107
|
RELATED_SPORES_LIMIT,
|
|
109
108
|
SESSION_CONTEXT_MAX_PLANS,
|
|
110
109
|
STALE_BUFFER_MAX_AGE_MS
|
|
111
|
-
} from "./chunk-
|
|
110
|
+
} from "./chunk-WBLTISAK.js";
|
|
112
111
|
import {
|
|
113
112
|
__toESM
|
|
114
113
|
} from "./chunk-PZUWP5VK.js";
|
|
@@ -322,6 +321,57 @@ var DaemonServer = class {
|
|
|
322
321
|
} catch {
|
|
323
322
|
}
|
|
324
323
|
}
|
|
324
|
+
/**
|
|
325
|
+
* Kill any existing daemon for this vault before taking over.
|
|
326
|
+
* Prevents orphaned daemons when spawned from worktrees or plugin upgrades.
|
|
327
|
+
* Must be called BEFORE resolvePort() so the old daemon releases the port.
|
|
328
|
+
*/
|
|
329
|
+
async evictExistingDaemon() {
|
|
330
|
+
const jsonPath = path2.join(this.vaultDir, "daemon.json");
|
|
331
|
+
let existingPid;
|
|
332
|
+
try {
|
|
333
|
+
const content = fs2.readFileSync(jsonPath, "utf-8");
|
|
334
|
+
const info = JSON.parse(content);
|
|
335
|
+
if (typeof info.pid === "number" && info.pid !== process.pid) {
|
|
336
|
+
existingPid = info.pid;
|
|
337
|
+
}
|
|
338
|
+
} catch {
|
|
339
|
+
}
|
|
340
|
+
if (!existingPid) return;
|
|
341
|
+
try {
|
|
342
|
+
process.kill(existingPid, 0);
|
|
343
|
+
} catch {
|
|
344
|
+
return;
|
|
345
|
+
}
|
|
346
|
+
this.logger.info("daemon", "Evicting existing daemon", { pid: existingPid });
|
|
347
|
+
try {
|
|
348
|
+
process.kill(existingPid, "SIGTERM");
|
|
349
|
+
} catch {
|
|
350
|
+
return;
|
|
351
|
+
}
|
|
352
|
+
const deadline = Date.now() + DAEMON_EVICT_TIMEOUT_MS;
|
|
353
|
+
while (Date.now() < deadline) {
|
|
354
|
+
await new Promise((r) => setTimeout(r, DAEMON_EVICT_POLL_MS));
|
|
355
|
+
try {
|
|
356
|
+
process.kill(existingPid, 0);
|
|
357
|
+
} catch {
|
|
358
|
+
return;
|
|
359
|
+
}
|
|
360
|
+
}
|
|
361
|
+
this.logger.warn("daemon", "Evicted daemon did not exit in time, sending SIGKILL", { pid: existingPid });
|
|
362
|
+
try {
|
|
363
|
+
process.kill(existingPid, "SIGKILL");
|
|
364
|
+
} catch {
|
|
365
|
+
return;
|
|
366
|
+
}
|
|
367
|
+
await new Promise((r) => setTimeout(r, DAEMON_EVICT_POLL_MS));
|
|
368
|
+
try {
|
|
369
|
+
process.kill(existingPid, 0);
|
|
370
|
+
} catch {
|
|
371
|
+
return;
|
|
372
|
+
}
|
|
373
|
+
this.logger.warn("daemon", "Evicted daemon still alive after SIGKILL", { pid: existingPid });
|
|
374
|
+
}
|
|
325
375
|
writeDaemonJson() {
|
|
326
376
|
const info = {
|
|
327
377
|
pid: process.pid,
|
|
@@ -335,6 +385,9 @@ var DaemonServer = class {
|
|
|
335
385
|
removeDaemonJson() {
|
|
336
386
|
const jsonPath = path2.join(this.vaultDir, "daemon.json");
|
|
337
387
|
try {
|
|
388
|
+
const content = fs2.readFileSync(jsonPath, "utf-8");
|
|
389
|
+
const info = JSON.parse(content);
|
|
390
|
+
if (info.pid !== process.pid) return;
|
|
338
391
|
fs2.unlinkSync(jsonPath);
|
|
339
392
|
} catch {
|
|
340
393
|
}
|
|
@@ -2368,6 +2421,7 @@ var consolidationResponseSchema = external_exports.discriminatedUnion("consolida
|
|
|
2368
2421
|
var CONSOLIDATION_TRACE_FILENAME = "consolidation-trace.jsonl";
|
|
2369
2422
|
var ConsolidationEngine = class {
|
|
2370
2423
|
deps;
|
|
2424
|
+
maxTokens;
|
|
2371
2425
|
log;
|
|
2372
2426
|
lastTimestampCache = void 0;
|
|
2373
2427
|
constructor(config) {
|
|
@@ -2378,6 +2432,7 @@ var ConsolidationEngine = class {
|
|
|
2378
2432
|
llmProvider: config.llmProvider,
|
|
2379
2433
|
embeddingProvider: config.embeddingProvider
|
|
2380
2434
|
};
|
|
2435
|
+
this.maxTokens = config.maxTokens ?? CONSOLIDATION_MAX_TOKENS;
|
|
2381
2436
|
this.log = config.log ?? (() => {
|
|
2382
2437
|
});
|
|
2383
2438
|
}
|
|
@@ -2492,11 +2547,11 @@ var ConsolidationEngine = class {
|
|
|
2492
2547
|
}
|
|
2493
2548
|
clustersFound++;
|
|
2494
2549
|
const candidatesText = formatNotesForPrompt(cluster);
|
|
2495
|
-
const prompt = template.replace("{{count}}", String(cluster.length)).replace("{{observation_type}}", observationType ?? "unknown").replace("{{candidates}}", candidatesText);
|
|
2550
|
+
const prompt = template.replace("{{count}}", String(cluster.length)).replace("{{observation_type}}", observationType ?? "unknown").replace("{{candidates}}", candidatesText).replace("{{maxTokens}}", String(this.maxTokens));
|
|
2496
2551
|
let responseText;
|
|
2497
2552
|
try {
|
|
2498
2553
|
const response = await llmProvider.summarize(prompt, {
|
|
2499
|
-
maxTokens:
|
|
2554
|
+
maxTokens: this.maxTokens,
|
|
2500
2555
|
reasoning: LLM_REASONING_MODE
|
|
2501
2556
|
});
|
|
2502
2557
|
responseText = stripReasoningTokens(response.text);
|
|
@@ -2510,7 +2565,7 @@ var ConsolidationEngine = class {
|
|
|
2510
2565
|
}
|
|
2511
2566
|
let parsed;
|
|
2512
2567
|
try {
|
|
2513
|
-
const raw =
|
|
2568
|
+
const raw = extractJson(responseText);
|
|
2514
2569
|
const result = consolidationResponseSchema.safeParse(raw);
|
|
2515
2570
|
if (!result.success) {
|
|
2516
2571
|
this.log("warn", "ConsolidationEngine: LLM response failed schema validation", {
|
|
@@ -2737,7 +2792,7 @@ async function handleGetStats(deps) {
|
|
|
2737
2792
|
const digestConfig = deps.config.digest;
|
|
2738
2793
|
const digest = {
|
|
2739
2794
|
enabled: digestConfig.enabled,
|
|
2740
|
-
consolidation_enabled: digestConfig.consolidation,
|
|
2795
|
+
consolidation_enabled: digestConfig.consolidation.enabled,
|
|
2741
2796
|
metabolism_state: deps.metabolism?.state ?? null,
|
|
2742
2797
|
last_cycle: null,
|
|
2743
2798
|
substrate_queue: 0
|
|
@@ -2925,6 +2980,12 @@ var DigestBody = external_exports.object({
|
|
|
2925
2980
|
tier: external_exports.number().int().positive().optional(),
|
|
2926
2981
|
full: external_exports.boolean().optional()
|
|
2927
2982
|
}).optional();
|
|
2983
|
+
// Optional request body for POST /api/reprocess; every filter field is optional.
var ReprocessBody = external_exports
  .object({
    session: external_exports.string().optional(),
    date: external_exports.string().optional(),
    failed: external_exports.boolean().optional(),
    index_only: external_exports.boolean().optional()
  })
  .optional();
|
|
2928
2989
|
async function handleRebuild(deps) {
|
|
2929
2990
|
const { token, isNew } = deps.progressTracker.create("rebuild");
|
|
2930
2991
|
if (!isNew) {
|
|
@@ -3062,6 +3123,56 @@ async function handleCurate(deps, body, runCuration2) {
|
|
|
3062
3123
|
});
|
|
3063
3124
|
return { body: { token } };
|
|
3064
3125
|
}
|
|
3126
|
+
// POST /api/reprocess: validate the optional body, kick off a background
// reprocess run, and stream progress through the shared progress tracker.
// Returns immediately with a progress token; completion/failure is recorded
// asynchronously via progressTracker.update.
async function handleReprocess(deps, body) {
  const validation = ReprocessBody.safeParse(body);
  if (!validation.success) {
    return { status: 400, body: { error: "validation_failed", issues: validation.error.issues } };
  }
  const data = validation.data;
  const options = {
    session: data?.session,
    date: data?.date,
    failed: data?.failed,
    indexOnly: data?.index_only
  };
  const { token, isNew } = deps.progressTracker.create("reprocess");
  if (!isNew) {
    // A reprocess is already in flight; hand back its token instead of starting another.
    return { body: { token, status: "already_running" } };
  }
  const reportProgress = (phase, done, total) => {
    let percent = 0;
    if (total > 0) {
      percent = Math.round(done / total * PROGRESS_COMPLETE);
    }
    deps.progressTracker.update(token, { percent, message: `${phase}: ${done}/${total}` });
  };
  const runDeps = {
    vaultDir: deps.vaultDir,
    config: deps.config,
    index: deps.index,
    vectorIndex: deps.vectorIndex ?? void 0,
    log: deps.log
  };
  // Deliberately not awaited: the run continues after the HTTP response.
  runReprocess(runDeps, deps.llmProvider, deps.embeddingProvider, options, reportProgress).then((result) => {
    let message;
    if (result.sessionsProcessed === 0) {
      message = `No matching sessions found (${result.sessionsFound} checked)`;
    } else {
      message = `${result.sessionsProcessed} sessions, ${result.observationsExtracted} observations, ${result.summariesRegenerated} summaries`;
    }
    deps.progressTracker.update(token, { status: "completed", percent: PROGRESS_COMPLETE, message });
    deps.log("info", "Reprocess completed via API", { ...result });
  }).catch((err) => {
    deps.progressTracker.update(token, { status: "failed", message: err.message });
    deps.log("warn", "Reprocess failed via API", { error: err.message });
  });
  return { body: { token } };
}
|
|
3065
3176
|
|
|
3066
3177
|
// src/daemon/api/models.ts
|
|
3067
3178
|
var MODEL_LIST_TIMEOUT_MS = 5e3;
|
|
@@ -3117,6 +3228,28 @@ async function handleGetModels(req) {
|
|
|
3117
3228
|
return { body: { provider, models } };
|
|
3118
3229
|
}
|
|
3119
3230
|
|
|
3231
|
+
// src/daemon/api/sessions.ts
|
|
3232
|
+
// GET /api/sessions: list all session notes from the index with their
// parsed date and id (derived from the note path "<root>/<date>/session-<id>.md"),
// plus the distinct dates in reverse-chronological order.
function handleGetSessions(index) {
  const sessions = [];
  const dateSet = /* @__PURE__ */ new Set();
  for (const note of index.query({ type: "session" })) {
    const segments = note.path.split("/");
    const date = segments[1] ?? "";
    const filename = segments[2] ?? "";
    const id = filename.replace("session-", "").replace(".md", "");
    dateSet.add(date);
    sessions.push({
      id,
      date,
      // Fall back to a short id prefix when the note has no title.
      title: note.title || id.slice(0, 8),
      hasFailed: note.content.includes(SUMMARIZATION_FAILED_MARKER)
    });
  }
  // Newest dates first.
  const dates = [...dateSet].sort().reverse();
  return { body: { sessions, dates } };
}
|
|
3252
|
+
|
|
3120
3253
|
// src/daemon/main.ts
|
|
3121
3254
|
var import_yaml = __toESM(require_dist(), 1);
|
|
3122
3255
|
import fs6 from "fs";
|
|
@@ -3371,13 +3504,14 @@ ${content}`,
|
|
|
3371
3504
|
config,
|
|
3372
3505
|
log: (level, message, data) => logger[level]("digest", message, data)
|
|
3373
3506
|
});
|
|
3374
|
-
if (config.digest.consolidation) {
|
|
3507
|
+
if (config.digest.consolidation.enabled) {
|
|
3375
3508
|
const consolidationEngine = new ConsolidationEngine({
|
|
3376
3509
|
vaultDir,
|
|
3377
3510
|
index,
|
|
3378
3511
|
vectorIndex,
|
|
3379
3512
|
embeddingProvider,
|
|
3380
3513
|
llmProvider: digestLlm,
|
|
3514
|
+
maxTokens: config.digest.consolidation.max_tokens,
|
|
3381
3515
|
log: (level, message, data) => logger[level]("consolidation", message, data)
|
|
3382
3516
|
});
|
|
3383
3517
|
digestEngine.registerPrePass("consolidation", async () => {
|
|
@@ -3639,7 +3773,7 @@ ${content}`,
|
|
|
3639
3773
|
if (t.aiResponse) parts.push(`Response: ${t.aiResponse}`);
|
|
3640
3774
|
return parts.join("\n");
|
|
3641
3775
|
}).join("\n\n");
|
|
3642
|
-
const conversationSection =
|
|
3776
|
+
const conversationSection = `${CONVERSATION_HEADING}
|
|
3643
3777
|
|
|
3644
3778
|
${conversationText}`;
|
|
3645
3779
|
const observationPromise = lastBatch.length > 0 ? processor.process(lastBatch, sessionId).catch((err) => {
|
|
@@ -3921,6 +4055,9 @@ ${lines.join("\n")}`;
|
|
|
3921
4055
|
server.registerRoute("POST", "/api/rebuild", async () => handleRebuild(operationDeps));
|
|
3922
4056
|
server.registerRoute("POST", "/api/digest", async (req) => handleDigest(operationDeps, req.body));
|
|
3923
4057
|
server.registerRoute("POST", "/api/curate", async (req) => handleCurate(operationDeps, req.body, runCuration));
|
|
4058
|
+
server.registerRoute("POST", "/api/reprocess", async (req) => handleReprocess(operationDeps, req.body));
|
|
4059
|
+
server.registerRoute("GET", "/api/sessions", async () => handleGetSessions(index));
|
|
4060
|
+
await server.evictExistingDaemon();
|
|
3924
4061
|
const resolvedPort = await resolvePort(config.daemon.port, vaultDir);
|
|
3925
4062
|
if (resolvedPort === 0) {
|
|
3926
4063
|
logger.warn("daemon", "All preferred ports occupied, using ephemeral port");
|
|
@@ -3977,4 +4114,4 @@ export {
|
|
|
3977
4114
|
chokidar/index.js:
|
|
3978
4115
|
(*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) *)
|
|
3979
4116
|
*/
|
|
3980
|
-
//# sourceMappingURL=main-
|
|
4117
|
+
//# sourceMappingURL=main-XZ6X4BUX.js.map
|