@goondocks/myco 0.6.4 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +2 -3
- package/.claude-plugin/plugin.json +3 -3
- package/CONTRIBUTING.md +37 -30
- package/README.md +64 -28
- package/bin/myco-run +2 -0
- package/dist/agent-run-EFICNTAU.js +34 -0
- package/dist/agent-run-EFICNTAU.js.map +1 -0
- package/dist/agent-tasks-RXJ7Z5NG.js +180 -0
- package/dist/agent-tasks-RXJ7Z5NG.js.map +1 -0
- package/dist/chunk-2T7RPVPP.js +116 -0
- package/dist/chunk-2T7RPVPP.js.map +1 -0
- package/dist/chunk-3K5WGSJ4.js +165 -0
- package/dist/chunk-3K5WGSJ4.js.map +1 -0
- package/dist/chunk-46PWOKSI.js +26 -0
- package/dist/chunk-46PWOKSI.js.map +1 -0
- package/dist/chunk-4LPQ26CK.js +277 -0
- package/dist/chunk-4LPQ26CK.js.map +1 -0
- package/dist/chunk-5PEUFJ6U.js +92 -0
- package/dist/chunk-5PEUFJ6U.js.map +1 -0
- package/dist/chunk-5VZ52A4T.js +136 -0
- package/dist/chunk-5VZ52A4T.js.map +1 -0
- package/dist/chunk-BUSP3OJB.js +103 -0
- package/dist/chunk-BUSP3OJB.js.map +1 -0
- package/dist/chunk-D7TYRPRM.js +7312 -0
- package/dist/chunk-D7TYRPRM.js.map +1 -0
- package/dist/chunk-DCXRSSBP.js +22 -0
- package/dist/chunk-DCXRSSBP.js.map +1 -0
- package/dist/chunk-E4VLWIJC.js +2 -0
- package/dist/chunk-FFAYUQ5N.js +39 -0
- package/dist/chunk-FFAYUQ5N.js.map +1 -0
- package/dist/chunk-IB76KGBY.js +2 -0
- package/dist/chunk-JMJJEQ3P.js +486 -0
- package/dist/chunk-JMJJEQ3P.js.map +1 -0
- package/dist/{chunk-N33KUCFP.js → chunk-JTYZRPX5.js} +1 -9
- package/dist/chunk-JTYZRPX5.js.map +1 -0
- package/dist/{chunk-NLUE6CYG.js → chunk-JYOOJCPQ.js} +33 -17
- package/dist/chunk-JYOOJCPQ.js.map +1 -0
- package/dist/{chunk-Z74SDEKE.js → chunk-KB4DGYIY.js} +91 -9
- package/dist/chunk-KB4DGYIY.js.map +1 -0
- package/dist/{chunk-ERG2IEWX.js → chunk-KH64DHOY.js} +3 -7413
- package/dist/chunk-KH64DHOY.js.map +1 -0
- package/dist/chunk-KV4OC4H3.js +498 -0
- package/dist/chunk-KV4OC4H3.js.map +1 -0
- package/dist/chunk-KYLDNM7H.js +66 -0
- package/dist/chunk-KYLDNM7H.js.map +1 -0
- package/dist/chunk-LPUQPDC2.js +19 -0
- package/dist/chunk-LPUQPDC2.js.map +1 -0
- package/dist/chunk-M5XWW7UI.js +97 -0
- package/dist/chunk-M5XWW7UI.js.map +1 -0
- package/dist/chunk-MHSCMET3.js +275 -0
- package/dist/chunk-MHSCMET3.js.map +1 -0
- package/dist/chunk-MYX5NCRH.js +45 -0
- package/dist/chunk-MYX5NCRH.js.map +1 -0
- package/dist/chunk-OXZSXYAT.js +877 -0
- package/dist/chunk-OXZSXYAT.js.map +1 -0
- package/dist/chunk-PB6TOLRQ.js +35 -0
- package/dist/chunk-PB6TOLRQ.js.map +1 -0
- package/dist/chunk-PT5IC642.js +162 -0
- package/dist/chunk-PT5IC642.js.map +1 -0
- package/dist/chunk-QIK2XSDQ.js +187 -0
- package/dist/chunk-QIK2XSDQ.js.map +1 -0
- package/dist/chunk-RJ6ZQKG5.js +26 -0
- package/dist/chunk-RJ6ZQKG5.js.map +1 -0
- package/dist/{chunk-YIQLYIHW.js → chunk-TRUJLI6K.js} +29 -43
- package/dist/chunk-TRUJLI6K.js.map +1 -0
- package/dist/chunk-U3IBO3O3.js +41 -0
- package/dist/chunk-U3IBO3O3.js.map +1 -0
- package/dist/{chunk-7WHF2OIZ.js → chunk-UBZPD4HN.js} +25 -7
- package/dist/chunk-UBZPD4HN.js.map +1 -0
- package/dist/{chunk-HIN3UVOG.js → chunk-V7XG6V6C.js} +20 -11
- package/dist/chunk-V7XG6V6C.js.map +1 -0
- package/dist/chunk-WGTCA2NU.js +84 -0
- package/dist/chunk-WGTCA2NU.js.map +1 -0
- package/dist/{chunk-O6PERU7U.js → chunk-XNOCTDHF.js} +2 -2
- package/dist/chunk-YDN4OM33.js +80 -0
- package/dist/chunk-YDN4OM33.js.map +1 -0
- package/dist/cli-ODLFRIYS.js +128 -0
- package/dist/cli-ODLFRIYS.js.map +1 -0
- package/dist/client-EYOTW3JU.js +19 -0
- package/dist/client-MXRNQ5FI.js +13 -0
- package/dist/{config-IBS6KOLQ.js → config-UR5BSGVX.js} +21 -34
- package/dist/config-UR5BSGVX.js.map +1 -0
- package/dist/detect-H5OPI7GD.js +17 -0
- package/dist/detect-H5OPI7GD.js.map +1 -0
- package/dist/detect-providers-Q42OD4OS.js +26 -0
- package/dist/detect-providers-Q42OD4OS.js.map +1 -0
- package/dist/doctor-JLKTXDEH.js +258 -0
- package/dist/doctor-JLKTXDEH.js.map +1 -0
- package/dist/executor-ONSDHPGX.js +1441 -0
- package/dist/executor-ONSDHPGX.js.map +1 -0
- package/dist/init-6GWY345B.js +198 -0
- package/dist/init-6GWY345B.js.map +1 -0
- package/dist/init-wizard-UONLDYLI.js +294 -0
- package/dist/init-wizard-UONLDYLI.js.map +1 -0
- package/dist/llm-BV3QNVRD.js +17 -0
- package/dist/llm-BV3QNVRD.js.map +1 -0
- package/dist/loader-SH67XD54.js +28 -0
- package/dist/loader-SH67XD54.js.map +1 -0
- package/dist/loader-XVXKZZDH.js +18 -0
- package/dist/loader-XVXKZZDH.js.map +1 -0
- package/dist/{chunk-H7PRCVGQ.js → logs-QZVYF6FP.js} +74 -5
- package/dist/logs-QZVYF6FP.js.map +1 -0
- package/dist/main-BMCL7CPO.js +4393 -0
- package/dist/main-BMCL7CPO.js.map +1 -0
- package/dist/openai-embeddings-C265WRNK.js +14 -0
- package/dist/openai-embeddings-C265WRNK.js.map +1 -0
- package/dist/openrouter-U6VFCRX2.js +14 -0
- package/dist/openrouter-U6VFCRX2.js.map +1 -0
- package/dist/post-compact-OWFSOITU.js +26 -0
- package/dist/post-compact-OWFSOITU.js.map +1 -0
- package/dist/post-tool-use-DOUM7CGQ.js +56 -0
- package/dist/post-tool-use-DOUM7CGQ.js.map +1 -0
- package/dist/post-tool-use-failure-SG3C7PE6.js +28 -0
- package/dist/post-tool-use-failure-SG3C7PE6.js.map +1 -0
- package/dist/pre-compact-3J33CHXQ.js +25 -0
- package/dist/pre-compact-3J33CHXQ.js.map +1 -0
- package/dist/provider-check-3WBPZADE.js +12 -0
- package/dist/provider-check-3WBPZADE.js.map +1 -0
- package/dist/registry-J4XTWARS.js +25 -0
- package/dist/registry-J4XTWARS.js.map +1 -0
- package/dist/resolution-events-TFEQPVKS.js +12 -0
- package/dist/resolution-events-TFEQPVKS.js.map +1 -0
- package/dist/resolve-3FEUV462.js +9 -0
- package/dist/resolve-3FEUV462.js.map +1 -0
- package/dist/{restart-XCMILOL5.js → restart-2VM33WOB.js} +10 -6
- package/dist/{restart-XCMILOL5.js.map → restart-2VM33WOB.js.map} +1 -1
- package/dist/search-ZGQR5MDE.js +91 -0
- package/dist/search-ZGQR5MDE.js.map +1 -0
- package/dist/{server-6UDN35QN.js → server-6KMBJCHZ.js} +308 -517
- package/dist/server-6KMBJCHZ.js.map +1 -0
- package/dist/session-Z2FXDDG6.js +68 -0
- package/dist/session-Z2FXDDG6.js.map +1 -0
- package/dist/session-end-FLVX32LE.js +38 -0
- package/dist/session-end-FLVX32LE.js.map +1 -0
- package/dist/session-start-UCLK7PXE.js +169 -0
- package/dist/session-start-UCLK7PXE.js.map +1 -0
- package/dist/setup-digest-4KDSXAIV.js +15 -0
- package/dist/setup-digest-4KDSXAIV.js.map +1 -0
- package/dist/setup-llm-GKMCHURK.js +81 -0
- package/dist/setup-llm-GKMCHURK.js.map +1 -0
- package/dist/src/agent/definitions/agent.yaml +35 -0
- package/dist/src/agent/definitions/tasks/digest-only.yaml +84 -0
- package/dist/src/agent/definitions/tasks/extract-only.yaml +87 -0
- package/dist/src/agent/definitions/tasks/full-intelligence.yaml +472 -0
- package/dist/src/agent/definitions/tasks/graph-maintenance.yaml +92 -0
- package/dist/src/agent/definitions/tasks/review-session.yaml +132 -0
- package/dist/src/agent/definitions/tasks/supersession-sweep.yaml +86 -0
- package/dist/src/agent/definitions/tasks/title-summary.yaml +88 -0
- package/dist/src/agent/prompts/agent.md +121 -0
- package/dist/src/agent/prompts/orchestrator.md +91 -0
- package/dist/src/cli.js +1 -8
- package/dist/src/cli.js.map +1 -1
- package/dist/src/daemon/main.js +1 -8
- package/dist/src/daemon/main.js.map +1 -1
- package/dist/src/hooks/post-tool-use.js +3 -50
- package/dist/src/hooks/post-tool-use.js.map +1 -1
- package/dist/src/hooks/session-end.js +3 -32
- package/dist/src/hooks/session-end.js.map +1 -1
- package/dist/src/hooks/session-start.js +2 -8
- package/dist/src/hooks/session-start.js.map +1 -1
- package/dist/src/hooks/stop.js +3 -42
- package/dist/src/hooks/stop.js.map +1 -1
- package/dist/src/hooks/user-prompt-submit.js +3 -53
- package/dist/src/hooks/user-prompt-submit.js.map +1 -1
- package/dist/src/mcp/server.js +1 -8
- package/dist/src/mcp/server.js.map +1 -1
- package/dist/src/prompts/digest-system.md +1 -1
- package/dist/src/symbionts/manifests/claude-code.yaml +16 -0
- package/dist/src/symbionts/manifests/cursor.yaml +14 -0
- package/dist/stats-IUJPZSVZ.js +94 -0
- package/dist/stats-IUJPZSVZ.js.map +1 -0
- package/dist/stop-XRQLLXST.js +42 -0
- package/dist/stop-XRQLLXST.js.map +1 -0
- package/dist/stop-failure-2CAJJKRG.js +26 -0
- package/dist/stop-failure-2CAJJKRG.js.map +1 -0
- package/dist/subagent-start-MWWQTZMQ.js +26 -0
- package/dist/subagent-start-MWWQTZMQ.js.map +1 -0
- package/dist/subagent-stop-PJXYGRXB.js +28 -0
- package/dist/subagent-stop-PJXYGRXB.js.map +1 -0
- package/dist/task-completed-4LFRJVGI.js +27 -0
- package/dist/task-completed-4LFRJVGI.js.map +1 -0
- package/dist/ui/assets/index-DZrElonz.js +744 -0
- package/dist/ui/assets/index-TkeiYbZB.css +1 -0
- package/dist/ui/favicon.svg +7 -7
- package/dist/ui/fonts/Inter-Variable.woff2 +0 -0
- package/dist/ui/fonts/JetBrainsMono-Variable.woff2 +0 -0
- package/dist/ui/fonts/Newsreader-Italic-Variable.woff2 +0 -0
- package/dist/ui/fonts/Newsreader-Variable.woff2 +0 -0
- package/dist/ui/index.html +2 -2
- package/dist/user-prompt-submit-KSM3AR6P.js +59 -0
- package/dist/user-prompt-submit-KSM3AR6P.js.map +1 -0
- package/dist/{verify-TOWQHPBX.js → verify-UDAYVX37.js} +17 -22
- package/dist/verify-UDAYVX37.js.map +1 -0
- package/dist/{version-36RVCQA6.js → version-KLBN4HZT.js} +3 -4
- package/dist/version-KLBN4HZT.js.map +1 -0
- package/hooks/hooks.json +82 -5
- package/package.json +6 -3
- package/skills/myco/SKILL.md +10 -10
- package/skills/myco/references/cli-usage.md +15 -13
- package/skills/myco/references/vault-status.md +3 -3
- package/skills/myco/references/wisdom.md +4 -4
- package/skills/myco-curate/SKILL.md +86 -0
- package/dist/chunk-2ZIBCEYO.js +0 -113
- package/dist/chunk-2ZIBCEYO.js.map +0 -1
- package/dist/chunk-4RMSHZE4.js +0 -107
- package/dist/chunk-4RMSHZE4.js.map +0 -1
- package/dist/chunk-4XVKZ3WA.js +0 -1078
- package/dist/chunk-4XVKZ3WA.js.map +0 -1
- package/dist/chunk-6FQISQNA.js +0 -61
- package/dist/chunk-6FQISQNA.js.map +0 -1
- package/dist/chunk-7WHF2OIZ.js.map +0 -1
- package/dist/chunk-ERG2IEWX.js.map +0 -1
- package/dist/chunk-FPRXMJLT.js +0 -56
- package/dist/chunk-FPRXMJLT.js.map +0 -1
- package/dist/chunk-GENQ5QGP.js +0 -37
- package/dist/chunk-GENQ5QGP.js.map +0 -1
- package/dist/chunk-H7PRCVGQ.js.map +0 -1
- package/dist/chunk-HIN3UVOG.js.map +0 -1
- package/dist/chunk-HYVT345Y.js +0 -159
- package/dist/chunk-HYVT345Y.js.map +0 -1
- package/dist/chunk-J4D4CROB.js +0 -143
- package/dist/chunk-J4D4CROB.js.map +0 -1
- package/dist/chunk-MDLSAFPP.js +0 -99
- package/dist/chunk-MDLSAFPP.js.map +0 -1
- package/dist/chunk-N33KUCFP.js.map +0 -1
- package/dist/chunk-NL6WQO56.js +0 -65
- package/dist/chunk-NL6WQO56.js.map +0 -1
- package/dist/chunk-NLUE6CYG.js.map +0 -1
- package/dist/chunk-P723N2LP.js +0 -147
- package/dist/chunk-P723N2LP.js.map +0 -1
- package/dist/chunk-QLUE3BUL.js +0 -161
- package/dist/chunk-QLUE3BUL.js.map +0 -1
- package/dist/chunk-QN4W3JUA.js +0 -43
- package/dist/chunk-QN4W3JUA.js.map +0 -1
- package/dist/chunk-RGVBGTD6.js +0 -21
- package/dist/chunk-RGVBGTD6.js.map +0 -1
- package/dist/chunk-TWSTAVLO.js +0 -132
- package/dist/chunk-TWSTAVLO.js.map +0 -1
- package/dist/chunk-UP4P4OAA.js +0 -4423
- package/dist/chunk-UP4P4OAA.js.map +0 -1
- package/dist/chunk-YIQLYIHW.js.map +0 -1
- package/dist/chunk-YTFXA4RX.js +0 -86
- package/dist/chunk-YTFXA4RX.js.map +0 -1
- package/dist/chunk-Z74SDEKE.js.map +0 -1
- package/dist/cli-IHILSS6N.js +0 -97
- package/dist/cli-IHILSS6N.js.map +0 -1
- package/dist/client-AGFNR2S4.js +0 -12
- package/dist/config-IBS6KOLQ.js.map +0 -1
- package/dist/curate-3D4GHKJH.js +0 -78
- package/dist/curate-3D4GHKJH.js.map +0 -1
- package/dist/detect-providers-XEP4QA3R.js +0 -35
- package/dist/detect-providers-XEP4QA3R.js.map +0 -1
- package/dist/digest-7HLJXL77.js +0 -85
- package/dist/digest-7HLJXL77.js.map +0 -1
- package/dist/init-ARQ53JOR.js +0 -109
- package/dist/init-ARQ53JOR.js.map +0 -1
- package/dist/logs-IENORIYR.js +0 -84
- package/dist/logs-IENORIYR.js.map +0 -1
- package/dist/main-6AGPIMH2.js +0 -5715
- package/dist/main-6AGPIMH2.js.map +0 -1
- package/dist/rebuild-Q2ACEB6F.js +0 -64
- package/dist/rebuild-Q2ACEB6F.js.map +0 -1
- package/dist/reprocess-CDEFGQOV.js +0 -79
- package/dist/reprocess-CDEFGQOV.js.map +0 -1
- package/dist/search-7W25SKCB.js +0 -120
- package/dist/search-7W25SKCB.js.map +0 -1
- package/dist/server-6UDN35QN.js.map +0 -1
- package/dist/session-F326AWCH.js +0 -44
- package/dist/session-F326AWCH.js.map +0 -1
- package/dist/session-start-K6IGAC7H.js +0 -192
- package/dist/session-start-K6IGAC7H.js.map +0 -1
- package/dist/setup-digest-X5PN27F4.js +0 -15
- package/dist/setup-llm-S5OHQJXK.js +0 -15
- package/dist/src/prompts/classification.md +0 -43
- package/dist/stats-TTSDXGJV.js +0 -58
- package/dist/stats-TTSDXGJV.js.map +0 -1
- package/dist/templates-XPRBOWCE.js +0 -38
- package/dist/templates-XPRBOWCE.js.map +0 -1
- package/dist/ui/assets/index-08wKT7wS.css +0 -1
- package/dist/ui/assets/index-CMSMi4Jb.js +0 -369
- package/dist/verify-TOWQHPBX.js.map +0 -1
- package/skills/setup/SKILL.md +0 -174
- package/skills/setup/references/model-recommendations.md +0 -83
- /package/dist/{client-AGFNR2S4.js.map → chunk-E4VLWIJC.js.map} +0 -0
- /package/dist/{setup-digest-X5PN27F4.js.map → chunk-IB76KGBY.js.map} +0 -0
- /package/dist/{chunk-O6PERU7U.js.map → chunk-XNOCTDHF.js.map} +0 -0
- /package/dist/{setup-llm-S5OHQJXK.js.map → client-EYOTW3JU.js.map} +0 -0
- /package/dist/{version-36RVCQA6.js.map → client-MXRNQ5FI.js.map} +0 -0
package/dist/chunk-4XVKZ3WA.js
DELETED
|
@@ -1,1078 +0,0 @@
|
|
|
1
|
-
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
|
|
2
|
-
import {
|
|
3
|
-
ARTIFACT_TYPES,
|
|
4
|
-
CONVERSATION_HEADING,
|
|
5
|
-
TURN_HEADING_PREFIX,
|
|
6
|
-
VaultWriter,
|
|
7
|
-
bareSessionId,
|
|
8
|
-
buildClassificationPrompt,
|
|
9
|
-
buildExtractionPrompt,
|
|
10
|
-
buildSummaryPrompt,
|
|
11
|
-
buildTitlePrompt,
|
|
12
|
-
callout,
|
|
13
|
-
extractJson,
|
|
14
|
-
extractSection,
|
|
15
|
-
formatNoteForPrompt,
|
|
16
|
-
formatNotesForPrompt,
|
|
17
|
-
formatSporeBody,
|
|
18
|
-
indexNote,
|
|
19
|
-
isActiveSpore,
|
|
20
|
-
loadPrompt,
|
|
21
|
-
rebuildIndex,
|
|
22
|
-
require_gray_matter,
|
|
23
|
-
sessionNoteId,
|
|
24
|
-
stripReasoningTokens,
|
|
25
|
-
supersedeSpore,
|
|
26
|
-
supersededIdsSchema
|
|
27
|
-
} from "./chunk-UP4P4OAA.js";
|
|
28
|
-
import {
|
|
29
|
-
generateEmbedding
|
|
30
|
-
} from "./chunk-RGVBGTD6.js";
|
|
31
|
-
import {
|
|
32
|
-
stripFrontmatter
|
|
33
|
-
} from "./chunk-GENQ5QGP.js";
|
|
34
|
-
import {
|
|
35
|
-
initFts
|
|
36
|
-
} from "./chunk-6FQISQNA.js";
|
|
37
|
-
import {
|
|
38
|
-
external_exports,
|
|
39
|
-
require_dist
|
|
40
|
-
} from "./chunk-ERG2IEWX.js";
|
|
41
|
-
import {
|
|
42
|
-
CHARS_PER_TOKEN,
|
|
43
|
-
CURATION_CLUSTER_SIMILARITY,
|
|
44
|
-
DIGEST_LLM_REQUEST_TIMEOUT_MS,
|
|
45
|
-
DIGEST_SUBSTRATE_TYPE_WEIGHTS,
|
|
46
|
-
DIGEST_TIERS,
|
|
47
|
-
DIGEST_TIER_MIN_CONTEXT,
|
|
48
|
-
EMBEDDING_INPUT_LIMIT,
|
|
49
|
-
LLM_REASONING_MODE,
|
|
50
|
-
SUPERSESSION_MAX_TOKENS,
|
|
51
|
-
estimateTokens
|
|
52
|
-
} from "./chunk-J4D4CROB.js";
|
|
53
|
-
import {
|
|
54
|
-
__toESM
|
|
55
|
-
} from "./chunk-PZUWP5VK.js";
|
|
56
|
-
|
|
57
|
-
// src/daemon/digest.ts
|
|
58
|
-
var import_yaml = __toESM(require_dist(), 1);
|
|
59
|
-
import fs2 from "fs";
|
|
60
|
-
import path2 from "path";
|
|
61
|
-
import crypto from "crypto";
|
|
62
|
-
|
|
63
|
-
// src/daemon/trace.ts
|
|
64
|
-
import fs from "fs";
|
|
65
|
-
import path from "path";
|
|
66
|
-
function readLastRecord(filePath) {
|
|
67
|
-
let content;
|
|
68
|
-
try {
|
|
69
|
-
content = fs.readFileSync(filePath, "utf-8").trim();
|
|
70
|
-
} catch {
|
|
71
|
-
return null;
|
|
72
|
-
}
|
|
73
|
-
if (!content) return null;
|
|
74
|
-
const lines = content.split("\n");
|
|
75
|
-
const lastLine = lines[lines.length - 1];
|
|
76
|
-
try {
|
|
77
|
-
return JSON.parse(lastLine);
|
|
78
|
-
} catch {
|
|
79
|
-
return null;
|
|
80
|
-
}
|
|
81
|
-
}
|
|
82
|
-
function readLastTimestamp(filePath) {
|
|
83
|
-
return readLastRecord(filePath)?.timestamp ?? null;
|
|
84
|
-
}
|
|
85
|
-
function appendTraceRecord(filePath, record) {
|
|
86
|
-
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
|
87
|
-
fs.appendFileSync(filePath, JSON.stringify(record) + "\n", "utf-8");
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
// src/daemon/digest.ts
|
|
91
|
-
var PREVIOUS_EXTRACT_OVERHEAD_TOKENS = 50;
|
|
92
|
-
var CONTEXT_SAFETY_MARGIN = 0.7;
|
|
93
|
-
var EXTRACT_TYPE = "extract";
|
|
94
|
-
var DigestEngine = class {
|
|
95
|
-
vaultDir;
|
|
96
|
-
index;
|
|
97
|
-
llm;
|
|
98
|
-
config;
|
|
99
|
-
log;
|
|
100
|
-
lastCycleTimestampCache = void 0;
|
|
101
|
-
cycleInProgress = false;
|
|
102
|
-
/** Whether a digest cycle is currently running. */
|
|
103
|
-
get isCycleInProgress() {
|
|
104
|
-
return this.cycleInProgress;
|
|
105
|
-
}
|
|
106
|
-
/** Hooks that run before each digest cycle (e.g., consolidation). */
|
|
107
|
-
prePassHooks = [];
|
|
108
|
-
/** Hooks that run after each successful digest cycle. */
|
|
109
|
-
postPassHooks = [];
|
|
110
|
-
constructor(engineConfig) {
|
|
111
|
-
this.vaultDir = engineConfig.vaultDir;
|
|
112
|
-
this.index = engineConfig.index;
|
|
113
|
-
this.llm = engineConfig.llmProvider;
|
|
114
|
-
this.config = engineConfig.config;
|
|
115
|
-
this.log = engineConfig.log ?? (() => {
|
|
116
|
-
});
|
|
117
|
-
}
|
|
118
|
-
/** Register a hook that runs before each digest cycle. Best-effort — errors are logged, not thrown. */
|
|
119
|
-
registerPrePass(name, fn) {
|
|
120
|
-
this.prePassHooks.push({ name, fn });
|
|
121
|
-
}
|
|
122
|
-
/** Register a hook that runs after each successful digest cycle. Best-effort — errors are logged, not thrown. */
|
|
123
|
-
registerPostPass(name, fn) {
|
|
124
|
-
this.postPassHooks.push({ name, fn });
|
|
125
|
-
}
|
|
126
|
-
/**
|
|
127
|
-
* Query index for recent vault notes to feed into the digest.
|
|
128
|
-
* Filters out extract notes (our own output) and caps at max_notes_per_cycle.
|
|
129
|
-
*/
|
|
130
|
-
discoverSubstrate(lastCycleTimestamp) {
|
|
131
|
-
const maxNotes = this.config.digest.substrate.max_notes_per_cycle;
|
|
132
|
-
const notes = lastCycleTimestamp ? this.index.query({ updatedSince: lastCycleTimestamp, limit: maxNotes }) : this.index.query({ limit: maxNotes });
|
|
133
|
-
const filtered = notes.filter((n) => n.type !== EXTRACT_TYPE).filter((n) => {
|
|
134
|
-
if (n.type !== "spore") return true;
|
|
135
|
-
const status = n.frontmatter.status;
|
|
136
|
-
return !status || status === "active";
|
|
137
|
-
});
|
|
138
|
-
filtered.sort((a, b) => {
|
|
139
|
-
const weightA = DIGEST_SUBSTRATE_TYPE_WEIGHTS[a.type] ?? 0;
|
|
140
|
-
const weightB = DIGEST_SUBSTRATE_TYPE_WEIGHTS[b.type] ?? 0;
|
|
141
|
-
if (weightB !== weightA) return weightB - weightA;
|
|
142
|
-
return b.created.localeCompare(a.created);
|
|
143
|
-
});
|
|
144
|
-
return filtered.slice(0, maxNotes);
|
|
145
|
-
}
|
|
146
|
-
/**
|
|
147
|
-
* Filter configured tiers by the context window available.
|
|
148
|
-
* Only tiers whose minimum context requirement is met are eligible.
|
|
149
|
-
*/
|
|
150
|
-
getEligibleTiers() {
|
|
151
|
-
const contextWindow = this.config.digest.intelligence.context_window;
|
|
152
|
-
return DIGEST_TIERS.filter((tier) => {
|
|
153
|
-
const minContext = DIGEST_TIER_MIN_CONTEXT[tier];
|
|
154
|
-
return minContext !== void 0 && minContext <= contextWindow;
|
|
155
|
-
});
|
|
156
|
-
}
|
|
157
|
-
/**
|
|
158
|
-
* Format notes compactly for inclusion in the digest prompt.
|
|
159
|
-
* Stops adding notes once the token budget is exceeded.
|
|
160
|
-
*/
|
|
161
|
-
formatSubstrate(notes, tokenBudget) {
|
|
162
|
-
const charBudget = tokenBudget * CHARS_PER_TOKEN;
|
|
163
|
-
const parts = [];
|
|
164
|
-
let usedChars = 0;
|
|
165
|
-
for (const note of notes) {
|
|
166
|
-
const entry = `### [${note.type}] ${note.id} \u2014 "${note.title}"
|
|
167
|
-
${note.content}`;
|
|
168
|
-
if (usedChars + entry.length > charBudget && parts.length > 0) break;
|
|
169
|
-
parts.push(entry);
|
|
170
|
-
usedChars += entry.length;
|
|
171
|
-
}
|
|
172
|
-
return parts.join("\n\n");
|
|
173
|
-
}
|
|
174
|
-
/**
|
|
175
|
-
* Read a previously generated extract for a given tier.
|
|
176
|
-
* Returns the body (stripped of YAML frontmatter), or null if not found.
|
|
177
|
-
*/
|
|
178
|
-
readPreviousExtract(tier) {
|
|
179
|
-
const extractPath = path2.join(this.vaultDir, "digest", `extract-${tier}.md`);
|
|
180
|
-
let content;
|
|
181
|
-
try {
|
|
182
|
-
content = fs2.readFileSync(extractPath, "utf-8");
|
|
183
|
-
} catch {
|
|
184
|
-
return null;
|
|
185
|
-
}
|
|
186
|
-
return stripFrontmatter(content).body;
|
|
187
|
-
}
|
|
188
|
-
/**
|
|
189
|
-
* Write a digest extract to the vault with YAML frontmatter.
|
|
190
|
-
* Uses atomic write pattern (temp file + rename).
|
|
191
|
-
*/
|
|
192
|
-
writeExtract(tier, body, cycleId, model, substrateCount, substrateNotes, tokensUsed) {
|
|
193
|
-
const digestDir = path2.join(this.vaultDir, "digest");
|
|
194
|
-
fs2.mkdirSync(digestDir, { recursive: true });
|
|
195
|
-
const frontmatter = {
|
|
196
|
-
type: EXTRACT_TYPE,
|
|
197
|
-
tier,
|
|
198
|
-
generated: (/* @__PURE__ */ new Date()).toISOString(),
|
|
199
|
-
cycle_id: cycleId,
|
|
200
|
-
substrate_count: substrateCount,
|
|
201
|
-
model
|
|
202
|
-
};
|
|
203
|
-
if (substrateNotes && substrateNotes.length > 0) frontmatter.substrate_notes = substrateNotes;
|
|
204
|
-
if (tokensUsed !== void 0) frontmatter.tokens_used = tokensUsed;
|
|
205
|
-
const fmYaml = import_yaml.default.stringify(frontmatter, {
|
|
206
|
-
defaultStringType: "QUOTE_DOUBLE",
|
|
207
|
-
defaultKeyType: "PLAIN"
|
|
208
|
-
}).trim();
|
|
209
|
-
const file = `---
|
|
210
|
-
${fmYaml}
|
|
211
|
-
---
|
|
212
|
-
|
|
213
|
-
${body}
|
|
214
|
-
`;
|
|
215
|
-
const fullPath = path2.join(digestDir, `extract-${tier}.md`);
|
|
216
|
-
const tmpPath = `${fullPath}.tmp`;
|
|
217
|
-
fs2.writeFileSync(tmpPath, file, "utf-8");
|
|
218
|
-
fs2.renameSync(tmpPath, fullPath);
|
|
219
|
-
}
|
|
220
|
-
/**
|
|
221
|
-
* Append a digest cycle result as a JSON line to trace.jsonl.
|
|
222
|
-
*/
|
|
223
|
-
appendTrace(record) {
|
|
224
|
-
const tracePath = path2.join(this.vaultDir, "digest", "trace.jsonl");
|
|
225
|
-
appendTraceRecord(tracePath, record);
|
|
226
|
-
this.lastCycleTimestampCache = record.timestamp;
|
|
227
|
-
}
|
|
228
|
-
/**
|
|
229
|
-
* Read the last cycle timestamp from trace.jsonl.
|
|
230
|
-
* Cached in memory after first read — subsequent calls are O(1).
|
|
231
|
-
*/
|
|
232
|
-
getLastCycleTimestamp() {
|
|
233
|
-
if (this.lastCycleTimestampCache !== void 0) return this.lastCycleTimestampCache;
|
|
234
|
-
const tracePath = path2.join(this.vaultDir, "digest", "trace.jsonl");
|
|
235
|
-
this.lastCycleTimestampCache = readLastTimestamp(tracePath);
|
|
236
|
-
return this.lastCycleTimestampCache;
|
|
237
|
-
}
|
|
238
|
-
/**
|
|
239
|
-
* Run a full digest cycle: discover substrate, generate extracts for each tier.
|
|
240
|
-
* Returns the cycle result, or null if no substrate was found.
|
|
241
|
-
*/
|
|
242
|
-
async runCycle(opts) {
|
|
243
|
-
if (this.cycleInProgress) {
|
|
244
|
-
this.log("debug", "Cycle already in progress \u2014 skipping");
|
|
245
|
-
return null;
|
|
246
|
-
}
|
|
247
|
-
this.cycleInProgress = true;
|
|
248
|
-
try {
|
|
249
|
-
if (this.llm.ensureLoaded) {
|
|
250
|
-
const { context_window: contextWindow, gpu_kv_cache: gpuKvCache } = this.config.digest.intelligence;
|
|
251
|
-
this.log("debug", "Verifying digest model", { contextWindow, gpuKvCache });
|
|
252
|
-
await this.llm.ensureLoaded(contextWindow, gpuKvCache);
|
|
253
|
-
}
|
|
254
|
-
for (const hook of this.prePassHooks) {
|
|
255
|
-
try {
|
|
256
|
-
await hook.fn();
|
|
257
|
-
} catch (err) {
|
|
258
|
-
this.log("warn", `Pre-pass hook "${hook.name}" failed`, { error: err.message });
|
|
259
|
-
}
|
|
260
|
-
}
|
|
261
|
-
return await this.runCycleInternal(opts);
|
|
262
|
-
} finally {
|
|
263
|
-
this.cycleInProgress = false;
|
|
264
|
-
}
|
|
265
|
-
}
|
|
266
|
-
async runCycleInternal(opts) {
|
|
267
|
-
const startTime = Date.now();
|
|
268
|
-
const fullReprocess = opts?.fullReprocess ?? false;
|
|
269
|
-
const lastTimestamp = fullReprocess ? null : this.getLastCycleTimestamp();
|
|
270
|
-
const substrate = this.discoverSubstrate(lastTimestamp);
|
|
271
|
-
this.log("debug", "Discovering substrate", { lastTimestamp: lastTimestamp ?? "full reprocess", substrateCount: substrate.length });
|
|
272
|
-
if (substrate.length === 0) {
|
|
273
|
-
this.log("debug", "No substrate found \u2014 skipping cycle");
|
|
274
|
-
return null;
|
|
275
|
-
}
|
|
276
|
-
this.log("info", `Starting digest cycle`, { substrateCount: substrate.length, fullReprocess });
|
|
277
|
-
const cycleId = crypto.randomUUID();
|
|
278
|
-
const allEligible = this.getEligibleTiers();
|
|
279
|
-
const eligibleTiers = opts?.tiers ? allEligible.filter((t) => opts.tiers.includes(t)) : allEligible;
|
|
280
|
-
this.log("debug", `Eligible tiers: [${eligibleTiers.join(", ")}]`);
|
|
281
|
-
const tiersGenerated = [];
|
|
282
|
-
let totalTokensUsed = 0;
|
|
283
|
-
let model = "";
|
|
284
|
-
const typeToKey = {
|
|
285
|
-
session: "sessions",
|
|
286
|
-
spore: "spores",
|
|
287
|
-
plan: "plans",
|
|
288
|
-
artifact: "artifacts",
|
|
289
|
-
"team-member": "team"
|
|
290
|
-
};
|
|
291
|
-
const substrateIndex = {
|
|
292
|
-
sessions: [],
|
|
293
|
-
spores: [],
|
|
294
|
-
plans: [],
|
|
295
|
-
artifacts: [],
|
|
296
|
-
team: []
|
|
297
|
-
};
|
|
298
|
-
for (const note of substrate) {
|
|
299
|
-
const key = typeToKey[note.type];
|
|
300
|
-
if (key) {
|
|
301
|
-
substrateIndex[key].push(note.id);
|
|
302
|
-
}
|
|
303
|
-
}
|
|
304
|
-
const cycleTimestamp = (/* @__PURE__ */ new Date()).toISOString();
|
|
305
|
-
const systemPrompt = loadPrompt("digest-system");
|
|
306
|
-
const allSubstrateIds = substrate.map((note) => note.id);
|
|
307
|
-
for (const tier of eligibleTiers) {
|
|
308
|
-
const tierPrompt = loadPrompt(`digest-${tier}`);
|
|
309
|
-
const previousExtract = opts?.cleanSlate ? null : this.readPreviousExtract(tier);
|
|
310
|
-
const contextWindow = this.config.digest.intelligence.context_window;
|
|
311
|
-
const systemPromptTokens = estimateTokens(systemPrompt);
|
|
312
|
-
const tierPromptTokens = estimateTokens(tierPrompt);
|
|
313
|
-
const previousExtractTokens = previousExtract ? estimateTokens(previousExtract) + PREVIOUS_EXTRACT_OVERHEAD_TOKENS : 0;
|
|
314
|
-
const availableTokens = Math.floor(contextWindow * CONTEXT_SAFETY_MARGIN);
|
|
315
|
-
const substrateBudget = availableTokens - tier - systemPromptTokens - tierPromptTokens - previousExtractTokens;
|
|
316
|
-
if (substrateBudget <= 0) continue;
|
|
317
|
-
const formattedSubstrate = this.formatSubstrate(substrate, substrateBudget);
|
|
318
|
-
const promptParts = [tierPrompt];
|
|
319
|
-
if (previousExtract) {
|
|
320
|
-
promptParts.push("", "## Previous Synthesis", "", previousExtract);
|
|
321
|
-
}
|
|
322
|
-
promptParts.push("", "## New Substrate", "", formattedSubstrate);
|
|
323
|
-
promptParts.push(
|
|
324
|
-
"",
|
|
325
|
-
"---",
|
|
326
|
-
"Produce your updated synthesis now. Stay within the token budget specified above."
|
|
327
|
-
);
|
|
328
|
-
const userPrompt = promptParts.join("\n");
|
|
329
|
-
const promptTokens = estimateTokens(systemPrompt + userPrompt);
|
|
330
|
-
this.log("debug", `Tier ${tier}: sending LLM request`, { promptTokens, maxTokens: tier, substrateBudget });
|
|
331
|
-
try {
|
|
332
|
-
const tierStart = Date.now();
|
|
333
|
-
const digestConfig = this.config.digest.intelligence;
|
|
334
|
-
const opts2 = {
|
|
335
|
-
maxTokens: tier,
|
|
336
|
-
timeoutMs: DIGEST_LLM_REQUEST_TIMEOUT_MS,
|
|
337
|
-
contextLength: contextWindow,
|
|
338
|
-
reasoning: LLM_REASONING_MODE,
|
|
339
|
-
systemPrompt,
|
|
340
|
-
keepAlive: digestConfig.keep_alive ?? void 0
|
|
341
|
-
};
|
|
342
|
-
const response = await this.llm.summarize(userPrompt, opts2);
|
|
343
|
-
const tierDuration = Date.now() - tierStart;
|
|
344
|
-
const extractText = stripReasoningTokens(response.text);
|
|
345
|
-
model = response.model;
|
|
346
|
-
const responseTokens = estimateTokens(extractText);
|
|
347
|
-
totalTokensUsed += promptTokens + responseTokens;
|
|
348
|
-
this.log("info", `Tier ${tier}: completed`, { durationMs: tierDuration, responseTokens, model: response.model });
|
|
349
|
-
this.writeExtract(tier, extractText, cycleId, response.model, substrate.length, allSubstrateIds, promptTokens + responseTokens);
|
|
350
|
-
tiersGenerated.push(tier);
|
|
351
|
-
} catch (err) {
|
|
352
|
-
this.log("warn", `Tier ${tier}: failed`, { error: err.message });
|
|
353
|
-
}
|
|
354
|
-
}
|
|
355
|
-
if (tiersGenerated.length > 0) {
|
|
356
|
-
const digestDir = path2.join(this.vaultDir, "digest");
|
|
357
|
-
for (const tier of tiersGenerated) {
|
|
358
|
-
const extractPath = path2.join(digestDir, `extract-${tier}.md`);
|
|
359
|
-
try {
|
|
360
|
-
const content = fs2.readFileSync(extractPath, "utf-8");
|
|
361
|
-
const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
|
|
362
|
-
if (fmMatch) {
|
|
363
|
-
const parsed = import_yaml.default.parse(fmMatch[1]);
|
|
364
|
-
parsed.tiers_generated = tiersGenerated;
|
|
365
|
-
const fmYaml = import_yaml.default.stringify(parsed, { defaultStringType: "QUOTE_DOUBLE", defaultKeyType: "PLAIN" }).trim();
|
|
366
|
-
const extractBody = content.slice(fmMatch[0].length);
|
|
367
|
-
const tmpPath = `${extractPath}.tmp`;
|
|
368
|
-
fs2.writeFileSync(tmpPath, `---
|
|
369
|
-
${fmYaml}
|
|
370
|
-
---${extractBody}`, "utf-8");
|
|
371
|
-
fs2.renameSync(tmpPath, extractPath);
|
|
372
|
-
}
|
|
373
|
-
} catch {
|
|
374
|
-
}
|
|
375
|
-
}
|
|
376
|
-
}
|
|
377
|
-
const result = {
|
|
378
|
-
cycleId,
|
|
379
|
-
timestamp: cycleTimestamp,
|
|
380
|
-
substrate: substrateIndex,
|
|
381
|
-
tiersGenerated,
|
|
382
|
-
model,
|
|
383
|
-
durationMs: Date.now() - startTime,
|
|
384
|
-
tokensUsed: totalTokensUsed
|
|
385
|
-
};
|
|
386
|
-
this.appendTrace(result);
|
|
387
|
-
for (const hook of this.postPassHooks) {
|
|
388
|
-
try {
|
|
389
|
-
await hook.fn(result);
|
|
390
|
-
} catch (err) {
|
|
391
|
-
this.log("warn", `Post-pass hook "${hook.name}" failed`, { error: err.message });
|
|
392
|
-
}
|
|
393
|
-
}
|
|
394
|
-
return result;
|
|
395
|
-
}
|
|
396
|
-
};
|
|
397
|
-
var MS_PER_SECOND = 1e3;
/**
 * Adaptive scheduler for digest cycles. Starts "active", slows down through
 * configured cooldown intervals when cycles find nothing, and goes "dormant"
 * once no substrate has been seen for the dormancy threshold. All config
 * values arrive in seconds and are converted to milliseconds once.
 */
var Metabolism = class {
  // "active" -> "cooling" -> "dormant" lifecycle state.
  state = "active";
  // Delay used for the next scheduled cycle, in ms.
  currentIntervalMs;
  // Index into cooldownIntervalsMs for the next empty cycle.
  cooldownStep = 0;
  // Timestamp (ms epoch) of the most recent substrate sighting.
  lastSubstrateTime;
  // Pending setTimeout handle, or null when not scheduled.
  timer = null;
  // Cycle callback installed by start(); null until then.
  callback = null;
  activeIntervalMs;
  cooldownIntervalsMs;
  dormancyThresholdMs;
  /**
   * @param config settings in seconds: active_interval, cooldown_intervals
   *   (array of successively longer delays), dormancy_threshold.
   */
  constructor(config) {
    this.activeIntervalMs = MS_PER_SECOND * config.active_interval;
    this.cooldownIntervalsMs = config.cooldown_intervals.map((seconds) => seconds * MS_PER_SECOND);
    this.dormancyThresholdMs = MS_PER_SECOND * config.dormancy_threshold;
    this.currentIntervalMs = this.activeIntervalMs;
    this.lastSubstrateTime = Date.now();
  }
  /** Reset to active state when new substrate is found. */
  onSubstrateFound() {
    this.state = "active";
    this.cooldownStep = 0;
    this.currentIntervalMs = this.activeIntervalMs;
    this.lastSubstrateTime = Date.now();
  }
  /** Advance cooldown when a cycle finds no new substrate. */
  onEmptyCycle() {
    if (this.state === "dormant") return;
    this.state = "cooling";
    const steps = this.cooldownIntervalsMs;
    if (this.cooldownStep < steps.length) {
      // Past the last step the interval simply stays at its maximum.
      this.currentIntervalMs = steps[this.cooldownStep];
      this.cooldownStep += 1;
    }
    this.checkDormancy();
  }
  /** Enter dormant state if enough time has elapsed since last substrate. */
  checkDormancy() {
    if (Date.now() - this.lastSubstrateTime >= this.dormancyThresholdMs) {
      this.state = "dormant";
    }
  }
  /** Return to active from any state, resetting timers and rescheduling immediately. */
  activate() {
    this.onSubstrateFound();
    if (this.callback) {
      this.reschedule();
    }
  }
  /** Set lastSubstrateTime explicitly (for testing). */
  markLastSubstrate(time) {
    this.lastSubstrateTime = time;
  }
  /** Begin scheduling digest cycles with adaptive intervals. */
  start(callback) {
    this.callback = callback;
    this.reschedule();
  }
  /** Stop the timer. */
  stop() {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }
  }
  // Cancel any pending timer and restart the self-rescheduling loop. Each
  // iteration re-reads currentIntervalMs, so cooldown changes take effect on
  // the next tick without an explicit reschedule.
  reschedule() {
    this.stop();
    const cb = this.callback;
    if (!cb) return;
    const loop = () => {
      this.timer = setTimeout(async () => {
        await cb();
        loop();
      }, this.currentIntervalMs);
      // Don't keep the process alive just for the metabolism timer.
      this.timer.unref();
    };
    loop();
  }
};
|
|
476
|
-
|
|
477
|
-
// src/daemon/processor.ts
|
|
478
|
-
// Tokens reserved for the extraction prompt's fixed scaffolding (instructions
// etc.) on top of the conversation text when sizing input in process().
var EXTRACTION_PROMPT_OVERHEAD_TOKENS = 500;
// Sentinel substring placed in fallback summaries when the LLM call throws;
// runReprocess scans session files for it to find failed summarizations.
var SUMMARIZATION_FAILED_MARKER = "summarization failed";
// Schema for the LLM's artifact-classification reply (external_exports is
// presumably a zod-style validator — verify against the bundler input).
// Both `artifacts` and each item's `tags` default to empty arrays so a
// sparse or missing field still parses instead of failing the whole reply.
var ClassificationResponseSchema = external_exports.object({
  artifacts: external_exports.array(external_exports.object({
    source_path: external_exports.string(),
    artifact_type: external_exports.enum(ARTIFACT_TYPES),
    title: external_exports.string(),
    tags: external_exports.array(external_exports.string()).default([])
  })).default([])
});
|
488
|
-
/**
 * Turns captured conversation buffers into summaries, observations, and
 * artifact classifications via the configured LLM backend. Each operation
 * has its own output-token budget, overridable through the capture config.
 */
var BufferProcessor = class {
  extractionMaxTokens;
  summaryMaxTokens;
  titleMaxTokens;
  classificationMaxTokens;
  /**
   * @param backend LLM provider exposing summarize(prompt, opts).
   * @param contextWindow model context size in tokens (default 8192).
   * @param captureConfig optional per-operation max-token overrides.
   */
  constructor(backend, contextWindow = 8192, captureConfig) {
    this.backend = backend;
    this.contextWindow = contextWindow;
    this.extractionMaxTokens = captureConfig?.extraction_max_tokens ?? 2048;
    this.summaryMaxTokens = captureConfig?.summary_max_tokens ?? 512;
    this.titleMaxTokens = captureConfig?.title_max_tokens ?? 32;
    this.classificationMaxTokens = captureConfig?.classification_max_tokens ?? 1024;
  }
  // Clip `data` (keeping its leading portion) so input plus `maxTokens` of
  // output fits within the context window.
  truncateForContext(data, maxTokens) {
    const budgetTokens = this.contextWindow - maxTokens;
    if (estimateTokens(data) <= budgetTokens) return data;
    return data.slice(0, budgetTokens * CHARS_PER_TOKEN);
  }
  /**
   * Extract a summary plus structured observations from a transcript.
   * Keeps the *tail* of over-long conversations (most recent turns), snapped
   * to a turn boundary. Never throws: failures produce a degraded result.
   */
  async process(conversationMarkdown, sessionId) {
    if (!conversationMarkdown.trim()) {
      return { summary: "", observations: [], degraded: false };
    }
    const budgetTokens = this.contextWindow - EXTRACTION_PROMPT_OVERHEAD_TOKENS - this.extractionMaxTokens;
    const budgetChars = budgetTokens * CHARS_PER_TOKEN;
    let windowed = conversationMarkdown;
    if (windowed.length > budgetChars) {
      windowed = windowed.slice(-budgetChars);
      // Drop the leading partial turn so the prompt starts on a clean heading.
      const firstTurn = windowed.indexOf(TURN_HEADING_PREFIX);
      if (firstTurn > 0) {
        windowed = windowed.slice(firstTurn);
      }
    }
    const prompt = buildExtractionPrompt(sessionId, windowed, this.extractionMaxTokens);
    try {
      const response = await this.backend.summarize(prompt, {
        maxTokens: this.extractionMaxTokens,
        reasoning: LLM_REASONING_MODE
      });
      const parsed = extractJson(response.text);
      return {
        summary: parsed.summary,
        observations: parsed.observations ?? [],
        degraded: false
      };
    } catch (error) {
      return {
        summary: `LLM processing failed for session ${sessionId}. Error: ${error.message}`,
        observations: [],
        degraded: true
      };
    }
  }
  /**
   * Produce a prose summary and a short title for a session. Each LLM call
   * degrades independently to a placeholder on failure; the summary fallback
   * embeds SUMMARIZATION_FAILED_MARKER so it can be found and retried later.
   */
  async summarizeSession(conversationMarkdown, sessionId, user) {
    const clipped = this.truncateForContext(conversationMarkdown, this.summaryMaxTokens);
    const summaryPrompt = buildSummaryPrompt(sessionId, user ?? "unknown", clipped, this.summaryMaxTokens);
    let summaryText;
    try {
      const reply = await this.backend.summarize(summaryPrompt, { maxTokens: this.summaryMaxTokens, reasoning: LLM_REASONING_MODE });
      summaryText = stripReasoningTokens(reply.text);
    } catch (error) {
      summaryText = `Session ${sessionId} \u2014 ${SUMMARIZATION_FAILED_MARKER}: ${error.message}`;
    }
    // The title is derived from the summary (even a fallback one).
    let title;
    try {
      const reply = await this.backend.summarize(buildTitlePrompt(summaryText, sessionId), { maxTokens: this.titleMaxTokens, reasoning: LLM_REASONING_MODE });
      title = stripReasoningTokens(reply.text).trim();
    } catch {
      title = `Session ${sessionId}`;
    }
    return { summary: summaryText, title };
  }
  /**
   * Ask the LLM to classify artifact candidates; the reply is validated
   * against ClassificationResponseSchema. Throws on LLM/parse/schema failure.
   */
  async classifyArtifacts(candidates, sessionId) {
    if (candidates.length === 0) return [];
    const prompt = this.buildPromptForClassification(candidates, sessionId);
    const reply = await this.backend.summarize(prompt, { maxTokens: this.classificationMaxTokens, reasoning: LLM_REASONING_MODE });
    return ClassificationResponseSchema.parse(extractJson(reply.text)).artifacts;
  }
  // Prompt construction kept separate so tests can inspect/override it.
  buildPromptForClassification(candidates, sessionId) {
    return buildClassificationPrompt(sessionId, candidates, this.classificationMaxTokens);
  }
};
|
|
574
|
-
|
|
575
|
-
// src/vault/observations.ts
|
|
576
|
-
/**
 * Write one spore note per extracted observation and index each into the
 * FTS index. Ids combine observation type, the session-id tail, and a
 * millisecond timestamp.
 *
 * @returns one {id, path, observation} record per note written, in input order.
 */
function writeObservationNotes(observations, sessionId, writer, index, vaultDir) {
  return observations.map((observation) => {
    const noteId = `${observation.type}-${sessionId.slice(-6)}-${Date.now()}`;
    const body = formatSporeBody({
      title: observation.title,
      observationType: observation.type,
      content: observation.content,
      sessionId,
      root_cause: observation.root_cause,
      fix: observation.fix,
      rationale: observation.rationale,
      alternatives_rejected: observation.alternatives_rejected,
      gained: observation.gained,
      sacrificed: observation.sacrificed,
      tags: observation.tags
    });
    const relativePath = writer.writeSpore({
      id: noteId,
      observation_type: observation.type,
      session: sessionNoteId(sessionId),
      tags: observation.tags,
      content: body
    });
    indexNote(index, vaultDir, relativePath);
    return { id: noteId, path: relativePath, observation };
  });
}
|
|
605
|
-
|
|
606
|
-
// src/services/vault-ops.ts
|
|
607
|
-
import fs3 from "fs";
|
|
608
|
-
import path3 from "path";
|
|
609
|
-
|
|
610
|
-
// src/intelligence/batch.ts
|
|
611
|
-
// Max in-flight LLM calls per batchExecute wave (extraction/summarization).
var LLM_BATCH_CONCURRENCY = 3;
// Max in-flight embedding calls per batchExecute wave.
var EMBEDDING_BATCH_CONCURRENCY = 4;
|
|
613
|
-
/**
 * Run `fn` over `items` in fixed-size waves of `options.concurrency`,
 * collecting every outcome instead of failing fast.
 *
 * @param items inputs, processed in order, `concurrency` at a time.
 * @param fn async worker applied to each item.
 * @param options { concurrency, onProgress? } — onProgress receives
 *   (completedCount, totalCount) after each wave settles.
 * @returns { succeeded, failed, results } where results preserves item order:
 *   { status: "fulfilled", value } or { status: "rejected", reason: string }.
 */
async function batchExecute(items, fn, options) {
  const { concurrency, onProgress } = options;
  const results = [];
  let succeeded = 0;
  let failed = 0;
  for (let offset = 0; offset < items.length; offset += concurrency) {
    const wave = items.slice(offset, offset + concurrency);
    const outcomes = await Promise.allSettled(wave.map(fn));
    for (const outcome of outcomes) {
      if (outcome.status === "fulfilled") {
        succeeded += 1;
        results.push({ status: "fulfilled", value: outcome.value });
      } else {
        failed += 1;
        // Normalize rejection reasons to plain strings for logging/reporting.
        const reason = outcome.reason?.message ?? String(outcome.reason);
        results.push({ status: "rejected", reason });
      }
    }
    onProgress?.(succeeded + failed, items.length);
  }
  return { succeeded, failed, results };
}
|
|
634
|
-
|
|
635
|
-
// src/services/vault-ops.ts
|
|
636
|
-
var import_gray_matter = __toESM(require_gray_matter(), 1);
|
|
637
|
-
/**
 * Rebuild the full-text index and, when a vector index is configured,
 * re-embed every active note (status not "superseded"/"archived").
 * Also marks rebuilt notes as capture/embedding-succeeded in the pipeline
 * when one is attached to the context.
 *
 * @returns { ftsCount, embeddedCount, failedCount, skippedCount }.
 */
async function runRebuild(ctx, embeddingProvider, onProgress) {
  const { index, vaultDir } = ctx;
  initFts(index);
  const ftsCount = rebuildIndex(index, vaultDir);
  const vec = ctx.vectorIndex;
  if (!vec) {
    // FTS-only deployment: nothing to embed.
    return { ftsCount, embeddedCount: 0, failedCount: 0, skippedCount: 0 };
  }
  const allNotes = index.query({});
  const activeNotes = allNotes.filter((note) => {
    const status = note.frontmatter?.status;
    return status !== "superseded" && status !== "archived";
  });
  const skippedCount = allNotes.length - activeNotes.length;
  const embedNote = async (note) => {
    const text = [note.title, note.content].join("\n").slice(0, EMBEDDING_INPUT_LIMIT);
    const emb = await generateEmbedding(embeddingProvider, text);
    vec.upsert(note.id, emb.embedding, {
      type: note.type,
      session_id: note.frontmatter?.session ?? ""
    });
  };
  const result = await batchExecute(activeNotes, embedNote, {
    concurrency: EMBEDDING_BATCH_CONCURRENCY,
    onProgress
  });
  if (ctx.pipeline) {
    for (const note of activeNotes) {
      ctx.pipeline.register(note.id, note.type, note.path);
      ctx.pipeline.advance(note.id, note.type, "capture", "succeeded");
      ctx.pipeline.advance(note.id, note.type, "embedding", "succeeded");
    }
  }
  return {
    ftsCount,
    embeddedCount: result.succeeded,
    failedCount: result.failed,
    skippedCount
  };
}
|
|
681
|
-
/**
 * Run one digest cycle. With a pipeline attached and `options.full`, it only
 * resets previously digested items to pending (returning null) so the
 * pipeline reprocesses them; otherwise it drives a DigestEngine directly.
 * A `full` or explicit `tier` request forces a clean-slate full reprocess;
 * an ineligible tier throws before any work is done.
 */
async function runDigest(ctx, llmProvider, options) {
  const { config, vaultDir, index } = ctx;
  const log = ctx.log ? (level, message, data) => ctx.log(level, message, data) : () => {};
  if (ctx.pipeline && options?.full) {
    const digested = ctx.pipeline.listItems({ stage: "digest", status: "succeeded" });
    let reset = 0;
    for (const item of digested.items) {
      ctx.pipeline.advance(item.id, item.item_type, "digest", "pending");
      reset++;
    }
    log("info", `Reset ${reset} item(s) to digest:pending for full reprocessing`);
    return null;
  }
  const engine = new DigestEngine({
    vaultDir,
    index,
    llmProvider,
    config,
    log
  });
  const opts = {};
  if (options?.full || options?.tier !== void 0) {
    opts.fullReprocess = true;
    opts.cleanSlate = true;
  }
  if (options?.tier !== void 0) {
    const eligible = engine.getEligibleTiers();
    if (!eligible.includes(options.tier)) {
      throw new Error(`Tier ${options.tier} is not eligible. Eligible tiers: [${eligible.join(", ")}]`);
    }
    opts.tiers = [options.tier];
  }
  return engine.runCycle(opts);
}
|
|
717
|
-
// Spores are embedded in groups of this size during curation to bound the
// number of concurrent embedding requests.
var CURATION_EMBEDDING_BATCH_SIZE = 10;
|
|
718
|
-
/**
 * Cosine similarity between two equal-length numeric vectors, in [-1, 1].
 *
 * Fix: the original returned NaN (0/0) when either vector had zero
 * magnitude; NaN compared false everywhere in clusterSpores, which happened
 * to behave like "no match" but silently poisoned any arithmetic on the
 * result. A zero-magnitude vector now explicitly yields 0 (no similarity),
 * which preserves clustering behavior while making the value well-defined.
 */
function cosineSimilarity(a, b) {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  const denom = Math.sqrt(normA) * Math.sqrt(normB);
  if (denom === 0) return 0;
  return dot / denom;
}
|
|
727
|
-
/**
 * Component-wise mean of the spores' embedding vectors.
 * Returns [] for an empty cluster; dimensionality is taken from the first
 * spore's embedding.
 */
function updateCentroid(spores) {
  if (spores.length === 0) return [];
  const dim = spores[0].embedding.length;
  const sums = new Array(dim).fill(0);
  for (const { embedding } of spores) {
    for (let i = 0; i < dim; i++) {
      sums[i] += embedding[i];
    }
  }
  return sums.map((total) => total / spores.length);
}
|
|
741
|
-
/**
 * Greedy single-pass clustering of spores by embedding similarity.
 * Each spore joins the existing cluster whose centroid it is most similar
 * to, provided the similarity reaches CURATION_CLUSTER_SIMILARITY; otherwise
 * it seeds a new cluster. Joining a cluster recomputes its centroid, so
 * results depend on input order (by design — cheap, no iteration to converge).
 */
function clusterSpores(spores) {
  const clusters = [];
  for (const spore of spores) {
    let best = null;
    let bestScore = -1;
    for (const candidate of clusters) {
      const score = cosineSimilarity(spore.embedding, candidate.centroid);
      if (score > bestScore) {
        bestScore = score;
        best = candidate;
      }
    }
    if (best !== null && bestScore >= CURATION_CLUSTER_SIMILARITY) {
      best.spores.push(spore);
      best.centroid = updateCentroid(best.spores);
    } else {
      // Seed a new cluster; copy the embedding so centroid updates never
      // alias the spore's own vector.
      clusters.push({ spores: [spore], centroid: [...spore.embedding] });
    }
  }
  return clusters;
}
|
|
762
|
-
// Curation pass: find active spores whose content has been superseded by a
// newer spore of the same observation type, and mark them superseded.
// With a pipeline attached (and not a dry run) it only enqueues spores for
// pipeline-driven consolidation and returns early. Otherwise it embeds all
// active spores, clusters them per observation type, asks the LLM which
// older cluster members the newest one supersedes, and applies (or, in
// dry-run mode, just logs) the supersessions.
// Returns { scanned, clustersEvaluated, superseded } (plus enqueued: true in
// the pipeline path).
async function runCuration(deps, dryRun) {
  const { index, vectorIndex, llmProvider, embeddingProvider, vaultDir } = deps;
  // Logging is optional; default to a no-op.
  const log = deps.log ?? (() => {
  });
  const allSpores = index.query({ type: "spore" });
  const activeSpores = allSpores.filter((n) => isActiveSpore(n.frontmatter));
  if (deps.pipeline && !dryRun) {
    // Pipeline mode: hand spores off to the consolidation stage instead of
    // doing the LLM work inline.
    let enqueued = 0;
    for (const spore of activeSpores) {
      deps.pipeline.register(spore.id, "spore", spore.path);
      deps.pipeline.advance(spore.id, "spore", "capture", "succeeded");
      deps.pipeline.advance(spore.id, "spore", "consolidation", "pending");
      enqueued++;
    }
    log("info", `Enqueued ${enqueued} spore(s) for pipeline consolidation`);
    return { scanned: activeSpores.length, clustersEvaluated: 0, superseded: 0, enqueued: true };
  }
  if (activeSpores.length === 0) {
    return { scanned: 0, clustersEvaluated: 0, superseded: 0 };
  }
  // Embed all active spores in bounded batches; failures are counted and the
  // affected spores simply drop out of clustering.
  const sporesWithEmbeddings = [];
  let embedFailures = 0;
  for (let i = 0; i < activeSpores.length; i += CURATION_EMBEDDING_BATCH_SIZE) {
    const batch = activeSpores.slice(i, i + CURATION_EMBEDDING_BATCH_SIZE);
    const results = await Promise.allSettled(
      batch.map(async (spore) => {
        const text = spore.content.slice(0, EMBEDDING_INPUT_LIMIT);
        const result = await generateEmbedding(embeddingProvider, text);
        return { spore, embedding: result.embedding };
      })
    );
    for (const result of results) {
      if (result.status === "fulfilled") {
        const { spore, embedding } = result.value;
        sporesWithEmbeddings.push({
          id: spore.id,
          path: spore.path,
          title: spore.title,
          content: spore.content,
          created: spore.created,
          frontmatter: spore.frontmatter,
          embedding
        });
      } else {
        embedFailures++;
      }
    }
  }
  if (embedFailures > 0) {
    log("warn", `${embedFailures} spore(s) could not be embedded and were skipped`);
  }
  // Cluster within each observation type only — cross-type supersession is
  // never proposed.
  const byType = /* @__PURE__ */ new Map();
  for (const spore of sporesWithEmbeddings) {
    const obsType = spore.frontmatter["observation_type"] ?? "unknown";
    if (!byType.has(obsType)) byType.set(obsType, []);
    byType.get(obsType).push(spore);
  }
  const template = loadPrompt("supersession");
  let totalClusters = 0;
  let totalSuperseded = 0;
  for (const [obsType, typeSpores] of byType) {
    const clusters = clusterSpores(typeSpores);
    // Singleton clusters have nothing to supersede.
    const multiSpore = clusters.filter((c) => c.spores.length >= 2);
    if (multiSpore.length === 0) continue;
    log("info", `Type: ${obsType} \u2014 ${typeSpores.length} spores, ${multiSpore.length} cluster(s) to evaluate`);
    totalClusters += multiSpore.length;
    for (const cluster of multiSpore) {
      // Newest spore (by `created`, lexicographic — assumes ISO-ish
      // timestamps; TODO confirm) is the survivor; the rest are candidates.
      const sorted = [...cluster.spores].sort((a, b) => a.created.localeCompare(b.created));
      const newest = sorted[sorted.length - 1];
      const candidates = sorted.slice(0, sorted.length - 1);
      const newSporeText = formatNoteForPrompt(newest);
      const candidatesText = formatNotesForPrompt(candidates);
      const prompt = template.replace("{{new_spore}}", newSporeText).replace("{{candidates}}", candidatesText);
      let responseText;
      try {
        const response = await llmProvider.summarize(prompt, {
          maxTokens: SUPERSESSION_MAX_TOKENS,
          reasoning: LLM_REASONING_MODE
        });
        responseText = stripReasoningTokens(response.text);
      } catch (err) {
        // A failed LLM call skips this cluster; curation continues.
        log("warn", `LLM call failed for cluster in ${obsType}: ${String(err)}`);
        continue;
      }
      let rawIds;
      try {
        rawIds = JSON.parse(responseText);
      } catch {
        log("warn", `Could not parse LLM response for cluster in ${obsType}`);
        continue;
      }
      const parsed = supersededIdsSchema.safeParse(rawIds);
      if (!parsed.success) {
        log("warn", `LLM response schema invalid for cluster in ${obsType}`);
        continue;
      }
      // Only act on ids that actually belong to this cluster's candidates —
      // guards against the LLM hallucinating ids (including the newest's).
      const candidateMap = new Map(candidates.map((c) => [c.id, c]));
      const validIds = parsed.data.filter((id) => candidateMap.has(id));
      if (validIds.length === 0) continue;
      for (const id of validIds) {
        const candidate = candidateMap.get(id);
        if (dryRun) {
          log("info", `[dry-run] Would supersede: ${candidate.title} (${id}) by ${newest.title} (${newest.id})`);
          totalSuperseded++;
          continue;
        }
        const wrote = supersedeSpore(id, newest.id, candidate.path, { index, vectorIndex, vaultDir });
        if (!wrote) {
          log("warn", `File not found for ${id}, skipping write`);
          continue;
        }
        log("info", `Superseded: ${candidate.title} (${id}) by ${newest.title} (${newest.id})`);
        totalSuperseded++;
      }
    }
  }
  return {
    scanned: activeSpores.length,
    clustersEvaluated: totalClusters,
    superseded: totalSuperseded
  };
}
|
|
884
|
-
/**
 * Rewrite a session note body: replace the first markdown H1 with `newTitle`
 * and replace the `[!abstract] Summary` callout with one built from
 * `newNarrative` — or insert a fresh callout right after the H1 if none
 * exists. The body is expected to be post-frontmatter markdown.
 */
function updateTitleAndSummary(body, newTitle, newNarrative) {
  const retitled = body.replace(/^# .*/m, `# ${newTitle}`);
  const summaryCallout = callout("abstract", "Summary", newNarrative);
  if (/> \[!abstract\] Summary/.test(retitled)) {
    // Swap out the whole existing callout block (header + quoted lines).
    return retitled.replace(/> \[!abstract\] Summary\n(?:> .*\n?)*/m, `${summaryCallout}\n`);
  }
  // No callout yet: insert one after the H1, separated by a blank line.
  return retitled.replace(/^(# .*\n)/m, `$1\n${summaryCallout}\n`);
}
|
|
897
|
-
/**
 * Re-run extraction/summarization/indexing over stored session notes.
 *
 * Filters: options.session (substring of session id), options.date (exact
 * date-directory match), options.failed (only sessions whose file contains
 * SUMMARIZATION_FAILED_MARKER), options.indexOnly (skip all LLM work).
 * With a pipeline attached (and not index-only), sessions are merely
 * enqueued for pipeline-driven extraction and the function returns early.
 *
 * Fix: observation embeddings were previously upserted under a freshly
 * computed `${type}-${sessionTail}-${Date.now()}` id — a *second* Date.now()
 * call that almost never matched the id writeObservationNotes had actually
 * written into the note, orphaning the vector entries. We now use the ids
 * returned by writeObservationNotes so note and vector stay in sync.
 *
 * @returns counts: { sessionsFound, sessionsProcessed, observationsExtracted,
 *   summariesRegenerated, embeddingsQueued } (plus enqueued: true when the
 *   pipeline path is taken).
 */
async function runReprocess(ctx, llmProvider, embeddingProvider, options, onProgress) {
  const { vaultDir, config, index } = ctx;
  const log = ctx.log ?? (() => {
  });
  const sessionFilter = options?.session;
  const dateFilter = options?.date;
  const failedOnly = options?.failed ?? false;
  const skipLlm = options?.indexOnly ?? false;
  const sessionsDir = path3.join(vaultDir, "sessions");
  if (!fs3.existsSync(sessionsDir)) {
    return { sessionsFound: 0, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };
  }
  // Collect session-*.md files from sessions/<date>/ directories, applying
  // the date and session-substring filters.
  const sessionFiles = [];
  for (const dateDir of fs3.readdirSync(sessionsDir)) {
    if (dateFilter && dateDir !== dateFilter) continue;
    const datePath = path3.join(sessionsDir, dateDir);
    if (!fs3.statSync(datePath).isDirectory()) continue;
    for (const file of fs3.readdirSync(datePath)) {
      if (!file.startsWith("session-") || !file.endsWith(".md")) continue;
      const sessionId = file.replace("session-", "").replace(".md", "");
      if (sessionFilter && !sessionId.includes(sessionFilter)) continue;
      sessionFiles.push({ relativePath: path3.join("sessions", dateDir, file), sessionId, dateDir });
    }
  }
  if (sessionFiles.length === 0) {
    return { sessionsFound: 0, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };
  }
  if (ctx.pipeline && !skipLlm) {
    // Pipeline mode: enqueue for the extraction stage rather than doing the
    // LLM work inline.
    let enqueued = 0;
    let eligibleFiles = sessionFiles;
    if (failedOnly) {
      eligibleFiles = sessionFiles.filter(({ relativePath }) => {
        const rawContent = fs3.readFileSync(path3.join(vaultDir, relativePath), "utf-8");
        return rawContent.includes(SUMMARIZATION_FAILED_MARKER);
      });
    }
    for (const { relativePath, sessionId } of eligibleFiles) {
      ctx.pipeline.register(sessionId, "session", relativePath);
      ctx.pipeline.advance(sessionId, "session", "capture", "succeeded");
      ctx.pipeline.advance(sessionId, "session", "extraction", "pending");
      enqueued++;
    }
    log("info", `Enqueued ${enqueued} session(s) for pipeline reprocessing`, {
      filters: { session: sessionFilter, date: dateFilter, failed: failedOnly }
    });
    return {
      sessionsFound: sessionFiles.length,
      sessionsProcessed: enqueued,
      observationsExtracted: 0,
      summariesRegenerated: 0,
      embeddingsQueued: 0,
      enqueued: true
    };
  }
  const effectiveLlm = skipLlm ? null : llmProvider;
  const processor = effectiveLlm ? new BufferProcessor(effectiveLlm, config.intelligence.llm.context_window, config.capture) : null;
  const writer = new VaultWriter(vaultDir);
  // Load and pre-parse every eligible session file into a task record.
  const tasks = [];
  for (const { relativePath, sessionId } of sessionFiles) {
    const rawContent = fs3.readFileSync(path3.join(vaultDir, relativePath), "utf-8");
    const hasFailed = rawContent.includes(SUMMARIZATION_FAILED_MARKER);
    if (failedOnly && !hasFailed) continue;
    const { data: frontmatter, content: body } = (0, import_gray_matter.default)(rawContent);
    const bare = bareSessionId(sessionId);
    const conversationSection = extractSection(body, CONVERSATION_HEADING);
    // Preserve the raw frontmatter block verbatim so rewrites don't reformat
    // it. Assumes the file opens with "---" and the closing "---" is the
    // first such marker after position 4 — TODO confirm frontmatter values
    // never contain "---".
    const fmEnd = rawContent.indexOf("---", 4);
    const frontmatterBlock = rawContent.slice(0, fmEnd + 3);
    tasks.push({ relativePath, sessionId, bare, frontmatter, frontmatterBlock, body, conversationSection, hasFailed });
  }
  if (tasks.length === 0) {
    return { sessionsFound: sessionFiles.length, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };
  }
  log("info", `Reprocessing ${tasks.length} session(s)`, { filters: { session: sessionFilter, date: dateFilter, failed: failedOnly, indexOnly: skipLlm } });
  // Fire-and-track embedding requests; all are awaited together at the end.
  let embeddingsQueued = 0;
  const embedPending = [];
  const fireEmbed = (id, text, metadata) => {
    if (!ctx.vectorIndex) return;
    embeddingsQueued++;
    const vec = ctx.vectorIndex;
    const p = generateEmbedding(embeddingProvider, text).then((emb) => {
      vec.upsert(id, emb.embedding, metadata);
    }).catch((err) => {
      log("warn", `Embedding failed for ${id}`, { error: err.message });
    });
    embedPending.push(p);
  };
  // Phase 1: observation extraction + FTS reindex + session embedding.
  let totalObservations = 0;
  const extractionResult = await batchExecute(
    tasks,
    async (task) => {
      let obs = 0;
      if (processor && task.conversationSection.trim()) {
        const result = await processor.process(task.conversationSection, task.bare);
        if (result.observations.length > 0) {
          const written = writeObservationNotes(result.observations, task.bare, writer, index, vaultDir);
          obs = written.length;
          for (const { id, observation } of written) {
            // Use the note's actual id so the vector entry matches the note.
            fireEmbed(
              id,
              `${observation.title}
${observation.content}`.slice(0, EMBEDDING_INPUT_LIMIT),
              { type: "spore", session_id: task.bare }
            );
          }
        }
      }
      indexNote(index, vaultDir, task.relativePath);
      const embText = `${task.frontmatter.title ?? ""}
${task.frontmatter.summary ?? ""}`.slice(0, EMBEDDING_INPUT_LIMIT);
      if (embText.trim()) {
        fireEmbed(sessionNoteId(task.bare), embText, { type: "session", session_id: task.bare });
      }
      return obs;
    },
    {
      concurrency: LLM_BATCH_CONCURRENCY,
      onProgress: (done, total) => onProgress?.("extraction", done, total)
    }
  );
  for (const r of extractionResult.results) {
    if (r.status === "fulfilled") totalObservations += r.value;
  }
  // Phase 2: regenerate summaries/titles and rewrite the session files.
  let summarized = 0;
  if (processor) {
    const summarizableTasks = tasks.filter((t) => t.conversationSection);
    if (summarizableTasks.length > 0) {
      const summaryResult = await batchExecute(
        summarizableTasks,
        async (task) => {
          const user = typeof task.frontmatter.user === "string" ? task.frontmatter.user : void 0;
          const result = await processor.summarizeSession(task.conversationSection, task.bare, user);
          if (result.summary.includes(SUMMARIZATION_FAILED_MARKER)) {
            // Don't overwrite the file with a failure placeholder.
            log("warn", `Summarization failed for ${task.sessionId.slice(0, 12)}`);
            return false;
          }
          const updatedBody = updateTitleAndSummary(task.body, result.title, result.summary);
          fs3.writeFileSync(path3.join(vaultDir, task.relativePath), task.frontmatterBlock + updatedBody);
          indexNote(index, vaultDir, task.relativePath);
          return true;
        },
        {
          concurrency: LLM_BATCH_CONCURRENCY,
          onProgress: (done, total) => onProgress?.("summarization", done, total)
        }
      );
      for (const r of summaryResult.results) {
        if (r.status === "fulfilled" && r.value) summarized++;
      }
    }
  }
  await Promise.allSettled(embedPending);
  log("info", "Reprocess completed", {
    sessions: tasks.length,
    observations: totalObservations,
    summaries: summarized,
    embeddings: embeddingsQueued
  });
  return {
    sessionsFound: sessionFiles.length,
    sessionsProcessed: tasks.length,
    observationsExtracted: totalObservations,
    summariesRegenerated: summarized,
    embeddingsQueued
  };
}
|
|
1062
|
-
|
|
1063
|
-
export {
|
|
1064
|
-
readLastRecord,
|
|
1065
|
-
readLastTimestamp,
|
|
1066
|
-
appendTraceRecord,
|
|
1067
|
-
DigestEngine,
|
|
1068
|
-
Metabolism,
|
|
1069
|
-
SUMMARIZATION_FAILED_MARKER,
|
|
1070
|
-
BufferProcessor,
|
|
1071
|
-
writeObservationNotes,
|
|
1072
|
-
runRebuild,
|
|
1073
|
-
runDigest,
|
|
1074
|
-
runCuration,
|
|
1075
|
-
updateTitleAndSummary,
|
|
1076
|
-
runReprocess
|
|
1077
|
-
};
|
|
1078
|
-
//# sourceMappingURL=chunk-4XVKZ3WA.js.map
|