@goondocks/myco 0.3.6 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -1
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +9 -4
- package/commands/init.md +63 -39
- package/commands/setup-llm.md +69 -44
- package/commands/status.md +28 -10
- package/dist/{chunk-LAL7WK6I.js → chunk-2GJFTIWX.js} +2 -2
- package/dist/chunk-4FCFRJIQ.js +147 -0
- package/dist/chunk-4FCFRJIQ.js.map +1 -0
- package/dist/{chunk-PA3VMINE.js → chunk-AK6GNLPV.js} +6 -1
- package/dist/chunk-AK6GNLPV.js.map +1 -0
- package/dist/{chunk-ZCGGWNAA.js → chunk-BNIYWCST.js} +3 -3
- package/dist/chunk-BNIYWCST.js.map +1 -0
- package/dist/{chunk-ISCT2SI6.js → chunk-G6ZMTQMJ.js} +7357 -60
- package/dist/chunk-G6ZMTQMJ.js.map +1 -0
- package/dist/{chunk-7WNE22W7.js → chunk-IVS5MYBL.js} +3 -3
- package/dist/{chunk-7WNE22W7.js.map → chunk-IVS5MYBL.js.map} +1 -1
- package/dist/{chunk-7VPJK56U.js → chunk-JBD5KP5G.js} +31 -16
- package/dist/chunk-JBD5KP5G.js.map +1 -0
- package/dist/chunk-NUA7UTIY.js +37 -0
- package/dist/chunk-NUA7UTIY.js.map +1 -0
- package/dist/{chunk-5EGHGTN2.js → chunk-OUFSLZTX.js} +4 -4
- package/dist/chunk-P7RNAYU7.js +242 -0
- package/dist/chunk-P7RNAYU7.js.map +1 -0
- package/dist/chunk-QQ36XEJP.js +38 -0
- package/dist/chunk-QQ36XEJP.js.map +1 -0
- package/dist/chunk-RDXTQ436.js +49 -0
- package/dist/chunk-RDXTQ436.js.map +1 -0
- package/dist/{chunk-AWF3M57N.js → chunk-S7EIHYE7.js} +8 -8
- package/dist/{chunk-AWF3M57N.js.map → chunk-S7EIHYE7.js.map} +1 -1
- package/dist/{chunk-QWU7QLZI.js → chunk-TZDDXRHG.js} +10 -10
- package/dist/chunk-TZDDXRHG.js.map +1 -0
- package/dist/chunk-VYV5IFD6.js +99 -0
- package/dist/chunk-VYV5IFD6.js.map +1 -0
- package/dist/{chunk-F6AG6YXJ.js → chunk-XCPQHC4X.js} +2 -2
- package/dist/{chunk-5O52JLGT.js → chunk-XHWIIU5D.js} +8 -9
- package/dist/chunk-XHWIIU5D.js.map +1 -0
- package/dist/{chunk-2YQGS5CK.js → chunk-YZO22BBI.js} +45 -31
- package/dist/chunk-YZO22BBI.js.map +1 -0
- package/dist/{chunk-ZBNT6E22.js → chunk-ZCBL5HER.js} +2 -2
- package/dist/{cli-DNYLL2JD.js → cli-ZN6VBA7V.js} +23 -17
- package/dist/cli-ZN6VBA7V.js.map +1 -0
- package/dist/{client-XBD4NCDO.js → client-5SUO2UYH.js} +5 -5
- package/dist/{config-MD4XMLUS.js → config-4GGMWGAF.js} +4 -4
- package/dist/{detect-providers-LNOLBICR.js → detect-providers-5FU3BN5Q.js} +3 -3
- package/dist/{init-57K46O7U.js → init-7UXGDOFS.js} +51 -60
- package/dist/init-7UXGDOFS.js.map +1 -0
- package/dist/{main-FOZYS5B7.js → main-6UPAIDGS.js} +648 -228
- package/dist/main-6UPAIDGS.js.map +1 -0
- package/dist/{rebuild-JW6BCHHZ.js → rebuild-QDSYYCS7.js} +10 -10
- package/dist/rebuild-QDSYYCS7.js.map +1 -0
- package/dist/{reprocess-5YK7ZOFW.js → reprocess-ZNUQCIS3.js} +18 -18
- package/dist/reprocess-ZNUQCIS3.js.map +1 -0
- package/dist/{restart-GZE73CZ7.js → restart-5UY2KV54.js} +6 -6
- package/dist/{search-2HMG3ON7.js → search-2VEN3XIG.js} +9 -9
- package/dist/{server-KKVTFBSN.js → server-OR5B4B7K.js} +77 -54
- package/dist/{server-KKVTFBSN.js.map → server-OR5B4B7K.js.map} +1 -1
- package/dist/{session-5GI2YU6R.js → session-QF6MILAC.js} +2 -2
- package/dist/{session-start-5KXWKS3B.js → session-start-TUITIUMB.js} +29 -28
- package/dist/session-start-TUITIUMB.js.map +1 -0
- package/dist/setup-digest-ETCZAUIU.js +15 -0
- package/dist/setup-llm-DWEJE3JE.js +15 -0
- package/dist/setup-llm-DWEJE3JE.js.map +1 -0
- package/dist/src/cli.js +4 -4
- package/dist/src/daemon/main.js +4 -4
- package/dist/src/hooks/post-tool-use.js +5 -5
- package/dist/src/hooks/session-end.js +5 -5
- package/dist/src/hooks/session-start.js +4 -4
- package/dist/src/hooks/stop.js +7 -7
- package/dist/src/hooks/user-prompt-submit.js +5 -5
- package/dist/src/hooks/user-prompt-submit.js.map +1 -1
- package/dist/src/mcp/server.js +4 -4
- package/dist/src/prompts/classification.md +1 -0
- package/dist/src/prompts/digest-10000.md +74 -0
- package/dist/src/prompts/digest-1500.md +25 -0
- package/dist/src/prompts/digest-3000.md +32 -0
- package/dist/src/prompts/digest-5000.md +43 -0
- package/dist/src/prompts/digest-system.md +32 -0
- package/dist/src/prompts/extraction.md +11 -10
- package/dist/src/prompts/summary.md +11 -1
- package/dist/src/prompts/title.md +1 -1
- package/dist/{stats-R5KAGBQE.js → stats-IVIXIKTS.js} +12 -12
- package/dist/stats-IVIXIKTS.js.map +1 -0
- package/dist/{verify-7MWOV72E.js → verify-4H6CEE5T.js} +6 -6
- package/dist/{version-DLKARUP4.js → version-5B2TWXQJ.js} +4 -4
- package/dist/version-5B2TWXQJ.js.map +1 -0
- package/package.json +1 -1
- package/skills/myco/SKILL.md +20 -20
- package/skills/myco/references/wisdom.md +14 -14
- package/skills/rules/SKILL.md +4 -4
- package/dist/chunk-2YQGS5CK.js.map +0 -1
- package/dist/chunk-5O52JLGT.js.map +0 -1
- package/dist/chunk-7VPJK56U.js.map +0 -1
- package/dist/chunk-BA23DROX.js +0 -160
- package/dist/chunk-BA23DROX.js.map +0 -1
- package/dist/chunk-EF4JVH24.js +0 -7299
- package/dist/chunk-EF4JVH24.js.map +0 -1
- package/dist/chunk-ISCT2SI6.js.map +0 -1
- package/dist/chunk-PA3VMINE.js.map +0 -1
- package/dist/chunk-QWU7QLZI.js.map +0 -1
- package/dist/chunk-YMYJ7FNH.js +0 -19
- package/dist/chunk-YMYJ7FNH.js.map +0 -1
- package/dist/chunk-ZCGGWNAA.js.map +0 -1
- package/dist/cli-DNYLL2JD.js.map +0 -1
- package/dist/init-57K46O7U.js.map +0 -1
- package/dist/main-FOZYS5B7.js.map +0 -1
- package/dist/rebuild-JW6BCHHZ.js.map +0 -1
- package/dist/reprocess-5YK7ZOFW.js.map +0 -1
- package/dist/session-start-5KXWKS3B.js.map +0 -1
- package/dist/stats-R5KAGBQE.js.map +0 -1
- /package/dist/{chunk-LAL7WK6I.js.map → chunk-2GJFTIWX.js.map} +0 -0
- /package/dist/{chunk-5EGHGTN2.js.map → chunk-OUFSLZTX.js.map} +0 -0
- /package/dist/{chunk-F6AG6YXJ.js.map → chunk-XCPQHC4X.js.map} +0 -0
- /package/dist/{chunk-ZBNT6E22.js.map → chunk-ZCBL5HER.js.map} +0 -0
- /package/dist/{client-XBD4NCDO.js.map → client-5SUO2UYH.js.map} +0 -0
- /package/dist/{config-MD4XMLUS.js.map → config-4GGMWGAF.js.map} +0 -0
- /package/dist/{detect-providers-LNOLBICR.js.map → detect-providers-5FU3BN5Q.js.map} +0 -0
- /package/dist/{restart-GZE73CZ7.js.map → restart-5UY2KV54.js.map} +0 -0
- /package/dist/{search-2HMG3ON7.js.map → search-2VEN3XIG.js.map} +0 -0
- /package/dist/{session-5GI2YU6R.js.map → session-QF6MILAC.js.map} +0 -0
- /package/dist/{version-DLKARUP4.js.map → setup-digest-ETCZAUIU.js.map} +0 -0
- /package/dist/{verify-7MWOV72E.js.map → verify-4H6CEE5T.js.map} +0 -0
package/README.md
CHANGED
|
@@ -37,9 +37,10 @@ Myco captures everything your AI agents do — sessions, decisions, plans, disco
|
|
|
37
37
|
myco_search("how did we handle auth?") → semantically matched sessions, decisions, and linked context
|
|
38
38
|
myco_recall("migration plan") → full decision history with session lineage
|
|
39
39
|
myco_remember(observation) → persist a discovery for the team
|
|
40
|
+
myco_context(tier: 3000) → pre-computed project understanding, instantly available
|
|
40
41
|
```
|
|
41
42
|
|
|
42
|
-
**For humans** — open the vault in Obsidian and browse the intelligence graph visually. Sessions link to plans, plans link to decisions, decisions link to
|
|
43
|
+
**For humans** — open the vault in Obsidian and browse the intelligence graph visually. Sessions link to plans, plans link to decisions, decisions link to spores. It's all Markdown with backlinks — your team's connected knowledge, navigable and searchable.
|
|
43
44
|
|
|
44
45
|
**For teams** — the vault is a Git-friendly directory of Markdown files. Share it through your existing Git workflow.
|
|
45
46
|
|
|
@@ -47,7 +48,11 @@ myco_remember(observation) → persist a discovery for the team
|
|
|
47
48
|
|
|
48
49
|
### Capture
|
|
49
50
|
|
|
50
|
-
A background daemon reads your agent's conversation transcript after each turn — the full dialogue including prompts, AI responses, tool calls, and screenshots. Observations (decisions, gotchas, discoveries) are extracted automatically via a local LLM and written as linked vault notes.
|
|
51
|
+
A background daemon reads your agent's conversation transcript after each turn — the full dialogue including prompts, AI responses, tool calls, and screenshots. Observations called **spores** (decisions, gotchas, discoveries, trade-offs, bug fixes) are extracted automatically via a local LLM and written as linked vault notes.
|
|
52
|
+
|
|
53
|
+
### Digest
|
|
54
|
+
|
|
55
|
+
A **continuous reasoning engine** runs inside the daemon, periodically synthesizing all accumulated knowledge into tiered context extracts. These pre-computed summaries give agents an instant, rich understanding of the project at session start — no searching required. Four tiers serve different needs: executive briefing (1.5K tokens), team standup (3K), deep onboarding (5K), and institutional knowledge (10K).
|
|
51
56
|
|
|
52
57
|
### Index
|
|
53
58
|
|
|
@@ -55,11 +60,11 @@ Every note is indexed for both keyword search (SQLite FTS5) and semantic search
|
|
|
55
60
|
|
|
56
61
|
### Serve
|
|
57
62
|
|
|
58
|
-
An MCP server exposes the vault to any agent runtime.
|
|
63
|
+
An MCP server exposes the vault to any agent runtime. The digest extract is injected at session start for immediate context, and relevant spores are injected per-prompt for targeted intelligence. Agents build on your team's accumulated knowledge without being told to.
|
|
59
64
|
|
|
60
65
|
### Connect
|
|
61
66
|
|
|
62
|
-
Sessions link to plans. Plans link to decisions. Decisions link to
|
|
67
|
+
Sessions link to plans. Plans link to decisions. Decisions link to spores. Obsidian backlinks and metadata create a navigable graph of your team's institutional knowledge. Open the vault in [Obsidian](https://obsidian.md) to browse it visually, or let agents traverse it via MCP tools.
|
|
63
68
|
|
|
64
69
|
### Multi-agent
|
|
65
70
|
|
package/commands/init.md
CHANGED
|
@@ -9,20 +9,24 @@ Guide the user through setup using the composable CLI commands. **Do NOT create
|
|
|
9
9
|
|
|
10
10
|
**Ask each question one at a time using AskUserQuestion with selectable options.** Wait for the user's answer before proceeding to the next question. Do NOT combine multiple questions into one message.
|
|
11
11
|
|
|
12
|
-
|
|
12
|
+
The streamlined setup asks just four questions: vault location, provider, model, and embedding model. One model handles everything — hooks, extraction, summaries, and digest — sized for the most demanding task (digestion). Advanced configuration is available via CLI commands after init.
|
|
13
13
|
|
|
14
|
-
|
|
14
|
+
## Step 1: Detect available providers and system capabilities
|
|
15
|
+
|
|
16
|
+
Run the provider detection command and detect system RAM:
|
|
15
17
|
|
|
16
18
|
```bash
|
|
17
19
|
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js detect-providers
|
|
18
20
|
```
|
|
19
21
|
|
|
22
|
+
Detect RAM:
|
|
23
|
+
- **macOS**: `sysctl -n hw.memsize` (bytes → GB)
|
|
24
|
+
- **Linux**: parse `/proc/meminfo` for `MemTotal`
|
|
25
|
+
|
|
20
26
|
Parse the JSON output. This tells you which providers are running and what models are available.
|
|
21
27
|
|
|
22
28
|
## Step 2: Choose vault location
|
|
23
29
|
|
|
24
|
-
Ask the user:
|
|
25
|
-
|
|
26
30
|
**Question:** "Where would you like to store the Myco vault?"
|
|
27
31
|
|
|
28
32
|
**Options:**
|
|
@@ -30,69 +34,89 @@ Ask the user:
|
|
|
30
34
|
- "Centralized (~/.myco/vaults/<project-name>/)" — vault stays outside the repo, good for public repos or personal use
|
|
31
35
|
- "Custom path" — specify your own location
|
|
32
36
|
|
|
33
|
-
|
|
37
|
+
## Step 3: Choose provider and model
|
|
38
|
+
|
|
39
|
+
**Question:** "Which LLM provider and model?"
|
|
40
|
+
|
|
41
|
+
List only providers where `available` is `true`. Recommend a model sized for digest based on detected RAM:
|
|
42
|
+
|
|
43
|
+
| RAM | Recommended Model | Digest Context |
|
|
44
|
+
|-----|-------------------|----------------|
|
|
45
|
+
| **64GB+** | `qwen3.5:35b` (MoE, recommended) | 65536 |
|
|
46
|
+
| **32–64GB** | `qwen3.5:27b` | 32768 |
|
|
47
|
+
| **16–32GB** | `qwen3.5:latest` (~10B) | 16384 |
|
|
48
|
+
| **8–16GB** | `qwen3.5:4b` | 8192 |
|
|
34
49
|
|
|
35
|
-
|
|
50
|
+
The same model handles hooks (at 8K context), extraction, summaries, and digest (at the larger context from the table). No separate model configuration needed.
|
|
36
51
|
|
|
37
|
-
|
|
52
|
+
If the model isn't installed, offer to pull it:
|
|
53
|
+
- **Ollama**: `ollama pull qwen3.5`
|
|
54
|
+
- **LM Studio**: search for `qwen3.5` in the model browser
|
|
38
55
|
|
|
39
|
-
|
|
56
|
+
## Step 4: Choose embedding model
|
|
40
57
|
|
|
41
|
-
**
|
|
42
|
-
- "Ollama — gpt-oss (recommended)"
|
|
43
|
-
- "LM Studio — openai/gpt-oss-20b"
|
|
44
|
-
- "Anthropic"
|
|
58
|
+
**Question:** "Which embedding model?"
|
|
45
59
|
|
|
46
|
-
|
|
60
|
+
**Options:** List only providers that support embeddings (Anthropic does not):
|
|
61
|
+
- **Ollama** — list available embedding models. If none are available, offer to pull one (e.g., `bge-m3` or `nomic-embed-text`).
|
|
62
|
+
- **LM Studio** — filter the model list for names containing `text-embedding`. If none are available, guide the user to search for and download an embedding model through LM Studio's model browser.
|
|
47
63
|
|
|
48
|
-
|
|
64
|
+
If no embedding models are available on the chosen provider, help the user get one before proceeding.
|
|
49
65
|
|
|
50
|
-
|
|
66
|
+
## Step 5: Choose digest inject tier
|
|
51
67
|
|
|
52
|
-
**Question:** "
|
|
68
|
+
**Question:** "How much context should the agent receive at session start?"
|
|
53
69
|
|
|
54
|
-
|
|
55
|
-
- "Ollama — bge-m3 (recommended)"
|
|
56
|
-
- "LM Studio — text-embedding-bge-m3"
|
|
70
|
+
Based on RAM, present the recommended tiers:
|
|
57
71
|
|
|
58
|
-
|
|
72
|
+
| RAM | Options | Default |
|
|
73
|
+
|-----|---------|---------|
|
|
74
|
+
| **64GB+** | 1500, 3000, 5000, 10000 | 3000 |
|
|
75
|
+
| **32–64GB** | 1500, 3000, 5000 | 3000 |
|
|
76
|
+
| **16–32GB** | 1500, 3000 | 1500 |
|
|
77
|
+
| **8–16GB** | 1500 | 1500 |
|
|
59
78
|
|
|
60
|
-
|
|
61
|
-
-
|
|
79
|
+
**Options:**
|
|
80
|
+
- "1500 — executive briefing (fastest, lightest)"
|
|
81
|
+
- "3000 — team standup (recommended)"
|
|
82
|
+
- "5000 — deep onboarding"
|
|
83
|
+
- "10000 — institutional knowledge (richest)"
|
|
84
|
+
|
|
85
|
+
This controls what gets auto-injected at the start of every session. Agents can always request a different tier on-demand via the `myco_context` tool.
|
|
62
86
|
|
|
63
|
-
## Step
|
|
87
|
+
## Step 6: Run init and configure
|
|
64
88
|
|
|
65
|
-
|
|
89
|
+
Create the vault and apply settings:
|
|
66
90
|
|
|
67
91
|
```bash
|
|
92
|
+
# Create vault structure and base config
|
|
68
93
|
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js init \
|
|
69
94
|
--vault <chosen-path> \
|
|
70
95
|
--llm-provider <provider> \
|
|
71
96
|
--llm-model <model> \
|
|
72
|
-
--
|
|
73
|
-
--embedding-
|
|
74
|
-
--embedding-model <model> \
|
|
75
|
-
--embedding-url <base-url>
|
|
76
|
-
```
|
|
97
|
+
--embedding-provider <embedding-provider> \
|
|
98
|
+
--embedding-model <embedding-model>
|
|
77
99
|
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
100
|
+
# Set digest context window and inject tier based on user choices
|
|
101
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-digest \
|
|
102
|
+
--context-window <from-ram-table> \
|
|
103
|
+
--inject-tier <chosen-tier>
|
|
104
|
+
```
|
|
81
105
|
|
|
82
|
-
|
|
106
|
+
## Step 7: Verify connectivity
|
|
83
107
|
|
|
84
108
|
```bash
|
|
85
109
|
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js verify
|
|
86
110
|
```
|
|
87
111
|
|
|
88
|
-
If verification fails, help the user troubleshoot
|
|
89
|
-
|
|
90
|
-
## Step 7: Display summary
|
|
112
|
+
If verification fails, help the user troubleshoot.
|
|
91
113
|
|
|
92
|
-
|
|
114
|
+
## Step 8: Display summary
|
|
93
115
|
|
|
94
116
|
| Setting | Value |
|
|
95
117
|
|---------|-------|
|
|
96
118
|
| Vault path | `<resolved path>` |
|
|
97
|
-
|
|
|
98
|
-
| Embedding
|
|
119
|
+
| Provider | `<provider>` / `<model>` |
|
|
120
|
+
| Embedding | `<embedding-provider>` / `<embedding-model>` |
|
|
121
|
+
| Digest | enabled (context: `<context-window>`) |
|
|
122
|
+
| RAM detected | `<X>` GB |
|
package/commands/setup-llm.md
CHANGED
|
@@ -7,11 +7,13 @@ description: Configure or change the intelligence backend (Ollama, LM Studio, or
|
|
|
7
7
|
|
|
8
8
|
Guide the user through configuring their intelligence backend. This command can be run at any time to change providers or models.
|
|
9
9
|
|
|
10
|
+
The streamlined setup asks just three questions: provider, model, and embedding model. One model handles everything — hooks, extraction, summaries, and digest — at different context windows per request. Advanced configuration is available via the CLI for power users.
|
|
11
|
+
|
|
10
12
|
## Prerequisites
|
|
11
13
|
|
|
12
14
|
Read the existing `myco.yaml` from the vault directory to show current settings before making changes.
|
|
13
15
|
|
|
14
|
-
## Step 1: Detect available providers
|
|
16
|
+
## Step 1: Detect available providers and system capabilities
|
|
15
17
|
|
|
16
18
|
Check which providers are reachable:
|
|
17
19
|
|
|
@@ -19,66 +21,62 @@ Check which providers are reachable:
|
|
|
19
21
|
- **LM Studio** — fetch `http://localhost:1234/v1/models`, list model names
|
|
20
22
|
- **Anthropic** — check if `ANTHROPIC_API_KEY` is set in the environment
|
|
21
23
|
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
24
|
+
Detect system RAM for recommendations:
|
|
25
|
+
- **macOS**: `sysctl -n hw.memsize` (bytes → GB)
|
|
26
|
+
- **Linux**: parse `/proc/meminfo` for `MemTotal`
|
|
25
27
|
|
|
26
|
-
|
|
28
|
+
Report which providers are available and the detected RAM.
|
|
27
29
|
|
|
28
|
-
|
|
29
|
-
- **LM Studio** — list available models
|
|
30
|
-
- **Anthropic** — verify API key works, default model `claude-haiku-4-5-20251001`
|
|
30
|
+
## Step 2: Choose provider and model
|
|
31
31
|
|
|
32
|
-
|
|
32
|
+
Ask the user to select from available providers. After picking a provider, recommend a model sized for digest (the most demanding task). The same model handles hooks and extraction at smaller context windows automatically.
|
|
33
33
|
|
|
34
|
-
|
|
35
|
-
|------|--------|-----|
|
|
36
|
-
| **High** | `gpt-oss` (~20B), `gemma3:27b`, `qwen3.5:14b` | 16GB+ |
|
|
37
|
-
| **Mid** | `qwen3.5:8b`, `gemma3:12b` | 8GB+ |
|
|
38
|
-
| **Light** | `gemma3:4b`, `qwen3.5:4b` | 4GB+ |
|
|
34
|
+
Recommended models by hardware tier — Qwen 3.5 is preferred for its strong instruction-following and synthesis quality:
|
|
39
35
|
|
|
40
|
-
|
|
36
|
+
| RAM | Model | Context for Digest |
|
|
37
|
+
|-----|-------|--------------------|
|
|
38
|
+
| **64GB+** | `qwen3.5:35b` (MoE, recommended) | 65536 |
|
|
39
|
+
| **32–64GB** | `qwen3.5:27b` | 32768 |
|
|
40
|
+
| **16–32GB** | `qwen3.5:latest` (~10B) | 16384 |
|
|
41
|
+
| **8–16GB** | `qwen3.5:4b` | 8192 |
|
|
41
42
|
|
|
42
|
-
|
|
43
|
-
- `context_window` — ask or accept default of 8192
|
|
44
|
-
- `max_tokens` — ask or accept default of 1024
|
|
43
|
+
Any instruction-tuned model that handles JSON output works. Prefer what the user already has loaded, but recommend Qwen 3.5 if they're starting fresh.
|
|
45
44
|
|
|
46
45
|
If the chosen model isn't installed, offer to pull it:
|
|
47
|
-
- **Ollama**: `ollama pull
|
|
48
|
-
- **LM Studio**:
|
|
46
|
+
- **Ollama**: `ollama pull qwen3.5` (pulls latest tag automatically)
|
|
47
|
+
- **LM Studio**: search for `qwen3.5` in the model browser
|
|
49
48
|
|
|
50
|
-
|
|
49
|
+
## Step 3: Choose embedding model
|
|
51
50
|
|
|
52
|
-
|
|
51
|
+
Ask the user to select an embedding model — **Anthropic is not an option** (it doesn't support embeddings):
|
|
53
52
|
|
|
54
|
-
|
|
53
|
+
- **Ollama** — list available embedding models. If none are available, offer to pull one (e.g., `bge-m3` or `nomic-embed-text`).
|
|
54
|
+
- **LM Studio** — filter the model list for names containing `text-embedding`. If none are available, guide the user to search for and download an embedding model through LM Studio's model browser.
|
|
55
55
|
|
|
56
|
-
|
|
57
|
-
- **LM Studio** — possible but not recommended for embeddings; better suited for LLM work
|
|
56
|
+
If no embedding models are available on the chosen provider, help the user get one before proceeding.
|
|
58
57
|
|
|
59
|
-
If the embedding model
|
|
60
|
-
|
|
61
|
-
**Important:** If the user changes the embedding model, the vector index must be rebuilt. Warn them:
|
|
58
|
+
**Important:** If the user changes the embedding model, warn them:
|
|
62
59
|
> "Changing the embedding model will require a full rebuild of the vector index. Run `node dist/src/cli.js rebuild` after this change."
|
|
63
60
|
|
|
64
|
-
## Step 4:
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
```
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
base_url: http://localhost:11434
|
|
61
|
+
## Step 4: Apply settings
|
|
62
|
+
|
|
63
|
+
Use the CLI commands to write settings deterministically. The context window for the main LLM stays at 8192 (hooks don't need more). The digest context window is set based on the RAM tier recommendation.
|
|
64
|
+
|
|
65
|
+
```bash
|
|
66
|
+
# Set provider and model
|
|
67
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-llm \
|
|
68
|
+
--llm-provider <provider> \
|
|
69
|
+
--llm-model <model> \
|
|
70
|
+
--embedding-provider <embedding-provider> \
|
|
71
|
+
--embedding-model <embedding-model>
|
|
72
|
+
|
|
73
|
+
# Set digest context window based on RAM tier (model inherits from main LLM)
|
|
74
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-digest \
|
|
75
|
+
--context-window <from-ram-table>
|
|
80
76
|
```
|
|
81
77
|
|
|
78
|
+
Only pass flags the user explicitly changed — Zod defaults handle the rest.
|
|
79
|
+
|
|
82
80
|
If migrating from a v1 config (has `backend: local/cloud` structure), bump `version` to `2` and rewrite the entire intelligence section. The loader auto-maps `provider: haiku` to `anthropic`.
|
|
83
81
|
|
|
84
82
|
## Step 5: Verify and restart
|
|
@@ -87,3 +85,30 @@ If migrating from a v1 config (has `backend: local/cloud` structure), bump `vers
|
|
|
87
85
|
2. Test the embedding provider with a test embedding
|
|
88
86
|
3. Restart the daemon to pick up the new config: `node dist/src/cli.js restart`
|
|
89
87
|
4. Report success or issues found
|
|
88
|
+
|
|
89
|
+
## Advanced Configuration
|
|
90
|
+
|
|
91
|
+
For power users who want fine-grained control, all settings are available via CLI:
|
|
92
|
+
|
|
93
|
+
```bash
|
|
94
|
+
# Separate digest model (e.g., larger model on LM Studio)
|
|
95
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-digest \
|
|
96
|
+
--provider lm-studio \
|
|
97
|
+
--model "qwen/qwen3.5-35b-a3b" \
|
|
98
|
+
--context-window 65536 \
|
|
99
|
+
--gpu-kv-cache false
|
|
100
|
+
|
|
101
|
+
# Custom tiers and injection
|
|
102
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-digest \
|
|
103
|
+
--tiers 1500,3000,5000,10000 \
|
|
104
|
+
--inject-tier 3000
|
|
105
|
+
|
|
106
|
+
# Capture token budgets
|
|
107
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-digest \
|
|
108
|
+
--extraction-tokens 2048 \
|
|
109
|
+
--summary-tokens 1024
|
|
110
|
+
|
|
111
|
+
# View current settings
|
|
112
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-llm --show
|
|
113
|
+
node ${CLAUDE_PLUGIN_ROOT}/dist/src/cli.js setup-digest --show
|
|
114
|
+
```
|
package/commands/status.md
CHANGED
|
@@ -39,14 +39,24 @@ Query the FTS index for counts:
|
|
|
39
39
|
| Metric | How to check |
|
|
40
40
|
|--------|-------------|
|
|
41
41
|
| Sessions | `index.query({ type: 'session' }).length` |
|
|
42
|
-
|
|
|
42
|
+
| Spores | `index.query({ type: 'spore' }).length` |
|
|
43
43
|
| Plans | `index.query({ type: 'plan' }).length` |
|
|
44
44
|
| Artifacts | `index.query({ type: 'artifact' }).length` |
|
|
45
45
|
| Embeddings | Vector index count |
|
|
46
46
|
|
|
47
|
-
Also report
|
|
47
|
+
Also report spore breakdown by observation type (decision, gotcha, trade_off, etc.).
|
|
48
48
|
|
|
49
|
-
## Step 5:
|
|
49
|
+
## Step 5: Digest status
|
|
50
|
+
|
|
51
|
+
Check the digest system state:
|
|
52
|
+
|
|
53
|
+
- **Enabled/disabled**: read `digest.enabled` from `myco.yaml`
|
|
54
|
+
- **Extracts**: list which tier files exist in `vault/digest/` (extract-1500.md, etc.) with file sizes and generated timestamps
|
|
55
|
+
- **Last cycle**: read last line of `vault/digest/trace.jsonl` — report cycle ID, timestamp, tiers generated, substrate count, duration
|
|
56
|
+
- **Metabolism**: report configured tiers, inject tier, and context window
|
|
57
|
+
- **Digest model**: if `digest.intelligence.model` is set, show it; otherwise note "inherits from main LLM"
|
|
58
|
+
|
|
59
|
+
## Step 6: Intelligence backend health
|
|
50
60
|
|
|
51
61
|
Test connectivity to the configured providers:
|
|
52
62
|
|
|
@@ -54,7 +64,7 @@ Test connectivity to the configured providers:
|
|
|
54
64
|
- **Embedding provider**: call `isAvailable()` — report reachable or not
|
|
55
65
|
- If either is unreachable, suggest running `/myco-setup-llm`
|
|
56
66
|
|
|
57
|
-
## Step
|
|
67
|
+
## Step 7: Pending issues
|
|
58
68
|
|
|
59
69
|
Check for problems:
|
|
60
70
|
|
|
@@ -63,13 +73,13 @@ Check for problems:
|
|
|
63
73
|
- **Missing vectors**: does `vectors.db` exist? If not, embeddings are disabled
|
|
64
74
|
- **Lineage**: does `lineage.json` exist? Report link count if so
|
|
65
75
|
|
|
66
|
-
## Step
|
|
76
|
+
## Step 8: Recent activity
|
|
67
77
|
|
|
68
78
|
Show the 3 most recent sessions with:
|
|
69
79
|
- Session ID (short form)
|
|
70
80
|
- Title
|
|
71
81
|
- Started/ended timestamps
|
|
72
|
-
- Number of
|
|
82
|
+
- Number of spores extracted
|
|
73
83
|
- Parent session (if lineage detected)
|
|
74
84
|
|
|
75
85
|
## Output format
|
|
@@ -92,18 +102,26 @@ Sessions: 1 active
|
|
|
92
102
|
|
|
93
103
|
--- Vault ---
|
|
94
104
|
Sessions: 12
|
|
95
|
-
|
|
105
|
+
Spores: 183 (67 decision, 34 gotcha, 32 trade_off, 20 discovery, 19 bug_fix, 1 cross-cutting)
|
|
96
106
|
Plans: 0
|
|
97
107
|
Artifacts: 8
|
|
98
108
|
Vectors: 224
|
|
99
109
|
|
|
110
|
+
--- Digest ---
|
|
111
|
+
Enabled: yes
|
|
112
|
+
Tiers: [1500, 3000, 5000, 10000]
|
|
113
|
+
Inject: 3000 (auto-inject at session start)
|
|
114
|
+
Model: gpt-oss (inherited from main LLM)
|
|
115
|
+
Last cycle: dc-a1b2c3 (2 min ago, 4 tiers, 12 notes, 45s)
|
|
116
|
+
Extracts: 1500 (1.1KB), 3000 (4.5KB), 5000 (6.9KB), 10000 (9.6KB)
|
|
117
|
+
|
|
100
118
|
--- Lineage ---
|
|
101
119
|
Links: 5 (3 clear, 1 inferred, 1 semantic_similarity)
|
|
102
120
|
|
|
103
121
|
--- Recent Sessions ---
|
|
104
|
-
1. [abc123] "Auth redesign session" (2h 15m, 5
|
|
105
|
-
2. [def456] "Bug fix for CORS" (45m, 2
|
|
106
|
-
3. [ghi789] "Config cleanup" (20m, 1
|
|
122
|
+
1. [abc123] "Auth redesign session" (2h 15m, 5 spores)
|
|
123
|
+
2. [def456] "Bug fix for CORS" (45m, 2 spores, parent: abc123)
|
|
124
|
+
3. [ghi789] "Config cleanup" (20m, 1 spore)
|
|
107
125
|
|
|
108
126
|
--- Issues ---
|
|
109
127
|
None found.
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
|
|
2
2
|
import {
|
|
3
3
|
AgentRegistry
|
|
4
|
-
} from "./chunk-
|
|
4
|
+
} from "./chunk-BNIYWCST.js";
|
|
5
5
|
|
|
6
6
|
// src/version.ts
|
|
7
7
|
import fs from "fs";
|
|
@@ -30,4 +30,4 @@ function readVersionFrom(dir) {
|
|
|
30
30
|
export {
|
|
31
31
|
getPluginVersion
|
|
32
32
|
};
|
|
33
|
-
//# sourceMappingURL=chunk-
|
|
33
|
+
//# sourceMappingURL=chunk-2GJFTIWX.js.map
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
|
|
2
|
+
import {
|
|
3
|
+
parseStringFlag
|
|
4
|
+
} from "./chunk-SAKJMNSR.js";
|
|
5
|
+
import {
|
|
6
|
+
MycoConfigSchema,
|
|
7
|
+
require_dist
|
|
8
|
+
} from "./chunk-G6ZMTQMJ.js";
|
|
9
|
+
import {
|
|
10
|
+
__toESM
|
|
11
|
+
} from "./chunk-PZUWP5VK.js";
|
|
12
|
+
|
|
13
|
+
// src/cli/setup-digest.ts
|
|
14
|
+
var import_yaml = __toESM(require_dist(), 1);
|
|
15
|
+
import fs from "fs";
|
|
16
|
+
import path from "path";
|
|
17
|
+
var CONFIG_FILENAME = "myco.yaml";
|
|
18
|
+
var DAEMON_STATE_FILENAME = "daemon.json";
|
|
19
|
+
var USAGE = `Usage: myco setup-digest [options]
|
|
20
|
+
|
|
21
|
+
Configure digest (continuous reasoning) settings.
|
|
22
|
+
|
|
23
|
+
Options:
|
|
24
|
+
--enabled <true|false> Enable/disable digest (default: true)
|
|
25
|
+
--tiers <1500,3000,...> Comma-separated tier list
|
|
26
|
+
--inject-tier <number|null> Tier to auto-inject at session start
|
|
27
|
+
--provider <name> LLM provider for digest (null = inherit)
|
|
28
|
+
--model <name> Model for digest (null = inherit)
|
|
29
|
+
--base-url <url> Provider base URL (null = inherit)
|
|
30
|
+
--context-window <number> Context window for digest operations
|
|
31
|
+
--keep-alive <duration> Keep model loaded (Ollama, e.g. "30m")
|
|
32
|
+
--gpu-kv-cache <true|false> Offload KV cache to GPU (LM Studio)
|
|
33
|
+
--active-interval <seconds> Metabolism active interval
|
|
34
|
+
--dormancy-threshold <seconds> Time before dormancy
|
|
35
|
+
--max-notes <number> Max substrate notes per cycle
|
|
36
|
+
--extraction-tokens <number> Max tokens for spore extraction
|
|
37
|
+
--summary-tokens <number> Max tokens for session summaries
|
|
38
|
+
--title-tokens <number> Max tokens for session titles
|
|
39
|
+
--classification-tokens <number> Max tokens for artifact classification
|
|
40
|
+
--show Show current settings and exit
|
|
41
|
+
`;
|
|
42
|
+
/**
 * CLI handler for `myco setup-digest`: loads the vault's YAML config,
 * applies any digest/capture flags from the command line, validates the
 * merged document against MycoConfigSchema, and writes it back.
 *
 * @param {string[]} args - Raw CLI arguments for this subcommand.
 * @param {string} vaultDir - Directory containing the vault's config file.
 * @returns {Promise<void>} Resolves after printing results; exits the
 *   process with code 1 on schema-validation failure.
 */
async function run(args, vaultDir) {
  const configPath = path.join(vaultDir, CONFIG_FILENAME);
  const raw = fs.readFileSync(configPath, "utf-8");
  const doc = import_yaml.default.parse(raw);

  // The same config subset is echoed for --show and after a successful
  // update; build it in one place instead of duplicating the shape.
  const summarize = (config) => JSON.stringify({
    digest: config.digest,
    capture: {
      extraction_max_tokens: config.capture.extraction_max_tokens,
      summary_max_tokens: config.capture.summary_max_tokens,
      title_max_tokens: config.capture.title_max_tokens,
      classification_max_tokens: config.capture.classification_max_tokens
    }
  }, null, 2);

  // Read-only mode: print current settings and exit.
  if (args.includes("--show")) {
    console.log(summarize(MycoConfigSchema.parse(doc)));
    return;
  }

  // No flags at all: print usage, leave the file untouched.
  if (args.length === 0) {
    console.log(USAGE);
    return;
  }

  // Guarantee that every section we may write into is a plain object
  // (YAML may have omitted it or given it a scalar value).
  const ensureSection = (obj, key) => {
    if (!obj[key] || typeof obj[key] !== "object") {
      obj[key] = {};
    }
    return obj[key];
  };
  const digest = ensureSection(doc, "digest");
  const intelligence = ensureSection(digest, "intelligence");
  const metabolism = ensureSection(digest, "metabolism");
  const substrate = ensureSection(digest, "substrate");
  const capture = ensureSection(doc, "capture");

  // Flag-application helpers. parseStringFlag presumably returns the
  // flag's string value, or undefined when absent (defined in shared.js).
  const setInt = (target, key, flag) => {
    const value = parseStringFlag(args, flag);
    if (value !== void 0) target[key] = parseInt(value, 10);
  };
  const setBool = (target, key, flag) => {
    const value = parseStringFlag(args, flag);
    if (value !== void 0) target[key] = value === "true";
  };
  // The literal string "null" means "clear this setting / inherit".
  const setNullable = (target, key, flag, convert = (v) => v) => {
    const value = parseStringFlag(args, flag);
    if (value !== void 0) target[key] = value === "null" ? null : convert(value);
  };

  setBool(digest, "enabled", "--enabled");
  const tiers = parseStringFlag(args, "--tiers");
  if (tiers !== void 0) {
    digest.tiers = tiers.split(",").map((t) => parseInt(t.trim(), 10));
  }
  setNullable(digest, "inject_tier", "--inject-tier", (v) => parseInt(v, 10));
  setNullable(intelligence, "provider", "--provider");
  setNullable(intelligence, "model", "--model");
  setNullable(intelligence, "base_url", "--base-url");
  setInt(intelligence, "context_window", "--context-window");
  setNullable(intelligence, "keep_alive", "--keep-alive");
  setBool(intelligence, "gpu_kv_cache", "--gpu-kv-cache");
  setInt(metabolism, "active_interval", "--active-interval");
  setInt(metabolism, "dormancy_threshold", "--dormancy-threshold");
  setInt(substrate, "max_notes_per_cycle", "--max-notes");
  setInt(capture, "extraction_max_tokens", "--extraction-tokens");
  setInt(capture, "summary_max_tokens", "--summary-tokens");
  setInt(capture, "title_max_tokens", "--title-tokens");
  setInt(capture, "classification_max_tokens", "--classification-tokens");

  // Validate the merged document before persisting anything.
  const result = MycoConfigSchema.safeParse(doc);
  if (!result.success) {
    console.error("Validation error:");
    for (const issue of result.error.issues) {
      console.error(` ${issue.path.join(".")}: ${issue.message}`);
    }
    process.exit(1);
  }

  fs.writeFileSync(configPath, import_yaml.default.stringify(doc), "utf-8");
  console.log("Digest configuration updated.");
  console.log(summarize(MycoConfigSchema.parse(doc)));

  // A running daemon only reads the config at startup, so remind the
  // user to restart it when daemon state is present.
  if (fs.existsSync(path.join(vaultDir, DAEMON_STATE_FILENAME))) {
    console.log("\nNote: restart the daemon for changes to take effect (myco restart)");
  }
}
|
|
143
|
+
|
|
144
|
+
// Public entry point for the `myco setup-digest` CLI subcommand.
export {
run
};
//# sourceMappingURL=chunk-4FCFRJIQ.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/cli/setup-digest.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport YAML from 'yaml';\nimport { MycoConfigSchema } from '../config/schema.js';\nimport { parseStringFlag } from './shared.js';\n\nconst CONFIG_FILENAME = 'myco.yaml';\nconst DAEMON_STATE_FILENAME = 'daemon.json';\n\nconst USAGE = `Usage: myco setup-digest [options]\n\nConfigure digest (continuous reasoning) settings.\n\nOptions:\n --enabled <true|false> Enable/disable digest (default: true)\n --tiers <1500,3000,...> Comma-separated tier list\n --inject-tier <number|null> Tier to auto-inject at session start\n --provider <name> LLM provider for digest (null = inherit)\n --model <name> Model for digest (null = inherit)\n --base-url <url> Provider base URL (null = inherit)\n --context-window <number> Context window for digest operations\n --keep-alive <duration> Keep model loaded (Ollama, e.g. \"30m\")\n --gpu-kv-cache <true|false> Offload KV cache to GPU (LM Studio)\n --active-interval <seconds> Metabolism active interval\n --dormancy-threshold <seconds> Time before dormancy\n --max-notes <number> Max substrate notes per cycle\n --extraction-tokens <number> Max tokens for spore extraction\n --summary-tokens <number> Max tokens for session summaries\n --title-tokens <number> Max tokens for session titles\n --classification-tokens <number> Max tokens for artifact classification\n --show Show current settings and exit\n`;\n\nexport async function run(args: string[], vaultDir: string): Promise<void> {\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n const raw = fs.readFileSync(configPath, 'utf-8');\n const doc = YAML.parse(raw) as Record<string, unknown>;\n\n // Show current settings\n if (args.includes('--show')) {\n const config = MycoConfigSchema.parse(doc);\n console.log(JSON.stringify({\n digest: config.digest,\n capture: {\n extraction_max_tokens: config.capture.extraction_max_tokens,\n summary_max_tokens: 
config.capture.summary_max_tokens,\n title_max_tokens: config.capture.title_max_tokens,\n classification_max_tokens: config.capture.classification_max_tokens,\n },\n }, null, 2));\n return;\n }\n\n // No flags = show usage\n if (args.length === 0) {\n console.log(USAGE);\n return;\n }\n\n // Ensure digest section exists\n if (!doc.digest || typeof doc.digest !== 'object') {\n doc.digest = {};\n }\n const digest = doc.digest as Record<string, unknown>;\n\n // Ensure nested sections exist\n if (!digest.intelligence || typeof digest.intelligence !== 'object') {\n digest.intelligence = {};\n }\n if (!digest.metabolism || typeof digest.metabolism !== 'object') {\n digest.metabolism = {};\n }\n if (!digest.substrate || typeof digest.substrate !== 'object') {\n digest.substrate = {};\n }\n if (!doc.capture || typeof doc.capture !== 'object') {\n doc.capture = {};\n }\n\n const intelligence = digest.intelligence as Record<string, unknown>;\n const metabolism = digest.metabolism as Record<string, unknown>;\n const substrate = digest.substrate as Record<string, unknown>;\n const capture = doc.capture as Record<string, unknown>;\n\n // Parse and apply flags\n const enabled = parseStringFlag(args, '--enabled');\n if (enabled !== undefined) digest.enabled = enabled === 'true';\n\n const tiers = parseStringFlag(args, '--tiers');\n if (tiers !== undefined) {\n digest.tiers = tiers.split(',').map((t) => parseInt(t.trim(), 10));\n }\n\n const injectTier = parseStringFlag(args, '--inject-tier');\n if (injectTier !== undefined) {\n digest.inject_tier = injectTier === 'null' ? null : parseInt(injectTier, 10);\n }\n\n const provider = parseStringFlag(args, '--provider');\n if (provider !== undefined) intelligence.provider = provider === 'null' ? null : provider;\n\n const model = parseStringFlag(args, '--model');\n if (model !== undefined) intelligence.model = model === 'null' ? 
null : model;\n\n const baseUrl = parseStringFlag(args, '--base-url');\n if (baseUrl !== undefined) intelligence.base_url = baseUrl === 'null' ? null : baseUrl;\n\n const contextWindow = parseStringFlag(args, '--context-window');\n if (contextWindow !== undefined) intelligence.context_window = parseInt(contextWindow, 10);\n\n const keepAlive = parseStringFlag(args, '--keep-alive');\n if (keepAlive !== undefined) intelligence.keep_alive = keepAlive === 'null' ? null : keepAlive;\n\n const gpuKvCache = parseStringFlag(args, '--gpu-kv-cache');\n if (gpuKvCache !== undefined) intelligence.gpu_kv_cache = gpuKvCache === 'true';\n\n const activeInterval = parseStringFlag(args, '--active-interval');\n if (activeInterval !== undefined) metabolism.active_interval = parseInt(activeInterval, 10);\n\n const dormancyThreshold = parseStringFlag(args, '--dormancy-threshold');\n if (dormancyThreshold !== undefined) metabolism.dormancy_threshold = parseInt(dormancyThreshold, 10);\n\n const maxNotes = parseStringFlag(args, '--max-notes');\n if (maxNotes !== undefined) substrate.max_notes_per_cycle = parseInt(maxNotes, 10);\n\n const extractionTokens = parseStringFlag(args, '--extraction-tokens');\n if (extractionTokens !== undefined) capture.extraction_max_tokens = parseInt(extractionTokens, 10);\n\n const summaryTokens = parseStringFlag(args, '--summary-tokens');\n if (summaryTokens !== undefined) capture.summary_max_tokens = parseInt(summaryTokens, 10);\n\n const titleTokens = parseStringFlag(args, '--title-tokens');\n if (titleTokens !== undefined) capture.title_max_tokens = parseInt(titleTokens, 10);\n\n const classificationTokens = parseStringFlag(args, '--classification-tokens');\n if (classificationTokens !== undefined) capture.classification_max_tokens = parseInt(classificationTokens, 10);\n\n // Validate the full config\n const result = MycoConfigSchema.safeParse(doc);\n if (!result.success) {\n console.error('Validation error:');\n for (const issue of result.error.issues) 
{\n console.error(` ${issue.path.join('.')}: ${issue.message}`);\n }\n process.exit(1);\n }\n\n // Write back\n fs.writeFileSync(configPath, YAML.stringify(doc), 'utf-8');\n console.log('Digest configuration updated.');\n\n // Show what was set\n const updated = MycoConfigSchema.parse(doc);\n console.log(JSON.stringify({\n digest: updated.digest,\n capture: {\n extraction_max_tokens: updated.capture.extraction_max_tokens,\n summary_max_tokens: updated.capture.summary_max_tokens,\n title_max_tokens: updated.capture.title_max_tokens,\n classification_max_tokens: updated.capture.classification_max_tokens,\n },\n }, null, 2));\n\n if (fs.existsSync(path.join(vaultDir, DAEMON_STATE_FILENAME))) {\n console.log('\\nNote: restart the daemon for changes to take effect (myco restart)');\n }\n}\n"],"mappings":";;;;;;;;;;;;;AAEA,kBAAiB;AAFjB,OAAO,QAAQ;AACf,OAAO,UAAU;AAKjB,IAAM,kBAAkB;AACxB,IAAM,wBAAwB;AAE9B,IAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAwBd,eAAsB,IAAI,MAAgB,UAAiC;AACzE,QAAM,aAAa,KAAK,KAAK,UAAU,eAAe;AACtD,QAAM,MAAM,GAAG,aAAa,YAAY,OAAO;AAC/C,QAAM,MAAM,YAAAA,QAAK,MAAM,GAAG;AAG1B,MAAI,KAAK,SAAS,QAAQ,GAAG;AAC3B,UAAM,SAAS,iBAAiB,MAAM,GAAG;AACzC,YAAQ,IAAI,KAAK,UAAU;AAAA,MACzB,QAAQ,OAAO;AAAA,MACf,SAAS;AAAA,QACP,uBAAuB,OAAO,QAAQ;AAAA,QACtC,oBAAoB,OAAO,QAAQ;AAAA,QACnC,kBAAkB,OAAO,QAAQ;AAAA,QACjC,2BAA2B,OAAO,QAAQ;AAAA,MAC5C;AAAA,IACF,GAAG,MAAM,CAAC,CAAC;AACX;AAAA,EACF;AAGA,MAAI,KAAK,WAAW,GAAG;AACrB,YAAQ,IAAI,KAAK;AACjB;AAAA,EACF;AAGA,MAAI,CAAC,IAAI,UAAU,OAAO,IAAI,WAAW,UAAU;AACjD,QAAI,SAAS,CAAC;AAAA,EAChB;AACA,QAAM,SAAS,IAAI;AAGnB,MAAI,CAAC,OAAO,gBAAgB,OAAO,OAAO,iBAAiB,UAAU;AACnE,WAAO,eAAe,CAAC;AAAA,EACzB;AACA,MAAI,CAAC,OAAO,cAAc,OAAO,OAAO,eAAe,UAAU;AAC/D,WAAO,aAAa,CAAC;AAAA,EACvB;AACA,MAAI,CAAC,OAAO,aAAa,OAAO,OAAO,cAAc,UAAU;AAC7D,WAAO,YAAY,CAAC;AAAA,EACtB;AACA,MAAI,CAAC,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;AACnD,QAAI,UAAU,CAAC;AAAA,EACjB;AAEA,QAAM,eAAe,OAAO;AAC5B,QAAM,aAAa,OAAO;AAC1B,QAAM,YAAY,OAAO;AACzB,
QAAM,UAAU,IAAI;AAGpB,QAAM,UAAU,gBAAgB,MAAM,WAAW;AACjD,MAAI,YAAY,OAAW,QAAO,UAAU,YAAY;AAExD,QAAM,QAAQ,gBAAgB,MAAM,SAAS;AAC7C,MAAI,UAAU,QAAW;AACvB,WAAO,QAAQ,MAAM,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,SAAS,EAAE,KAAK,GAAG,EAAE,CAAC;AAAA,EACnE;AAEA,QAAM,aAAa,gBAAgB,MAAM,eAAe;AACxD,MAAI,eAAe,QAAW;AAC5B,WAAO,cAAc,eAAe,SAAS,OAAO,SAAS,YAAY,EAAE;AAAA,EAC7E;AAEA,QAAM,WAAW,gBAAgB,MAAM,YAAY;AACnD,MAAI,aAAa,OAAW,cAAa,WAAW,aAAa,SAAS,OAAO;AAEjF,QAAM,QAAQ,gBAAgB,MAAM,SAAS;AAC7C,MAAI,UAAU,OAAW,cAAa,QAAQ,UAAU,SAAS,OAAO;AAExE,QAAM,UAAU,gBAAgB,MAAM,YAAY;AAClD,MAAI,YAAY,OAAW,cAAa,WAAW,YAAY,SAAS,OAAO;AAE/E,QAAM,gBAAgB,gBAAgB,MAAM,kBAAkB;AAC9D,MAAI,kBAAkB,OAAW,cAAa,iBAAiB,SAAS,eAAe,EAAE;AAEzF,QAAM,YAAY,gBAAgB,MAAM,cAAc;AACtD,MAAI,cAAc,OAAW,cAAa,aAAa,cAAc,SAAS,OAAO;AAErF,QAAM,aAAa,gBAAgB,MAAM,gBAAgB;AACzD,MAAI,eAAe,OAAW,cAAa,eAAe,eAAe;AAEzE,QAAM,iBAAiB,gBAAgB,MAAM,mBAAmB;AAChE,MAAI,mBAAmB,OAAW,YAAW,kBAAkB,SAAS,gBAAgB,EAAE;AAE1F,QAAM,oBAAoB,gBAAgB,MAAM,sBAAsB;AACtE,MAAI,sBAAsB,OAAW,YAAW,qBAAqB,SAAS,mBAAmB,EAAE;AAEnG,QAAM,WAAW,gBAAgB,MAAM,aAAa;AACpD,MAAI,aAAa,OAAW,WAAU,sBAAsB,SAAS,UAAU,EAAE;AAEjF,QAAM,mBAAmB,gBAAgB,MAAM,qBAAqB;AACpE,MAAI,qBAAqB,OAAW,SAAQ,wBAAwB,SAAS,kBAAkB,EAAE;AAEjG,QAAM,gBAAgB,gBAAgB,MAAM,kBAAkB;AAC9D,MAAI,kBAAkB,OAAW,SAAQ,qBAAqB,SAAS,eAAe,EAAE;AAExF,QAAM,cAAc,gBAAgB,MAAM,gBAAgB;AAC1D,MAAI,gBAAgB,OAAW,SAAQ,mBAAmB,SAAS,aAAa,EAAE;AAElF,QAAM,uBAAuB,gBAAgB,MAAM,yBAAyB;AAC5E,MAAI,yBAAyB,OAAW,SAAQ,4BAA4B,SAAS,sBAAsB,EAAE;AAG7G,QAAM,SAAS,iBAAiB,UAAU,GAAG;AAC7C,MAAI,CAAC,OAAO,SAAS;AACnB,YAAQ,MAAM,mBAAmB;AACjC,eAAW,SAAS,OAAO,MAAM,QAAQ;AACvC,cAAQ,MAAM,KAAK,MAAM,KAAK,KAAK,GAAG,CAAC,KAAK,MAAM,OAAO,EAAE;AAAA,IAC7D;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,KAAG,cAAc,YAAY,YAAAA,QAAK,UAAU,GAAG,GAAG,OAAO;AACzD,UAAQ,IAAI,+BAA+B;AAG3C,QAAM,UAAU,iBAAiB,MAAM,GAAG;AAC1C,UAAQ,IAAI,KAAK,UAAU;AAAA,IACzB,QAAQ,QAAQ;AAAA,IAChB,SAAS;AAAA,MACP,uBAAuB,QAAQ,QAAQ;AAAA,MACvC,oBAAoB,QAAQ,QAAQ;AAAA,MACpC,kBAAkB,QAAQ,QAAQ;AAAA,MAClC,2BAA2B,QAAQ,QAAQ;AAAA,IAC7C;AAAA,EACF,GAAG,MAAM,CAAC,CAAC;AAEX,MAAI,GAAG,WAAW,KAAK,KAAK,UAAU,
qBAAqB,CAAC,GAAG;AAC7D,YAAQ,IAAI,sEAAsE;AAAA,EACpF;AACF;","names":["YAML"]}
|