@goondocks/myco 0.6.3 → 0.6.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. package/.claude-plugin/marketplace.json +1 -1
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/dist/{chunk-CPVXNRGW.js → chunk-4B5RO2YV.js} +4 -4
  4. package/dist/{chunk-25FY74AP.js → chunk-4DYD4HHG.js} +25 -7
  5. package/dist/chunk-4DYD4HHG.js.map +1 -0
  6. package/dist/{chunk-DBMHUMG3.js → chunk-54WVLTKD.js} +3 -3
  7. package/dist/{chunk-WU4PCNIK.js → chunk-5LMRZDH3.js} +2 -2
  8. package/dist/{chunk-CQ4RKK67.js → chunk-AHZN4Z34.js} +2 -2
  9. package/dist/{chunk-WBLTISAK.js → chunk-DYDBF5W6.js} +36 -6
  10. package/dist/chunk-DYDBF5W6.js.map +1 -0
  11. package/dist/{chunk-JSK7L46L.js → chunk-ERG2IEWX.js} +22 -4
  12. package/dist/{chunk-JSK7L46L.js.map → chunk-ERG2IEWX.js.map} +1 -1
  13. package/dist/{chunk-RNWALAFP.js → chunk-F7GAYVWF.js} +2 -2
  14. package/dist/chunk-F7GAYVWF.js.map +1 -0
  15. package/dist/{chunk-H7PRCVGQ.js → chunk-F7PGDD2X.js} +2 -2
  16. package/dist/{chunk-RY76WEN3.js → chunk-GENQ5QGP.js} +2 -2
  17. package/dist/{chunk-YG6MLLGL.js → chunk-HYVT345Y.js} +2 -2
  18. package/dist/{chunk-LDKXXKF6.js → chunk-LEK6DEAE.js} +4 -4
  19. package/dist/{chunk-IWBWZQK6.js → chunk-MDLSAFPP.js} +2 -2
  20. package/dist/{chunk-QLUE3BUL.js → chunk-O6TBHGVO.js} +9 -2
  21. package/dist/chunk-O6TBHGVO.js.map +1 -0
  22. package/dist/{chunk-RXJHB7W4.js → chunk-OEGZ5YTJ.js} +2 -2
  23. package/dist/{chunk-XNAM6Z4O.js → chunk-P723N2LP.js} +2 -2
  24. package/dist/{chunk-PQWQC3RF.js → chunk-TK7A4RX7.js} +144 -146
  25. package/dist/chunk-TK7A4RX7.js.map +1 -0
  26. package/dist/{chunk-ALBVNGCF.js → chunk-V6BJVYNH.js} +55 -44
  27. package/dist/{chunk-ALBVNGCF.js.map → chunk-V6BJVYNH.js.map} +1 -1
  28. package/dist/{chunk-CK24O5YQ.js → chunk-XH34FX4C.js} +2 -2
  29. package/dist/{chunk-4WL5X7VS.js → chunk-YRIIBPJD.js} +3 -3
  30. package/dist/{cli-EGWAINIE.js → cli-OJYHLO4Y.js} +21 -21
  31. package/dist/{client-FDKJ4BY7.js → client-SS3C5MF6.js} +5 -5
  32. package/dist/{config-HDUFDOQN.js → config-IBS6KOLQ.js} +3 -3
  33. package/dist/{curate-OHIJFBYF.js → curate-4CKEMOPV.js} +9 -10
  34. package/dist/{curate-OHIJFBYF.js.map → curate-4CKEMOPV.js.map} +1 -1
  35. package/dist/{detect-providers-4U3ZPW5G.js → detect-providers-LFIVJYQO.js} +3 -3
  36. package/dist/{digest-I2XYCK2M.js → digest-ZLARHLLY.js} +11 -11
  37. package/dist/{init-ZO2XQT6U.js → init-3LVKVQ4L.js} +8 -8
  38. package/dist/{logs-IENORIYR.js → logs-6CWVP574.js} +3 -3
  39. package/dist/{main-XZ6X4BUX.js → main-RB727YRP.js} +2109 -390
  40. package/dist/main-RB727YRP.js.map +1 -0
  41. package/dist/{rebuild-NAH4EW5B.js → rebuild-QWVVCBCZ.js} +9 -10
  42. package/dist/{rebuild-NAH4EW5B.js.map → rebuild-QWVVCBCZ.js.map} +1 -1
  43. package/dist/{reprocess-6FOP37XS.js → reprocess-YG3WLUI2.js} +11 -11
  44. package/dist/{restart-WSA4JSE3.js → restart-UIP7US4U.js} +6 -6
  45. package/dist/{search-QXJQUB35.js → search-BQLBW5CS.js} +6 -6
  46. package/dist/{server-VXN3CJ4Y.js → server-43KSJ65Q.js} +80 -32
  47. package/dist/{server-VXN3CJ4Y.js.map → server-43KSJ65Q.js.map} +1 -1
  48. package/dist/{session-start-KQ4KCQMZ.js → session-start-6SHGT2AW.js} +9 -9
  49. package/dist/setup-digest-X735EZSD.js +15 -0
  50. package/dist/setup-llm-QBSTQO7N.js +15 -0
  51. package/dist/src/cli.js +4 -4
  52. package/dist/src/daemon/main.js +4 -4
  53. package/dist/src/hooks/post-tool-use.js +5 -5
  54. package/dist/src/hooks/session-end.js +5 -5
  55. package/dist/src/hooks/session-start.js +4 -4
  56. package/dist/src/hooks/stop.js +7 -7
  57. package/dist/src/hooks/user-prompt-submit.js +5 -5
  58. package/dist/src/mcp/server.js +4 -4
  59. package/dist/src/prompts/extraction.md +4 -4
  60. package/dist/{stats-43OESUEB.js → stats-QBLIEFWL.js} +6 -6
  61. package/dist/ui/assets/index-CjWGVHhF.css +1 -0
  62. package/dist/ui/assets/index-Cq-H7wgE.js +369 -0
  63. package/dist/ui/index.html +2 -2
  64. package/dist/{verify-IIAHBAAU.js → verify-X272WGBD.js} +6 -6
  65. package/dist/{version-NKOECSVH.js → version-XE4GYTBV.js} +4 -4
  66. package/package.json +1 -1
  67. package/dist/chunk-25FY74AP.js.map +0 -1
  68. package/dist/chunk-PQWQC3RF.js.map +0 -1
  69. package/dist/chunk-QLUE3BUL.js.map +0 -1
  70. package/dist/chunk-RNWALAFP.js.map +0 -1
  71. package/dist/chunk-WBLTISAK.js.map +0 -1
  72. package/dist/main-XZ6X4BUX.js.map +0 -1
  73. package/dist/setup-digest-QNCM3PNQ.js +0 -15
  74. package/dist/setup-llm-EAOIUSPJ.js +0 -15
  75. package/dist/ui/assets/index-Bk4X_8-Z.css +0 -1
  76. package/dist/ui/assets/index-D3SY7ZHY.js +0 -299
  77. /package/dist/{chunk-CPVXNRGW.js.map → chunk-4B5RO2YV.js.map} +0 -0
  78. /package/dist/{chunk-DBMHUMG3.js.map → chunk-54WVLTKD.js.map} +0 -0
  79. /package/dist/{chunk-WU4PCNIK.js.map → chunk-5LMRZDH3.js.map} +0 -0
  80. /package/dist/{chunk-CQ4RKK67.js.map → chunk-AHZN4Z34.js.map} +0 -0
  81. /package/dist/{chunk-H7PRCVGQ.js.map → chunk-F7PGDD2X.js.map} +0 -0
  82. /package/dist/{chunk-RY76WEN3.js.map → chunk-GENQ5QGP.js.map} +0 -0
  83. /package/dist/{chunk-YG6MLLGL.js.map → chunk-HYVT345Y.js.map} +0 -0
  84. /package/dist/{chunk-LDKXXKF6.js.map → chunk-LEK6DEAE.js.map} +0 -0
  85. /package/dist/{chunk-IWBWZQK6.js.map → chunk-MDLSAFPP.js.map} +0 -0
  86. /package/dist/{chunk-RXJHB7W4.js.map → chunk-OEGZ5YTJ.js.map} +0 -0
  87. /package/dist/{chunk-XNAM6Z4O.js.map → chunk-P723N2LP.js.map} +0 -0
  88. /package/dist/{chunk-CK24O5YQ.js.map → chunk-XH34FX4C.js.map} +0 -0
  89. /package/dist/{chunk-4WL5X7VS.js.map → chunk-YRIIBPJD.js.map} +0 -0
  90. /package/dist/{cli-EGWAINIE.js.map → cli-OJYHLO4Y.js.map} +0 -0
  91. /package/dist/{client-FDKJ4BY7.js.map → client-SS3C5MF6.js.map} +0 -0
  92. /package/dist/{config-HDUFDOQN.js.map → config-IBS6KOLQ.js.map} +0 -0
  93. /package/dist/{detect-providers-4U3ZPW5G.js.map → detect-providers-LFIVJYQO.js.map} +0 -0
  94. /package/dist/{digest-I2XYCK2M.js.map → digest-ZLARHLLY.js.map} +0 -0
  95. /package/dist/{init-ZO2XQT6U.js.map → init-3LVKVQ4L.js.map} +0 -0
  96. /package/dist/{logs-IENORIYR.js.map → logs-6CWVP574.js.map} +0 -0
  97. /package/dist/{reprocess-6FOP37XS.js.map → reprocess-YG3WLUI2.js.map} +0 -0
  98. /package/dist/{restart-WSA4JSE3.js.map → restart-UIP7US4U.js.map} +0 -0
  99. /package/dist/{search-QXJQUB35.js.map → search-BQLBW5CS.js.map} +0 -0
  100. /package/dist/{session-start-KQ4KCQMZ.js.map → session-start-6SHGT2AW.js.map} +0 -0
  101. /package/dist/{setup-digest-QNCM3PNQ.js.map → setup-digest-X735EZSD.js.map} +0 -0
  102. /package/dist/{setup-llm-EAOIUSPJ.js.map → setup-llm-QBSTQO7N.js.map} +0 -0
  103. /package/dist/{stats-43OESUEB.js.map → stats-QBLIEFWL.js.map} +0 -0
  104. /package/dist/{verify-IIAHBAAU.js.map → verify-X272WGBD.js.map} +0 -0
  105. /package/dist/{version-NKOECSVH.js.map → version-XE4GYTBV.js.map} +0 -0

package/dist/{chunk-PQWQC3RF.js → chunk-TK7A4RX7.js}

@@ -2,6 +2,7 @@ import { createRequire as __cr } from 'node:module'; const require = __cr(import
 import {
   ARTIFACT_TYPES,
   CONVERSATION_HEADING,
+  TURN_HEADING_PREFIX,
   VaultWriter,
   bareSessionId,
   buildClassificationPrompt,
@@ -23,39 +24,32 @@ import {
   stripReasoningTokens,
   supersedeSpore,
   supersededIdsSchema
-} from "./chunk-ALBVNGCF.js";
+} from "./chunk-V6BJVYNH.js";
 import {
   generateEmbedding
 } from "./chunk-RGVBGTD6.js";
 import {
   stripFrontmatter
-} from "./chunk-RY76WEN3.js";
+} from "./chunk-GENQ5QGP.js";
 import {
   initFts
 } from "./chunk-6FQISQNA.js";
 import {
   external_exports,
   require_dist
-} from "./chunk-JSK7L46L.js";
+} from "./chunk-ERG2IEWX.js";
 import {
-  AgentRegistry,
-  claudeCodeAdapter,
-  createPerProjectAdapter
-} from "./chunk-RNWALAFP.js";
-import {
-  AI_RESPONSE_PREVIEW_CHARS,
   CHARS_PER_TOKEN,
-  COMMAND_PREVIEW_CHARS,
   CURATION_CLUSTER_SIMILARITY,
   DIGEST_LLM_REQUEST_TIMEOUT_MS,
   DIGEST_SUBSTRATE_TYPE_WEIGHTS,
+  DIGEST_TIERS,
   DIGEST_TIER_MIN_CONTEXT,
   EMBEDDING_INPUT_LIMIT,
   LLM_REASONING_MODE,
-  PROMPT_PREVIEW_CHARS,
   SUPERSESSION_MAX_TOKENS,
   estimateTokens
-} from "./chunk-WBLTISAK.js";
+} from "./chunk-DYDBF5W6.js";
 import {
   __toESM
 } from "./chunk-PZUWP5VK.js";
@@ -69,7 +63,7 @@ import crypto from "crypto";
 // src/daemon/trace.ts
 import fs from "fs";
 import path from "path";
-function readLastTimestamp(filePath) {
+function readLastRecord(filePath) {
   let content;
   try {
     content = fs.readFileSync(filePath, "utf-8").trim();
@@ -80,12 +74,14 @@ function readLastTimestamp(filePath) {
   const lines = content.split("\n");
   const lastLine = lines[lines.length - 1];
   try {
-    const record = JSON.parse(lastLine);
-    return record.timestamp ?? null;
+    return JSON.parse(lastLine);
   } catch {
     return null;
   }
 }
+function readLastTimestamp(filePath) {
+  return readLastRecord(filePath)?.timestamp ?? null;
+}
 function appendTraceRecord(filePath, record) {
   fs.mkdirSync(path.dirname(filePath), { recursive: true });
   fs.appendFileSync(filePath, JSON.stringify(record) + "\n", "utf-8");
@@ -103,6 +99,10 @@ var DigestEngine = class {
   log;
   lastCycleTimestampCache = void 0;
   cycleInProgress = false;
+  /** Whether a digest cycle is currently running. */
+  get isCycleInProgress() {
+    return this.cycleInProgress;
+  }
   /** Hooks that run before each digest cycle (e.g., consolidation). */
   prePassHooks = [];
   /** Hooks that run after each successful digest cycle. */
@@ -149,7 +149,7 @@ var DigestEngine = class {
    */
   getEligibleTiers() {
     const contextWindow = this.config.digest.intelligence.context_window;
-    return this.config.digest.tiers.filter((tier) => {
+    return DIGEST_TIERS.filter((tier) => {
       const minContext = DIGEST_TIER_MIN_CONTEXT[tier];
       return minContext !== void 0 && minContext <= contextWindow;
     });
@@ -189,7 +189,7 @@ ${note.content}`;
    * Write a digest extract to the vault with YAML frontmatter.
    * Uses atomic write pattern (temp file + rename).
    */
-  writeExtract(tier, body, cycleId, model, substrateCount) {
+  writeExtract(tier, body, cycleId, model, substrateCount, substrateNotes, tokensUsed) {
     const digestDir = path2.join(this.vaultDir, "digest");
     fs2.mkdirSync(digestDir, { recursive: true });
     const frontmatter = {
@@ -200,6 +200,8 @@ ${note.content}`;
       substrate_count: substrateCount,
       model
     };
+    if (substrateNotes && substrateNotes.length > 0) frontmatter.substrate_notes = substrateNotes;
+    if (tokensUsed !== void 0) frontmatter.tokens_used = tokensUsed;
     const fmYaml = import_yaml.default.stringify(frontmatter, {
       defaultStringType: "QUOTE_DOUBLE",
       defaultKeyType: "PLAIN"
@@ -299,8 +301,16 @@ ${body}
         substrateIndex[key].push(note.id);
       }
     }
+    this.log("debug", "Substrate breakdown", {
+      sessions: substrateIndex.sessions.length,
+      spores: substrateIndex.spores.length,
+      plans: substrateIndex.plans.length,
+      artifacts: substrateIndex.artifacts.length,
+      team: substrateIndex.team.length
+    });
     const cycleTimestamp = (/* @__PURE__ */ new Date()).toISOString();
     const systemPrompt = loadPrompt("digest-system");
+    const allSubstrateIds = substrate.map((note) => note.id);
     for (const tier of eligibleTiers) {
       const tierPrompt = loadPrompt(`digest-${tier}`);
       const previousExtract = opts?.cleanSlate ? null : this.readPreviousExtract(tier);
@@ -343,12 +353,34 @@ ${body}
         const responseTokens = estimateTokens(extractText);
         totalTokensUsed += promptTokens + responseTokens;
         this.log("info", `Tier ${tier}: completed`, { durationMs: tierDuration, responseTokens, model: response.model });
-        this.writeExtract(tier, extractText, cycleId, response.model, substrate.length);
+        this.writeExtract(tier, extractText, cycleId, response.model, substrate.length, allSubstrateIds, promptTokens + responseTokens);
         tiersGenerated.push(tier);
       } catch (err) {
         this.log("warn", `Tier ${tier}: failed`, { error: err.message });
       }
     }
+    if (tiersGenerated.length > 0) {
+      const digestDir = path2.join(this.vaultDir, "digest");
+      for (const tier of tiersGenerated) {
+        const extractPath = path2.join(digestDir, `extract-${tier}.md`);
+        try {
+          const content = fs2.readFileSync(extractPath, "utf-8");
+          const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
+          if (fmMatch) {
+            const parsed = import_yaml.default.parse(fmMatch[1]);
+            parsed.tiers_generated = tiersGenerated;
+            const fmYaml = import_yaml.default.stringify(parsed, { defaultStringType: "QUOTE_DOUBLE", defaultKeyType: "PLAIN" }).trim();
+            const extractBody = content.slice(fmMatch[0].length);
+            const tmpPath = `${extractPath}.tmp`;
+            fs2.writeFileSync(tmpPath, `---
+${fmYaml}
+---${extractBody}`, "utf-8");
+            fs2.renameSync(tmpPath, extractPath);
+          }
+        } catch {
+        }
+      }
+    }
     const result = {
       cycleId,
       timestamp: cycleTimestamp,
@@ -450,6 +482,7 @@ var Metabolism = class {
 };
 
 // src/daemon/processor.ts
+var EXTRACTION_PROMPT_OVERHEAD_TOKENS = 500;
 var SUMMARIZATION_FAILED_MARKER = "summarization failed";
 var ClassificationResponseSchema = external_exports.object({
   artifacts: external_exports.array(external_exports.object({
@@ -479,11 +512,26 @@ var BufferProcessor = class {
     const charBudget = available * CHARS_PER_TOKEN;
     return data.slice(0, charBudget);
   }
-  async process(events, sessionId) {
-    const rawPrompt = this.buildPromptForExtraction(events, sessionId);
-    const prompt = this.truncateForContext(rawPrompt, this.extractionMaxTokens);
+  async process(conversationMarkdown, sessionId) {
+    if (!conversationMarkdown.trim()) {
+      return { summary: "", observations: [], degraded: false };
+    }
+    const availableTokens = this.contextWindow - EXTRACTION_PROMPT_OVERHEAD_TOKENS - this.extractionMaxTokens;
+    const availableChars = availableTokens * CHARS_PER_TOKEN;
+    let truncated = conversationMarkdown;
+    if (conversationMarkdown.length > availableChars) {
+      truncated = conversationMarkdown.slice(-availableChars);
+      const turnBoundary = truncated.indexOf(TURN_HEADING_PREFIX);
+      if (turnBoundary > 0) {
+        truncated = truncated.slice(turnBoundary);
+      }
+    }
+    const prompt = buildExtractionPrompt(sessionId, truncated, this.extractionMaxTokens);
     try {
-      const response = await this.backend.summarize(prompt, { maxTokens: this.extractionMaxTokens, reasoning: LLM_REASONING_MODE });
+      const response = await this.backend.summarize(prompt, {
+        maxTokens: this.extractionMaxTokens,
+        reasoning: LLM_REASONING_MODE
+      });
       const parsed = extractJson(response.text);
       return {
         summary: parsed.summary,
@@ -492,16 +540,12 @@ var BufferProcessor = class {
       };
     } catch (error) {
       return {
-        summary: `LLM processing failed for session ${sessionId}. ${events.length} events captured. Error: ${error.message}`,
+        summary: `LLM processing failed for session ${sessionId}. Error: ${error.message}`,
         observations: [],
         degraded: true
       };
     }
   }
-  buildPromptForExtraction(events, sessionId) {
-    const toolSummary = this.summarizeEvents(events);
-    return buildExtractionPrompt(sessionId, events.length, toolSummary, this.extractionMaxTokens);
-  }
   async summarizeSession(conversationMarkdown, sessionId, user) {
     const truncatedContent = this.truncateForContext(conversationMarkdown, this.summaryMaxTokens);
     const summaryPrompt = buildSummaryPrompt(sessionId, user ?? "unknown", truncatedContent, this.summaryMaxTokens);
@@ -533,102 +577,7 @@ var BufferProcessor = class {
   buildPromptForClassification(candidates, sessionId) {
     return buildClassificationPrompt(sessionId, candidates, this.classificationMaxTokens);
   }
-  summarizeEvents(events) {
-    const toolCounts = /* @__PURE__ */ new Map();
-    const filesAccessed = /* @__PURE__ */ new Set();
-    const prompts = [];
-    const aiResponses = [];
-    for (const event of events) {
-      if (event.type === "user_prompt") {
-        const prompt = String(event.prompt ?? "");
-        if (prompt) prompts.push(prompt.slice(0, PROMPT_PREVIEW_CHARS));
-        continue;
-      }
-      if (event.type === "ai_response") {
-        const content = String(event.content ?? "");
-        if (content) aiResponses.push(content.slice(0, AI_RESPONSE_PREVIEW_CHARS));
-        continue;
-      }
-      const tool = String(event.tool_name ?? event.tool ?? "unknown");
-      toolCounts.set(tool, (toolCounts.get(tool) ?? 0) + 1);
-      const input = event.tool_input ?? event.input;
-      if (input?.path) filesAccessed.add(String(input.path));
-      if (input?.file_path) filesAccessed.add(String(input.file_path));
-      if (input?.command) filesAccessed.add(`[cmd] ${String(input.command).slice(0, COMMAND_PREVIEW_CHARS)}`);
-    }
-    const lines = [];
-    if (prompts.length > 0) {
-      lines.push("### User Prompts");
-      for (const p of prompts) {
-        lines.push(`- "${p}"`);
-      }
-    }
-    lines.push("\n### Tool Usage");
-    for (const [tool, count] of toolCounts) {
-      lines.push(`- ${tool}: ${count} calls`);
-    }
-    if (filesAccessed.size > 0) {
-      lines.push("\n### Files Accessed");
-      for (const file of filesAccessed) {
-        lines.push(`- ${file}`);
-      }
-    }
-    if (aiResponses.length > 0) {
-      lines.push("\n### AI Responses");
-      for (const r of aiResponses) {
-        lines.push(`- "${r}"`);
-      }
-    }
-    return lines.join("\n");
-  }
-};
-
-// src/capture/transcript-miner.ts
-var TranscriptMiner = class {
-  registry;
-  constructor(config) {
-    this.registry = new AgentRegistry(config?.additionalAdapters);
-  }
-  /**
-   * Extract all conversation turns for a session.
-   * Convenience wrapper — delegates to getAllTurnsWithSource.
-   */
-  getAllTurns(sessionId) {
-    return this.getAllTurnsWithSource(sessionId).turns;
-  }
-  /**
-   * Extract turns using the hook-provided transcript path first (fast, no scanning),
-   * then fall back to adapter registry scanning if the path isn't provided.
-   */
-  getAllTurnsWithSource(sessionId, transcriptPath) {
-    if (transcriptPath) {
-      const result2 = this.registry.parseTurnsFromPath(transcriptPath);
-      if (result2) return result2;
-    }
-    const result = this.registry.getTranscriptTurns(sessionId);
-    if (result) return result;
-    return { turns: [], source: "none" };
-  }
 };
-function extractTurnsFromBuffer(events) {
-  const turns = [];
-  let current = null;
-  for (const event of events) {
-    const type = event.type;
-    if (type === "user_prompt") {
-      if (current) turns.push(current);
-      current = {
-        prompt: String(event.prompt ?? "").slice(0, PROMPT_PREVIEW_CHARS),
-        toolCount: 0,
-        timestamp: String(event.timestamp ?? (/* @__PURE__ */ new Date()).toISOString())
-      };
-    } else if (type === "tool_use") {
-      if (current) current.toolCount++;
-    }
-  }
-  if (current) turns.push(current);
-  return turns;
-}
 
 // src/vault/observations.ts
 function writeObservationNotes(observations, sessionId, writer, index, vaultDir) {
@@ -722,6 +671,13 @@ ${note.content}`.slice(0, EMBEDDING_INPUT_LIMIT);
       onProgress
     }
   );
+  if (ctx.pipeline) {
+    for (const note of activeNotes) {
+      ctx.pipeline.register(note.id, note.type, note.path);
+      ctx.pipeline.advance(note.id, note.type, "capture", "succeeded");
+      ctx.pipeline.advance(note.id, note.type, "embedding", "succeeded");
+    }
+  }
   return {
     ftsCount,
     embeddedCount: result.succeeded,
@@ -733,6 +689,16 @@ async function runDigest(ctx, llmProvider, options) {
   const { config, vaultDir, index } = ctx;
   const log = ctx.log ? (level, message, data) => ctx.log(level, message, data) : () => {
   };
+  if (ctx.pipeline && options?.full) {
+    const items = ctx.pipeline.listItems({ stage: "digest", status: "succeeded" });
+    let reset = 0;
+    for (const item of items.items) {
+      ctx.pipeline.advance(item.id, item.item_type, "digest", "pending");
+      reset++;
+    }
+    log("info", `Reset ${reset} item(s) to digest:pending for full reprocessing`);
+    return null;
+  }
   const engine = new DigestEngine({
     vaultDir,
     index,
@@ -806,6 +772,17 @@ async function runCuration(deps, dryRun) {
   });
   const allSpores = index.query({ type: "spore" });
   const activeSpores = allSpores.filter((n) => isActiveSpore(n.frontmatter));
+  if (deps.pipeline && !dryRun) {
+    let enqueued = 0;
+    for (const spore of activeSpores) {
+      deps.pipeline.register(spore.id, "spore", spore.path);
+      deps.pipeline.advance(spore.id, "spore", "capture", "succeeded");
+      deps.pipeline.advance(spore.id, "spore", "consolidation", "pending");
+      enqueued++;
+    }
+    log("info", `Enqueued ${enqueued} spore(s) for pipeline consolidation`);
+    return { scanned: activeSpores.length, clustersEvaluated: 0, superseded: 0, enqueued: true };
+  }
   if (activeSpores.length === 0) {
     return { scanned: 0, clustersEvaluated: 0, superseded: 0 };
   }
@@ -914,7 +891,14 @@ async function runCuration(deps, dryRun) {
 function updateTitleAndSummary(body, newTitle, newNarrative) {
   let updated = body.replace(/^# .*/m, `# ${newTitle}`);
   const summaryCallout = callout("abstract", "Summary", newNarrative);
-  updated = updated.replace(/> \[!abstract\] Summary\n(?:> .*\n?)*/m, summaryCallout + "\n");
+  const hasExistingCallout = /> \[!abstract\] Summary/.test(updated);
+  if (hasExistingCallout) {
+    updated = updated.replace(/> \[!abstract\] Summary\n(?:> .*\n?)*/m, summaryCallout + "\n");
+  } else {
+    updated = updated.replace(/^(# .*\n)/m, `$1
+${summaryCallout}
+`);
+  }
   return updated;
 }
 async function runReprocess(ctx, llmProvider, embeddingProvider, options, onProgress) {
@@ -925,14 +909,6 @@ async function runReprocess(ctx, llmProvider, embeddingProvider, options, onProg
   const dateFilter = options?.date;
   const failedOnly = options?.failed ?? false;
   const skipLlm = options?.indexOnly ?? false;
-  const effectiveLlm = skipLlm ? null : llmProvider;
-  const processor = effectiveLlm ? new BufferProcessor(effectiveLlm, config.intelligence.llm.context_window, config.capture) : null;
-  const writer = new VaultWriter(vaultDir);
-  const miner = new TranscriptMiner({
-    additionalAdapters: config.capture.transcript_paths.map(
-      (p) => createPerProjectAdapter(p, claudeCodeAdapter.parseTurns)
-    )
-  });
   const sessionsDir = path3.join(vaultDir, "sessions");
   if (!fs3.existsSync(sessionsDir)) {
     return { sessionsFound: 0, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };
@@ -946,12 +922,42 @@ async function runReprocess(ctx, llmProvider, embeddingProvider, options, onProg
       if (!file.startsWith("session-") || !file.endsWith(".md")) continue;
       const sessionId = file.replace("session-", "").replace(".md", "");
       if (sessionFilter && !sessionId.includes(sessionFilter)) continue;
-      sessionFiles.push({ relativePath: path3.join("sessions", dateDir, file), sessionId });
+      sessionFiles.push({ relativePath: path3.join("sessions", dateDir, file), sessionId, dateDir });
     }
   }
   if (sessionFiles.length === 0) {
     return { sessionsFound: 0, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };
   }
+  if (ctx.pipeline && !skipLlm) {
+    let enqueued = 0;
+    let eligibleFiles = sessionFiles;
+    if (failedOnly) {
+      eligibleFiles = sessionFiles.filter(({ relativePath }) => {
+        const rawContent = fs3.readFileSync(path3.join(vaultDir, relativePath), "utf-8");
+        return rawContent.includes(SUMMARIZATION_FAILED_MARKER);
+      });
+    }
+    for (const { relativePath, sessionId } of eligibleFiles) {
+      ctx.pipeline.register(sessionId, "session", relativePath);
+      ctx.pipeline.advance(sessionId, "session", "capture", "succeeded");
+      ctx.pipeline.advance(sessionId, "session", "extraction", "pending");
+      enqueued++;
+    }
+    log("info", `Enqueued ${enqueued} session(s) for pipeline reprocessing`, {
+      filters: { session: sessionFilter, date: dateFilter, failed: failedOnly }
+    });
+    return {
+      sessionsFound: sessionFiles.length,
+      sessionsProcessed: enqueued,
+      observationsExtracted: 0,
+      summariesRegenerated: 0,
+      embeddingsQueued: 0,
+      enqueued: true
+    };
+  }
+  const effectiveLlm = skipLlm ? null : llmProvider;
+  const processor = effectiveLlm ? new BufferProcessor(effectiveLlm, config.intelligence.llm.context_window, config.capture) : null;
+  const writer = new VaultWriter(vaultDir);
   const tasks = [];
   for (const { relativePath, sessionId } of sessionFiles) {
     const rawContent = fs3.readFileSync(path3.join(vaultDir, relativePath), "utf-8");
@@ -959,18 +965,10 @@ async function runReprocess(ctx, llmProvider, embeddingProvider, options, onProg
     if (failedOnly && !hasFailed) continue;
     const { data: frontmatter, content: body } = (0, import_gray_matter.default)(rawContent);
     const bare = bareSessionId(sessionId);
-    const turnsResult = miner.getAllTurnsWithSource(bare);
     const conversationSection = extractSection(body, CONVERSATION_HEADING);
     const fmEnd = rawContent.indexOf("---", 4);
     const frontmatterBlock = rawContent.slice(0, fmEnd + 3);
-    const batchEvents = turnsResult && turnsResult.turns.length > 0 ? turnsResult.turns.map((t) => ({
-      type: "turn",
-      prompt: t.prompt,
-      tool_count: t.toolCount,
-      response: t.aiResponse ?? "",
-      timestamp: t.timestamp
-    })) : null;
-    tasks.push({ relativePath, sessionId, bare, frontmatter, frontmatterBlock, body, conversationSection, batchEvents, turnCount: turnsResult?.turns.length ?? 0, hasFailed });
+    tasks.push({ relativePath, sessionId, bare, frontmatter, frontmatterBlock, body, conversationSection, hasFailed });
   }
   if (tasks.length === 0) {
     return { sessionsFound: sessionFiles.length, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };
@@ -994,8 +992,8 @@ async function runReprocess(ctx, llmProvider, embeddingProvider, options, onProg
     tasks,
     async (task) => {
       let obs = 0;
-      if (processor && task.batchEvents) {
-        const result = await processor.process(task.batchEvents, task.bare);
+      if (processor && task.conversationSection.trim()) {
+        const result = await processor.process(task.conversationSection, task.bare);
         if (result.observations.length > 0) {
           writeObservationNotes(result.observations, task.bare, writer, index, vaultDir);
           obs = result.observations.length;
@@ -1070,18 +1068,18 @@ ${task.frontmatter.summary ?? ""}`.slice(0, EMBEDDING_INPUT_LIMIT);
 }
 
 export {
+  readLastRecord,
   readLastTimestamp,
   appendTraceRecord,
   DigestEngine,
   Metabolism,
   SUMMARIZATION_FAILED_MARKER,
   BufferProcessor,
-  TranscriptMiner,
-  extractTurnsFromBuffer,
   writeObservationNotes,
   runRebuild,
   runDigest,
   runCuration,
+  updateTitleAndSummary,
   runReprocess
 };
-//# sourceMappingURL=chunk-PQWQC3RF.js.map
+//# sourceMappingURL=chunk-TK7A4RX7.js.map