@goondocks/myco 0.6.3 → 0.6.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. package/.claude-plugin/marketplace.json +1 -1
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/dist/{chunk-CPVXNRGW.js → chunk-4B5RO2YV.js} +4 -4
  4. package/dist/{chunk-25FY74AP.js → chunk-4DYD4HHG.js} +25 -7
  5. package/dist/chunk-4DYD4HHG.js.map +1 -0
  6. package/dist/{chunk-DBMHUMG3.js → chunk-54WVLTKD.js} +3 -3
  7. package/dist/{chunk-WU4PCNIK.js → chunk-5LMRZDH3.js} +2 -2
  8. package/dist/{chunk-CQ4RKK67.js → chunk-AHZN4Z34.js} +2 -2
  9. package/dist/{chunk-WBLTISAK.js → chunk-DYDBF5W6.js} +36 -6
  10. package/dist/chunk-DYDBF5W6.js.map +1 -0
  11. package/dist/{chunk-JSK7L46L.js → chunk-ERG2IEWX.js} +22 -4
  12. package/dist/{chunk-JSK7L46L.js.map → chunk-ERG2IEWX.js.map} +1 -1
  13. package/dist/{chunk-RNWALAFP.js → chunk-F7GAYVWF.js} +2 -2
  14. package/dist/chunk-F7GAYVWF.js.map +1 -0
  15. package/dist/{chunk-H7PRCVGQ.js → chunk-F7PGDD2X.js} +2 -2
  16. package/dist/{chunk-RY76WEN3.js → chunk-GENQ5QGP.js} +2 -2
  17. package/dist/{chunk-YG6MLLGL.js → chunk-HYVT345Y.js} +2 -2
  18. package/dist/{chunk-LDKXXKF6.js → chunk-LEK6DEAE.js} +4 -4
  19. package/dist/{chunk-IWBWZQK6.js → chunk-MDLSAFPP.js} +2 -2
  20. package/dist/{chunk-QLUE3BUL.js → chunk-O6TBHGVO.js} +9 -2
  21. package/dist/chunk-O6TBHGVO.js.map +1 -0
  22. package/dist/{chunk-RXJHB7W4.js → chunk-OEGZ5YTJ.js} +2 -2
  23. package/dist/{chunk-XNAM6Z4O.js → chunk-P723N2LP.js} +2 -2
  24. package/dist/{chunk-PQWQC3RF.js → chunk-TK7A4RX7.js} +144 -146
  25. package/dist/chunk-TK7A4RX7.js.map +1 -0
  26. package/dist/{chunk-ALBVNGCF.js → chunk-V6BJVYNH.js} +55 -44
  27. package/dist/{chunk-ALBVNGCF.js.map → chunk-V6BJVYNH.js.map} +1 -1
  28. package/dist/{chunk-CK24O5YQ.js → chunk-XH34FX4C.js} +2 -2
  29. package/dist/{chunk-4WL5X7VS.js → chunk-YRIIBPJD.js} +3 -3
  30. package/dist/{cli-EGWAINIE.js → cli-OJYHLO4Y.js} +21 -21
  31. package/dist/{client-FDKJ4BY7.js → client-SS3C5MF6.js} +5 -5
  32. package/dist/{config-HDUFDOQN.js → config-IBS6KOLQ.js} +3 -3
  33. package/dist/{curate-OHIJFBYF.js → curate-4CKEMOPV.js} +9 -10
  34. package/dist/{curate-OHIJFBYF.js.map → curate-4CKEMOPV.js.map} +1 -1
  35. package/dist/{detect-providers-4U3ZPW5G.js → detect-providers-LFIVJYQO.js} +3 -3
  36. package/dist/{digest-I2XYCK2M.js → digest-ZLARHLLY.js} +11 -11
  37. package/dist/{init-ZO2XQT6U.js → init-3LVKVQ4L.js} +8 -8
  38. package/dist/{logs-IENORIYR.js → logs-6CWVP574.js} +3 -3
  39. package/dist/{main-XZ6X4BUX.js → main-RB727YRP.js} +2109 -390
  40. package/dist/main-RB727YRP.js.map +1 -0
  41. package/dist/{rebuild-NAH4EW5B.js → rebuild-QWVVCBCZ.js} +9 -10
  42. package/dist/{rebuild-NAH4EW5B.js.map → rebuild-QWVVCBCZ.js.map} +1 -1
  43. package/dist/{reprocess-6FOP37XS.js → reprocess-YG3WLUI2.js} +11 -11
  44. package/dist/{restart-WSA4JSE3.js → restart-UIP7US4U.js} +6 -6
  45. package/dist/{search-QXJQUB35.js → search-BQLBW5CS.js} +6 -6
  46. package/dist/{server-VXN3CJ4Y.js → server-43KSJ65Q.js} +80 -32
  47. package/dist/{server-VXN3CJ4Y.js.map → server-43KSJ65Q.js.map} +1 -1
  48. package/dist/{session-start-KQ4KCQMZ.js → session-start-6SHGT2AW.js} +9 -9
  49. package/dist/setup-digest-X735EZSD.js +15 -0
  50. package/dist/setup-llm-QBSTQO7N.js +15 -0
  51. package/dist/src/cli.js +4 -4
  52. package/dist/src/daemon/main.js +4 -4
  53. package/dist/src/hooks/post-tool-use.js +5 -5
  54. package/dist/src/hooks/session-end.js +5 -5
  55. package/dist/src/hooks/session-start.js +4 -4
  56. package/dist/src/hooks/stop.js +7 -7
  57. package/dist/src/hooks/user-prompt-submit.js +5 -5
  58. package/dist/src/mcp/server.js +4 -4
  59. package/dist/src/prompts/extraction.md +4 -4
  60. package/dist/{stats-43OESUEB.js → stats-QBLIEFWL.js} +6 -6
  61. package/dist/ui/assets/index-CjWGVHhF.css +1 -0
  62. package/dist/ui/assets/index-Cq-H7wgE.js +369 -0
  63. package/dist/ui/index.html +2 -2
  64. package/dist/{verify-IIAHBAAU.js → verify-X272WGBD.js} +6 -6
  65. package/dist/{version-NKOECSVH.js → version-XE4GYTBV.js} +4 -4
  66. package/package.json +1 -1
  67. package/dist/chunk-25FY74AP.js.map +0 -1
  68. package/dist/chunk-PQWQC3RF.js.map +0 -1
  69. package/dist/chunk-QLUE3BUL.js.map +0 -1
  70. package/dist/chunk-RNWALAFP.js.map +0 -1
  71. package/dist/chunk-WBLTISAK.js.map +0 -1
  72. package/dist/main-XZ6X4BUX.js.map +0 -1
  73. package/dist/setup-digest-QNCM3PNQ.js +0 -15
  74. package/dist/setup-llm-EAOIUSPJ.js +0 -15
  75. package/dist/ui/assets/index-Bk4X_8-Z.css +0 -1
  76. package/dist/ui/assets/index-D3SY7ZHY.js +0 -299
  77. package/dist/{chunk-CPVXNRGW.js.map → chunk-4B5RO2YV.js.map} +0 -0
  78. package/dist/{chunk-DBMHUMG3.js.map → chunk-54WVLTKD.js.map} +0 -0
  79. package/dist/{chunk-WU4PCNIK.js.map → chunk-5LMRZDH3.js.map} +0 -0
  80. package/dist/{chunk-CQ4RKK67.js.map → chunk-AHZN4Z34.js.map} +0 -0
  81. package/dist/{chunk-H7PRCVGQ.js.map → chunk-F7PGDD2X.js.map} +0 -0
  82. package/dist/{chunk-RY76WEN3.js.map → chunk-GENQ5QGP.js.map} +0 -0
  83. package/dist/{chunk-YG6MLLGL.js.map → chunk-HYVT345Y.js.map} +0 -0
  84. package/dist/{chunk-LDKXXKF6.js.map → chunk-LEK6DEAE.js.map} +0 -0
  85. package/dist/{chunk-IWBWZQK6.js.map → chunk-MDLSAFPP.js.map} +0 -0
  86. package/dist/{chunk-RXJHB7W4.js.map → chunk-OEGZ5YTJ.js.map} +0 -0
  87. package/dist/{chunk-XNAM6Z4O.js.map → chunk-P723N2LP.js.map} +0 -0
  88. package/dist/{chunk-CK24O5YQ.js.map → chunk-XH34FX4C.js.map} +0 -0
  89. package/dist/{chunk-4WL5X7VS.js.map → chunk-YRIIBPJD.js.map} +0 -0
  90. package/dist/{cli-EGWAINIE.js.map → cli-OJYHLO4Y.js.map} +0 -0
  91. package/dist/{client-FDKJ4BY7.js.map → client-SS3C5MF6.js.map} +0 -0
  92. package/dist/{config-HDUFDOQN.js.map → config-IBS6KOLQ.js.map} +0 -0
  93. package/dist/{detect-providers-4U3ZPW5G.js.map → detect-providers-LFIVJYQO.js.map} +0 -0
  94. package/dist/{digest-I2XYCK2M.js.map → digest-ZLARHLLY.js.map} +0 -0
  95. package/dist/{init-ZO2XQT6U.js.map → init-3LVKVQ4L.js.map} +0 -0
  96. package/dist/{logs-IENORIYR.js.map → logs-6CWVP574.js.map} +0 -0
  97. package/dist/{reprocess-6FOP37XS.js.map → reprocess-YG3WLUI2.js.map} +0 -0
  98. package/dist/{restart-WSA4JSE3.js.map → restart-UIP7US4U.js.map} +0 -0
  99. package/dist/{search-QXJQUB35.js.map → search-BQLBW5CS.js.map} +0 -0
  100. package/dist/{session-start-KQ4KCQMZ.js.map → session-start-6SHGT2AW.js.map} +0 -0
  101. package/dist/{setup-digest-QNCM3PNQ.js.map → setup-digest-X735EZSD.js.map} +0 -0
  102. package/dist/{setup-llm-EAOIUSPJ.js.map → setup-llm-QBSTQO7N.js.map} +0 -0
  103. package/dist/{stats-43OESUEB.js.map → stats-QBLIEFWL.js.map} +0 -0
  104. package/dist/{verify-IIAHBAAU.js.map → verify-X272WGBD.js.map} +0 -0
  105. package/dist/{version-NKOECSVH.js.map → version-XE4GYTBV.js.map} +0 -0
@@ -1,29 +1,29 @@
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
  import {
  gatherStats
- } from "./chunk-WU4PCNIK.js";
+ } from "./chunk-5LMRZDH3.js";
  import {
  BufferProcessor,
  DigestEngine,
  Metabolism,
  SUMMARIZATION_FAILED_MARKER,
- TranscriptMiner,
  appendTraceRecord,
- extractTurnsFromBuffer,
+ readLastRecord,
  readLastTimestamp,
  runCuration,
  runDigest,
  runRebuild,
  runReprocess,
+ updateTitleAndSummary,
  writeObservationNotes
- } from "./chunk-PQWQC3RF.js";
+ } from "./chunk-TK7A4RX7.js";
  import {
  consolidateSpores,
  handleMycoContext
- } from "./chunk-LDKXXKF6.js";
+ } from "./chunk-LEK6DEAE.js";
  import {
  DaemonLogger
- } from "./chunk-QLUE3BUL.js";
+ } from "./chunk-O6TBHGVO.js";
  import {
  VectorIndex
  } from "./chunk-4RMSHZE4.js";
@@ -31,10 +31,9 @@ import {
  CONVERSATION_HEADING,
  VaultWriter,
  bareSessionId,
- buildSimilarityPrompt,
  checkSupersession,
  extractJson,
- extractNumber,
+ extractSection,
  formatNotesForPrompt,
  formatSessionBody,
  indexNote,
@@ -44,51 +43,54 @@ import {
  sessionNoteId,
  sessionRelativePath,
  sessionWikilink,
- stripReasoningTokens
- } from "./chunk-ALBVNGCF.js";
+ stripReasoningTokens,
+ walkMarkdownFiles
+ } from "./chunk-V6BJVYNH.js";
  import {
  generateEmbedding
  } from "./chunk-RGVBGTD6.js";
  import {
  createEmbeddingProvider,
  createLlmProvider
- } from "./chunk-DBMHUMG3.js";
- import "./chunk-RY76WEN3.js";
+ } from "./chunk-54WVLTKD.js";
+ import {
+ stripFrontmatter
+ } from "./chunk-GENQ5QGP.js";
  import {
  initFts
  } from "./chunk-6FQISQNA.js";
  import {
  MycoIndex
  } from "./chunk-TWSTAVLO.js";
- import "./chunk-4WL5X7VS.js";
+ import "./chunk-YRIIBPJD.js";
  import "./chunk-SAKJMNSR.js";
  import {
  LmStudioBackend,
  OllamaBackend
- } from "./chunk-25FY74AP.js";
+ } from "./chunk-4DYD4HHG.js";
  import {
  CONFIG_FILENAME,
  loadConfig,
  saveConfig
- } from "./chunk-YG6MLLGL.js";
+ } from "./chunk-HYVT345Y.js";
  import {
  MycoConfigSchema,
  external_exports,
  require_dist
- } from "./chunk-JSK7L46L.js";
+ } from "./chunk-ERG2IEWX.js";
  import {
  EventBuffer
  } from "./chunk-HIN3UVOG.js";
  import {
  getPluginVersion
- } from "./chunk-CK24O5YQ.js";
+ } from "./chunk-XH34FX4C.js";
  import {
+ AgentRegistry,
  claudeCodeAdapter,
  createPerProjectAdapter,
  extensionForMimeType
- } from "./chunk-RNWALAFP.js";
+ } from "./chunk-F7GAYVWF.js";
  import {
- CANDIDATE_CONTENT_PREVIEW,
  CONSOLIDATION_MAX_TOKENS,
  CONSOLIDATION_MIN_CLUSTER_SIZE,
  CONSOLIDATION_VECTOR_FETCH_LIMIT,
@@ -98,16 +100,29 @@ import {
  DAEMON_EVICT_TIMEOUT_MS,
  EMBEDDING_INPUT_LIMIT,
  FILE_WATCH_STABILITY_MS,
+ ITEM_STAGE_MAP,
  LINEAGE_RECENT_SESSIONS_LIMIT,
  LLM_REASONING_MODE,
+ LOG_MESSAGE_PREVIEW_CHARS,
+ LOG_PROMPT_PREVIEW_CHARS,
  MAX_SLUG_LENGTH,
+ MS_PER_DAY,
+ PIPELINE_BACKOFF_MULTIPLIER,
+ PIPELINE_ITEMS_DEFAULT_LIMIT,
+ PIPELINE_PARSE_MAX_RETRIES,
+ PIPELINE_PROVIDER_ROLES,
+ PIPELINE_STAGES,
+ PIPELINE_TICK_STAGES,
  PROMPT_CONTEXT_MAX_SPORES,
  PROMPT_CONTEXT_MIN_LENGTH,
  PROMPT_CONTEXT_MIN_SIMILARITY,
+ PROMPT_PREVIEW_CHARS,
  RELATED_SPORES_LIMIT,
  SESSION_CONTEXT_MAX_PLANS,
- STALE_BUFFER_MAX_AGE_MS
- } from "./chunk-WBLTISAK.js";
+ STAGE_PROVIDER_MAP,
+ STALE_BUFFER_MAX_AGE_MS,
+ estimateTokens
+ } from "./chunk-DYDBF5W6.js";
  import {
  __toESM
  } from "./chunk-PZUWP5VK.js";
@@ -502,10 +517,6 @@ import fs3 from "fs";
  import path3 from "path";
  var LINEAGE_IMMEDIATE_GAP_SECONDS = 5;
  var LINEAGE_FALLBACK_MAX_HOURS = 24;
- var LINEAGE_SIMILARITY_THRESHOLD = 0.7;
- var LINEAGE_SIMILARITY_HIGH_CONFIDENCE = 0.9;
- var LINEAGE_SIMILARITY_CANDIDATES = 3;
- var LINEAGE_SIMILARITY_MAX_TOKENS = 8;
  var MS_PER_SECOND = 1e3;
  var MS_PER_HOUR = 36e5;
  var LineageGraph = class {
@@ -697,7 +708,7 @@ var ReaddirpStream = class extends Readable {
  this._directoryFilter = normalizeFilter(opts.directoryFilter);
  const statMethod = opts.lstat ? lstat : stat;
  if (wantBigintFsStats) {
- this._stat = (path10) => statMethod(path10, { bigint: true });
+ this._stat = (path12) => statMethod(path12, { bigint: true });
  } else {
  this._stat = statMethod;
  }
@@ -722,8 +733,8 @@ var ReaddirpStream = class extends Readable {
  const par = this.parent;
  const fil = par && par.files;
  if (fil && fil.length > 0) {
- const { path: path10, depth } = par;
- const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path10));
+ const { path: path12, depth } = par;
+ const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path12));
  const awaited = await Promise.all(slice);
  for (const entry of awaited) {
  if (!entry)
@@ -763,20 +774,20 @@ var ReaddirpStream = class extends Readable {
  this.reading = false;
  }
  }
- async _exploreDir(path10, depth) {
+ async _exploreDir(path12, depth) {
  let files;
  try {
- files = await readdir(path10, this._rdOptions);
+ files = await readdir(path12, this._rdOptions);
  } catch (error) {
  this._onError(error);
  }
- return { files, depth, path: path10 };
+ return { files, depth, path: path12 };
  }
- async _formatEntry(dirent, path10) {
+ async _formatEntry(dirent, path12) {
  let entry;
  const basename3 = this._isDirent ? dirent.name : dirent;
  try {
- const fullPath = presolve(pjoin(path10, basename3));
+ const fullPath = presolve(pjoin(path12, basename3));
  entry = { path: prelative(this._root, fullPath), fullPath, basename: basename3 };
  entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
  } catch (err) {
@@ -1176,16 +1187,16 @@ var delFromSet = (main2, prop, item) => {
  };
  var isEmptySet = (val) => val instanceof Set ? val.size === 0 : !val;
  var FsWatchInstances = /* @__PURE__ */ new Map();
- function createFsWatchInstance(path10, options, listener, errHandler, emitRaw) {
+ function createFsWatchInstance(path12, options, listener, errHandler, emitRaw) {
  const handleEvent = (rawEvent, evPath) => {
- listener(path10);
- emitRaw(rawEvent, evPath, { watchedPath: path10 });
- if (evPath && path10 !== evPath) {
- fsWatchBroadcast(sp.resolve(path10, evPath), KEY_LISTENERS, sp.join(path10, evPath));
+ listener(path12);
+ emitRaw(rawEvent, evPath, { watchedPath: path12 });
+ if (evPath && path12 !== evPath) {
+ fsWatchBroadcast(sp.resolve(path12, evPath), KEY_LISTENERS, sp.join(path12, evPath));
  }
  };
  try {
- return fs_watch(path10, {
+ return fs_watch(path12, {
  persistent: options.persistent
  }, handleEvent);
  } catch (error) {
@@ -1201,12 +1212,12 @@ var fsWatchBroadcast = (fullPath, listenerType, val1, val2, val3) => {
  listener(val1, val2, val3);
  });
  };
- var setFsWatchListener = (path10, fullPath, options, handlers) => {
+ var setFsWatchListener = (path12, fullPath, options, handlers) => {
  const { listener, errHandler, rawEmitter } = handlers;
  let cont = FsWatchInstances.get(fullPath);
  let watcher;
  if (!options.persistent) {
- watcher = createFsWatchInstance(path10, options, listener, errHandler, rawEmitter);
+ watcher = createFsWatchInstance(path12, options, listener, errHandler, rawEmitter);
  if (!watcher)
  return;
  return watcher.close.bind(watcher);
@@ -1217,7 +1228,7 @@ var setFsWatchListener = (path10, fullPath, options, handlers) => {
  addAndConvert(cont, KEY_RAW, rawEmitter);
  } else {
  watcher = createFsWatchInstance(
- path10,
+ path12,
  options,
  fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS),
  errHandler,
@@ -1232,7 +1243,7 @@ var setFsWatchListener = (path10, fullPath, options, handlers) => {
  cont.watcherUnusable = true;
  if (isWindows && error.code === "EPERM") {
  try {
- const fd = await open(path10, "r");
+ const fd = await open(path12, "r");
  await fd.close();
  broadcastErr(error);
  } catch (err) {
@@ -1263,7 +1274,7 @@ var setFsWatchListener = (path10, fullPath, options, handlers) => {
  };
  };
  var FsWatchFileInstances = /* @__PURE__ */ new Map();
- var setFsWatchFileListener = (path10, fullPath, options, handlers) => {
+ var setFsWatchFileListener = (path12, fullPath, options, handlers) => {
  const { listener, rawEmitter } = handlers;
  let cont = FsWatchFileInstances.get(fullPath);
  const copts = cont && cont.options;
@@ -1285,7 +1296,7 @@ var setFsWatchFileListener = (path10, fullPath, options, handlers) => {
  });
  const currmtime = curr.mtimeMs;
  if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
- foreach(cont.listeners, (listener2) => listener2(path10, curr));
+ foreach(cont.listeners, (listener2) => listener2(path12, curr));
  }
  })
  };
@@ -1315,13 +1326,13 @@ var NodeFsHandler = class {
  * @param listener on fs change
  * @returns closer for the watcher instance
  */
- _watchWithNodeFs(path10, listener) {
+ _watchWithNodeFs(path12, listener) {
  const opts = this.fsw.options;
- const directory = sp.dirname(path10);
- const basename3 = sp.basename(path10);
+ const directory = sp.dirname(path12);
+ const basename3 = sp.basename(path12);
  const parent = this.fsw._getWatchedDir(directory);
  parent.add(basename3);
- const absolutePath = sp.resolve(path10);
+ const absolutePath = sp.resolve(path12);
  const options = {
  persistent: opts.persistent
  };
@@ -1331,12 +1342,12 @@ var NodeFsHandler = class {
  if (opts.usePolling) {
  const enableBin = opts.interval !== opts.binaryInterval;
  options.interval = enableBin && isBinaryPath(basename3) ? opts.binaryInterval : opts.interval;
- closer = setFsWatchFileListener(path10, absolutePath, options, {
+ closer = setFsWatchFileListener(path12, absolutePath, options, {
  listener,
  rawEmitter: this.fsw._emitRaw
  });
  } else {
- closer = setFsWatchListener(path10, absolutePath, options, {
+ closer = setFsWatchListener(path12, absolutePath, options, {
  listener,
  errHandler: this._boundHandleError,
  rawEmitter: this.fsw._emitRaw
@@ -1358,7 +1369,7 @@ var NodeFsHandler = class {
  let prevStats = stats;
  if (parent.has(basename3))
  return;
- const listener = async (path10, newStats) => {
+ const listener = async (path12, newStats) => {
  if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5))
  return;
  if (!newStats || newStats.mtimeMs === 0) {
@@ -1372,11 +1383,11 @@ var NodeFsHandler = class {
  this.fsw._emit(EV.CHANGE, file, newStats2);
  }
  if ((isMacos || isLinux || isFreeBSD) && prevStats.ino !== newStats2.ino) {
- this.fsw._closeFile(path10);
+ this.fsw._closeFile(path12);
  prevStats = newStats2;
  const closer2 = this._watchWithNodeFs(file, listener);
  if (closer2)
- this.fsw._addPathCloser(path10, closer2);
+ this.fsw._addPathCloser(path12, closer2);
  } else {
  prevStats = newStats2;
  }
@@ -1408,7 +1419,7 @@ var NodeFsHandler = class {
  * @param item basename of this item
  * @returns true if no more processing is needed for this entry.
  */
- async _handleSymlink(entry, directory, path10, item) {
+ async _handleSymlink(entry, directory, path12, item) {
  if (this.fsw.closed) {
  return;
  }
@@ -1418,7 +1429,7 @@ var NodeFsHandler = class {
  this.fsw._incrReadyCount();
  let linkPath;
  try {
- linkPath = await fsrealpath(path10);
+ linkPath = await fsrealpath(path12);
  } catch (e) {
  this.fsw._emitReady();
  return true;
@@ -1428,12 +1439,12 @@ var NodeFsHandler = class {
  if (dir.has(item)) {
  if (this.fsw._symlinkPaths.get(full) !== linkPath) {
  this.fsw._symlinkPaths.set(full, linkPath);
- this.fsw._emit(EV.CHANGE, path10, entry.stats);
+ this.fsw._emit(EV.CHANGE, path12, entry.stats);
  }
  } else {
  dir.add(item);
  this.fsw._symlinkPaths.set(full, linkPath);
- this.fsw._emit(EV.ADD, path10, entry.stats);
+ this.fsw._emit(EV.ADD, path12, entry.stats);
  }
  this.fsw._emitReady();
  return true;
@@ -1463,9 +1474,9 @@ var NodeFsHandler = class {
  return;
  }
  const item = entry.path;
- let path10 = sp.join(directory, item);
+ let path12 = sp.join(directory, item);
  current.add(item);
- if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path10, item)) {
+ if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path12, item)) {
  return;
  }
  if (this.fsw.closed) {
@@ -1474,8 +1485,8 @@ var NodeFsHandler = class {
  }
  if (item === target || !target && !previous.has(item)) {
  this.fsw._incrReadyCount();
- path10 = sp.join(dir, sp.relative(dir, path10));
- this._addToNodeFs(path10, initialAdd, wh, depth + 1);
+ path12 = sp.join(dir, sp.relative(dir, path12));
+ this._addToNodeFs(path12, initialAdd, wh, depth + 1);
  }
  }).on(EV.ERROR, this._boundHandleError);
  return new Promise((resolve3, reject) => {
@@ -1544,13 +1555,13 @@ var NodeFsHandler = class {
  * @param depth Child path actually targeted for watch
  * @param target Child path actually targeted for watch
  */
- async _addToNodeFs(path10, initialAdd, priorWh, depth, target) {
+ async _addToNodeFs(path12, initialAdd, priorWh, depth, target) {
  const ready = this.fsw._emitReady;
- if (this.fsw._isIgnored(path10) || this.fsw.closed) {
+ if (this.fsw._isIgnored(path12) || this.fsw.closed) {
  ready();
  return false;
  }
- const wh = this.fsw._getWatchHelpers(path10);
+ const wh = this.fsw._getWatchHelpers(path12);
  if (priorWh) {
  wh.filterPath = (entry) => priorWh.filterPath(entry);
  wh.filterDir = (entry) => priorWh.filterDir(entry);
@@ -1566,8 +1577,8 @@ var NodeFsHandler = class {
  const follow = this.fsw.options.followSymlinks;
  let closer;
  if (stats.isDirectory()) {
- const absPath = sp.resolve(path10);
- const targetPath = follow ? await fsrealpath(path10) : path10;
+ const absPath = sp.resolve(path12);
+ const targetPath = follow ? await fsrealpath(path12) : path12;
  if (this.fsw.closed)
  return;
  closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
@@ -1577,29 +1588,29 @@ var NodeFsHandler = class {
  this.fsw._symlinkPaths.set(absPath, targetPath);
  }
  } else if (stats.isSymbolicLink()) {
- const targetPath = follow ? await fsrealpath(path10) : path10;
+ const targetPath = follow ? await fsrealpath(path12) : path12;
  if (this.fsw.closed)
  return;
  const parent = sp.dirname(wh.watchPath);
  this.fsw._getWatchedDir(parent).add(wh.watchPath);
  this.fsw._emit(EV.ADD, wh.watchPath, stats);
- closer = await this._handleDir(parent, stats, initialAdd, depth, path10, wh, targetPath);
+ closer = await this._handleDir(parent, stats, initialAdd, depth, path12, wh, targetPath);
  if (this.fsw.closed)
  return;
  if (targetPath !== void 0) {
- this.fsw._symlinkPaths.set(sp.resolve(path10), targetPath);
+ this.fsw._symlinkPaths.set(sp.resolve(path12), targetPath);
  }
  } else {
  closer = this._handleFile(wh.watchPath, stats, initialAdd);
  }
  ready();
  if (closer)
- this.fsw._addPathCloser(path10, closer);
+ this.fsw._addPathCloser(path12, closer);
  return false;
  } catch (error) {
  if (this.fsw._handleError(error)) {
  ready();
- return path10;
+ return path12;
  }
  }
  }
@@ -1642,24 +1653,24 @@ function createPattern(matcher) {
  }
  return () => false;
  }
- function normalizePath(path10) {
- if (typeof path10 !== "string")
+ function normalizePath(path12) {
+ if (typeof path12 !== "string")
  throw new Error("string expected");
- path10 = sp2.normalize(path10);
- path10 = path10.replace(/\\/g, "/");
+ path12 = sp2.normalize(path12);
+ path12 = path12.replace(/\\/g, "/");
  let prepend = false;
- if (path10.startsWith("//"))
+ if (path12.startsWith("//"))
  prepend = true;
- path10 = path10.replace(DOUBLE_SLASH_RE, "/");
+ path12 = path12.replace(DOUBLE_SLASH_RE, "/");
  if (prepend)
- path10 = "/" + path10;
- return path10;
+ path12 = "/" + path12;
+ return path12;
  }
  function matchPatterns(patterns, testString, stats) {
- const path10 = normalizePath(testString);
+ const path12 = normalizePath(testString);
  for (let index = 0; index < patterns.length; index++) {
  const pattern = patterns[index];
- if (pattern(path10, stats)) {
+ if (pattern(path12, stats)) {
  return true;
  }
  }
@@ -1697,19 +1708,19 @@ var toUnix = (string) => {
  }
  return str;
  };
- var normalizePathToUnix = (path10) => toUnix(sp2.normalize(toUnix(path10)));
- var normalizeIgnored = (cwd = "") => (path10) => {
- if (typeof path10 === "string") {
- return normalizePathToUnix(sp2.isAbsolute(path10) ? path10 : sp2.join(cwd, path10));
+ var normalizePathToUnix = (path12) => toUnix(sp2.normalize(toUnix(path12)));
+ var normalizeIgnored = (cwd = "") => (path12) => {
+ if (typeof path12 === "string") {
+ return normalizePathToUnix(sp2.isAbsolute(path12) ? path12 : sp2.join(cwd, path12));
  } else {
- return path10;
+ return path12;
  }
  };
- var getAbsolutePath = (path10, cwd) => {
- if (sp2.isAbsolute(path10)) {
- return path10;
+ var getAbsolutePath = (path12, cwd) => {
+ if (sp2.isAbsolute(path12)) {
+ return path12;
  }
- return sp2.join(cwd, path10);
+ return sp2.join(cwd, path12);
  };
  var EMPTY_SET = Object.freeze(/* @__PURE__ */ new Set());
  var DirEntry = class {
@@ -1774,10 +1785,10 @@ var WatchHelper = class {
  dirParts;
  followSymlinks;
  statMethod;
- constructor(path10, follow, fsw) {
+ constructor(path12, follow, fsw) {
  this.fsw = fsw;
- const watchPath = path10;
- this.path = path10 = path10.replace(REPLACER_RE, "");
+ const watchPath = path12;
+ this.path = path12 = path12.replace(REPLACER_RE, "");
  this.watchPath = watchPath;
  this.fullWatchPath = sp2.resolve(watchPath);
  this.dirParts = [];
@@ -1917,20 +1928,20 @@ var FSWatcher = class extends EventEmitter {
  this._closePromise = void 0;
  let paths = unifyPaths(paths_);
  if (cwd) {
- paths = paths.map((path10) => {
- const absPath = getAbsolutePath(path10, cwd);
+ paths = paths.map((path12) => {
+ const absPath = getAbsolutePath(path12, cwd);
  return absPath;
  });
  }
- paths.forEach((path10) => {
- this._removeIgnoredPath(path10);
+ paths.forEach((path12) => {
+ this._removeIgnoredPath(path12);
  });
  this._userIgnored = void 0;
  if (!this._readyCount)
  this._readyCount = 0;
  this._readyCount += paths.length;
- Promise.all(paths.map(async (path10) => {
- const res = await this._nodeFsHandler._addToNodeFs(path10, !_internal, void 0, 0, _origAdd);
+ Promise.all(paths.map(async (path12) => {
+ const res = await this._nodeFsHandler._addToNodeFs(path12, !_internal, void 0, 0, _origAdd);
  if (res)
  this._emitReady();
  return res;
@@ -1952,17 +1963,17 @@ var FSWatcher = class extends EventEmitter {
  return this;
  const paths = unifyPaths(paths_);
  const { cwd } = this.options;
- paths.forEach((path10) => {
- if (!sp2.isAbsolute(path10) && !this._closers.has(path10)) {
+ paths.forEach((path12) => {
+ if (!sp2.isAbsolute(path12) && !this._closers.has(path12)) {
  if (cwd)
- path10 = sp2.join(cwd, path10);
- path10 = sp2.resolve(path10);
+ path12 = sp2.join(cwd, path12);
+ path12 = sp2.resolve(path12);
  }
- this._closePath(path10);
- this._addIgnoredPath(path10);
- if (this._watched.has(path10)) {
+ this._closePath(path12);
+ this._addIgnoredPath(path12);
+ if (this._watched.has(path12)) {
  this._addIgnoredPath({
- path: path10,
+ path: path12,
  recursive: true
  });
  }
@@ -2026,38 +2037,38 @@ var FSWatcher = class extends EventEmitter {
  * @param stats arguments to be passed with event
  * @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
  */
- async _emit(event, path10, stats) {
+ async _emit(event, path12, stats) {
  if (this.closed)
  return;
  const opts = this.options;
  if (isWindows)
- path10 = sp2.normalize(path10);
+ path12 = sp2.normalize(path12);
  if (opts.cwd)
- path10 = sp2.relative(opts.cwd, path10);
- const args = [path10];
+ path12 = sp2.relative(opts.cwd, path12);
+ const args = [path12];
  if (stats != null)
  args.push(stats);
  const awf = opts.awaitWriteFinish;
  let pw;
- if (awf && (pw = this._pendingWrites.get(path10))) {
+ if (awf && (pw = this._pendingWrites.get(path12))) {
  pw.lastChange = /* @__PURE__ */ new Date();
  return this;
  }
  if (opts.atomic) {
  if (event === EVENTS.UNLINK) {
- this._pendingUnlinks.set(path10, [event, ...args]);
+ this._pendingUnlinks.set(path12, [event, ...args]);
  setTimeout(() => {
- this._pendingUnlinks.forEach((entry, path11) => {
+ this._pendingUnlinks.forEach((entry, path13) => {
  this.emit(...entry);
  this.emit(EVENTS.ALL, ...entry);
- this._pendingUnlinks.delete(path11);
+ this._pendingUnlinks.delete(path13);
  });
  }, typeof opts.atomic === "number" ? opts.atomic : 100);
  return this;
  }
- if (event === EVENTS.ADD && this._pendingUnlinks.has(path10)) {
+ if (event === EVENTS.ADD && this._pendingUnlinks.has(path12)) {
  event = EVENTS.CHANGE;
- this._pendingUnlinks.delete(path10);
+ this._pendingUnlinks.delete(path12);
  }
  }
  if (awf && (event === EVENTS.ADD || event === EVENTS.CHANGE) && this._readyEmitted) {
@@ -2075,16 +2086,16 @@ var FSWatcher = class extends EventEmitter {
  this.emitWithAll(event, args);
  }
  };
- this._awaitWriteFinish(path10, awf.stabilityThreshold, event, awfEmit);
+ this._awaitWriteFinish(path12, awf.stabilityThreshold, event, awfEmit);
  return this;
  }
  if (event === EVENTS.CHANGE) {
- const isThrottled = !this._throttle(EVENTS.CHANGE, path10, 50);
+ const isThrottled = !this._throttle(EVENTS.CHANGE, path12, 50);
  if (isThrottled)
  return this;
  }
  if (opts.alwaysStat && stats === void 0 && (event === EVENTS.ADD || event === EVENTS.ADD_DIR || event === EVENTS.CHANGE)) {
- const fullPath = opts.cwd ? sp2.join(opts.cwd, path10) : path10;
+ const fullPath = opts.cwd ? sp2.join(opts.cwd, path12) : path12;
  let stats2;
  try {
  stats2 = await stat3(fullPath);
@@ -2115,23 +2126,23 @@ var FSWatcher = class extends EventEmitter {
  * @param timeout duration of time to suppress duplicate actions
  * @returns tracking object or false if action should be suppressed
  */
- _throttle(actionType, path10, timeout) {
+ _throttle(actionType, path12, timeout) {
  if (!this._throttled.has(actionType)) {
  this._throttled.set(actionType, /* @__PURE__ */ new Map());
  }
  const action = this._throttled.get(actionType);
  if (!action)
  throw new Error("invalid throttle");
- const actionPath = action.get(path10);
+ const actionPath = action.get(path12);
  if (actionPath) {
  actionPath.count++;
  return false;
  }
  let timeoutObject;
  const clear = () => {
- const item = action.get(path10);
+ const item = action.get(path12);
  const count = item ? item.count : 0;
- action.delete(path10);
+ action.delete(path12);
  clearTimeout(timeoutObject);
  if (item)
  clearTimeout(item.timeoutObject);
@@ -2139,7 +2150,7 @@ var FSWatcher = class extends EventEmitter {
  };
  timeoutObject = setTimeout(clear, timeout);
  const thr = { timeoutObject, clear, count: 0 };
- action.set(path10, thr);
+ action.set(path12, thr);
  return thr;
  }
  _incrReadyCount() {
@@ -2153,44 +2164,44 @@ var FSWatcher = class extends EventEmitter {
  * @param event
  * @param awfEmit Callback to be called when ready for event to be emitted.
  */
- _awaitWriteFinish(path10, threshold, event, awfEmit) {
+ _awaitWriteFinish(path12, threshold, event, awfEmit) {
  const awf = this.options.awaitWriteFinish;
  if (typeof awf !== "object")
  return;
  const pollInterval = awf.pollInterval;
  let timeoutHandler;
- let fullPath = path10;
- if (this.options.cwd && !sp2.isAbsolute(path10)) {
- fullPath = sp2.join(this.options.cwd, path10);
+ let fullPath = path12;
+ if (this.options.cwd && !sp2.isAbsolute(path12)) {
+ fullPath = sp2.join(this.options.cwd, path12);
  }
  const now = /* @__PURE__ */ new Date();
  const writes = this._pendingWrites;
  function awaitWriteFinishFn(prevStat) {
  statcb(fullPath, (err, curStat) => {
- if (err || !writes.has(path10)) {
+ if (err || !writes.has(path12)) {
  if (err && err.code !== "ENOENT")
  awfEmit(err);
  return;
  }
  const now2 = Number(/* @__PURE__ */ new Date());
  if (prevStat && curStat.size !== prevStat.size) {
- writes.get(path10).lastChange = now2;
+ writes.get(path12).lastChange = now2;
  }
- const pw = writes.get(path10);
+ const pw = writes.get(path12);
  const df = now2 - pw.lastChange;
  if (df >= threshold) {
- writes.delete(path10);
+ writes.delete(path12);
  awfEmit(void 0, curStat);
  } else {
  timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval, curStat);
  }
  });
  }
- if (!writes.has(path10)) {
- writes.set(path10, {
+ if (!writes.has(path12)) {
+ writes.set(path12, {
  lastChange: now,
  cancelWait: () => {
- writes.delete(path10);
+ writes.delete(path12);
  clearTimeout(timeoutHandler);
  return event;
  }
@@ -2201,8 +2212,8 @@ var FSWatcher = class extends EventEmitter {
  /**
  * Determines whether user has asked to ignore this path.
  */
- _isIgnored(path10, stats) {
- if (this.options.atomic && DOT_RE.test(path10))
+ _isIgnored(path12, stats) {
+ if (this.options.atomic && DOT_RE.test(path12))
  return true;
  if (!this._userIgnored) {
  const { cwd } = this.options;
@@ -2212,17 +2223,17 @@ var FSWatcher = class extends EventEmitter {
  const list = [...ignoredPaths.map(normalizeIgnored(cwd)), ...ignored];
  this._userIgnored = anymatch(list, void 0);
  }
- return this._userIgnored(path10, stats);
+ return this._userIgnored(path12, stats);
  }
- _isntIgnored(path10, stat4) {
- return !this._isIgnored(path10, stat4);
+ _isntIgnored(path12, stat4) {
+ return !this._isIgnored(path12, stat4);
  }
  /**
  * Provides a set of common helpers and properties relating to symlink handling.
  * @param path file or directory pattern being watched
  */
- _getWatchHelpers(path10) {
- return new WatchHelper(path10, this.options.followSymlinks, this);
+ _getWatchHelpers(path12) {
+ return new WatchHelper(path12, this.options.followSymlinks, this);
  }
  // Directory helpers
  // -----------------
@@ -2254,63 +2265,63 @@ var FSWatcher = class extends EventEmitter {
  * @param item base path of item/directory
  */
  _remove(directory, item, isDirectory) {
- const path10 = sp2.join(directory, item);
- const fullPath = sp2.resolve(path10);
- isDirectory = isDirectory != null ? isDirectory : this._watched.has(path10) || this._watched.has(fullPath);
- if (!this._throttle("remove", path10, 100))
+ const path12 = sp2.join(directory, item);
+ const fullPath = sp2.resolve(path12);
+ isDirectory = isDirectory != null ? isDirectory : this._watched.has(path12) || this._watched.has(fullPath);
+ if (!this._throttle("remove", path12, 100))
  return;
  if (!isDirectory && this._watched.size === 1) {
  this.add(directory, item, true);
  }
- const wp = this._getWatchedDir(path10);
+ const wp = this._getWatchedDir(path12);
  const nestedDirectoryChildren = wp.getChildren();
- nestedDirectoryChildren.forEach((nested) => this._remove(path10, nested));
+ nestedDirectoryChildren.forEach((nested) => this._remove(path12, nested));
  const parent = this._getWatchedDir(directory);
  const wasTracked = parent.has(item);
  parent.remove(item);
  if (this._symlinkPaths.has(fullPath)) {
  this._symlinkPaths.delete(fullPath);
  }
- let relPath = path10;
+ let relPath = path12;
  if (this.options.cwd)
- relPath = sp2.relative(this.options.cwd, path10);
+ relPath = sp2.relative(this.options.cwd, path12);
  if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
  const event = this._pendingWrites.get(relPath).cancelWait();
  if (event === EVENTS.ADD)
  return;
  }
- this._watched.delete(path10);
+ this._watched.delete(path12);
  this._watched.delete(fullPath);
  const eventName = isDirectory ? EVENTS.UNLINK_DIR : EVENTS.UNLINK;
- if (wasTracked && !this._isIgnored(path10))
- this._emit(eventName, path10);
- this._closePath(path10);
+ if (wasTracked && !this._isIgnored(path12))
+ this._emit(eventName, path12);
+ this._closePath(path12);
  }
  /**
  * Closes all watchers for a path
  */
- _closePath(path10) {
- this._closeFile(path10);
- const dir = sp2.dirname(path10);
- this._getWatchedDir(dir).remove(sp2.basename(path10));
+ _closePath(path12) {
+ this._closeFile(path12);
+ const dir = sp2.dirname(path12);
+ this._getWatchedDir(dir).remove(sp2.basename(path12));
  }
  /**
  * Closes only file-specific watchers
  */
- _closeFile(path10) {
- const closers = this._closers.get(path10);
+ _closeFile(path12) {
+ const closers = this._closers.get(path12);
  if (!closers)
  return;
  closers.forEach((closer) => closer());
- this._closers.delete(path10);
+ this._closers.delete(path12);
  }
- _addPathCloser(path10, closer) {
+ _addPathCloser(path12, closer) {
  if (!closer)
  return;
- let list = this._closers.get(path10);
+ let list = this._closers.get(path12);
  if (!list) {
  list = [];
- this._closers.set(path10, list);
+ this._closers.set(path12, list);
  }
  list.push(closer);
  }
@@ -2546,6 +2557,10 @@ var ConsolidationEngine = class {
  continue;
  }
  clustersFound++;
+ this.log("debug", "Consolidation groups formed", {
+ group_count: clustersFound,
+ total_notes: cluster.length
+ });
  const candidatesText = formatNotesForPrompt(cluster);
  const prompt = template.replace("{{count}}", String(cluster.length)).replace("{{observation_type}}", observationType ?? "unknown").replace("{{candidates}}", candidatesText).replace("{{maxTokens}}", String(this.maxTokens));
  let responseText;
@@ -2620,10 +2635,11 @@ var ConsolidationEngine = class {
  );
  consolidated++;
  sporesSuperseded += consolidateResult.sources_archived;
- this.log("info", "ConsolidationEngine: consolidated cluster", {
+ this.log("info", "Notes consolidated", {
  wisdomId: consolidateResult.wisdom_id,
  sourcesArchived: consolidateResult.sources_archived,
- clusterSize: cluster.length
+ clusterSize: cluster.length,
+ similarity: vectorResults[0]?.similarity ?? 0
  });
  } catch (err) {
  this.log("warn", "ConsolidationEngine: consolidateSpores failed", {
@@ -2633,6 +2649,9 @@ var ConsolidationEngine = class {
  }
  cluster.forEach((n) => processedIds.add(n.id));
  }
+ if (clustersFound === 0) {
+ this.log("debug", "Consolidation: no groups above threshold");
+ }
  const passTimestamp = (/* @__PURE__ */ new Date()).toISOString();
  const durationMs = Date.now() - startTime;
  const passResult = {
@@ -2690,6 +2709,53 @@ function isPortAvailable(port) {
  });
  }

+ // src/capture/transcript-miner.ts
+ var TranscriptMiner = class {
+ registry;
+ constructor(config) {
+ this.registry = new AgentRegistry(config?.additionalAdapters);
+ }
+ /**
+ * Extract all conversation turns for a session.
+ * Convenience wrapper — delegates to getAllTurnsWithSource.
+ */
+ getAllTurns(sessionId) {
+ return this.getAllTurnsWithSource(sessionId).turns;
+ }
+ /**
+ * Extract turns using the hook-provided transcript path first (fast, no scanning),
+ * then fall back to adapter registry scanning if the path isn't provided.
+ */
+ getAllTurnsWithSource(sessionId, transcriptPath) {
+ if (transcriptPath) {
+ const result2 = this.registry.parseTurnsFromPath(transcriptPath);
+ if (result2) return result2;
+ }
+ const result = this.registry.getTranscriptTurns(sessionId);
+ if (result) return result;
+ return { turns: [], source: "none" };
+ }
+ };
+ function extractTurnsFromBuffer(events) {
+ const turns = [];
+ let current = null;
+ for (const event of events) {
+ const type = event.type;
+ if (type === "user_prompt") {
+ if (current) turns.push(current);
+ current = {
+ prompt: String(event.prompt ?? "").slice(0, PROMPT_PREVIEW_CHARS),
+ toolCount: 0,
+ timestamp: String(event.timestamp ?? (/* @__PURE__ */ new Date()).toISOString())
+ };
+ } else if (type === "tool_use") {
+ if (current) current.toolCount++;
+ }
+ }
+ if (current) turns.push(current);
+ return turns;
+ }
+
  // src/artifacts/candidates.ts
  import { execFileSync } from "child_process";
  import fs4 from "fs";
@@ -2843,10 +2909,11 @@ async function handleGetLogs(ringBuffer, query) {
  const since = query.since || null;
  const level = query.level;
  const limit = query.limit ? parseInt(query.limit, 10) : void 0;
- const result = ringBuffer.since(since, { level, limit: isNaN(limit) ? void 0 : limit });
+ const component = query.category || void 0;
+ const result = ringBuffer.since(since, { level, component, limit: isNaN(limit) ? void 0 : limit });
  const entries = result.entries.map((entry) => {
- const { component, ...rest } = entry;
- return { ...rest, category: component };
+ const { component: component2, ...rest } = entry;
+ return { ...rest, category: component2 };
  });
  return {
  body: {
@@ -2997,6 +3064,7 @@ async function handleRebuild(deps) {
  config: deps.config,
  index: deps.index,
  vectorIndex: deps.vectorIndex ?? void 0,
+ pipeline: deps.pipeline,
  log: deps.log
  },
  deps.embeddingProvider,
@@ -3046,9 +3114,10 @@ async function handleDigest(deps, body) {
  config: deps.config,
  index: deps.index,
  vectorIndex: deps.vectorIndex ?? void 0,
+ pipeline: deps.pipeline,
  log: deps.log
  },
- deps.llmProvider,
+ deps.digestLlmProvider,
  options ?? void 0
  ).then((result) => {
  if (result) {
@@ -3061,6 +3130,13 @@ async function handleDigest(deps, body) {
  tiers: result.tiersGenerated,
  duration: result.durationMs
  });
+ } else if (options?.full && deps.onForceDigest) {
+ deps.onForceDigest();
+ deps.progressTracker.update(token, {
+ status: "completed",
+ percent: PROGRESS_COMPLETE,
+ message: "Full cycle queued \u2014 items reset to pending"
+ });
  } else {
  deps.progressTracker.update(token, {
  status: "completed",
@@ -3093,6 +3169,7 @@ async function handleCurate(deps, body, runCuration2) {
  vectorIndex: deps.vectorIndex,
  llmProvider: deps.llmProvider,
  embeddingProvider: deps.embeddingProvider,
+ pipeline: deps.pipeline,
  log: deps.log
  };
  if (isDryRun) {
@@ -3144,6 +3221,7 @@ async function handleReprocess(deps, body) {
  config: deps.config,
  index: deps.index,
  vectorIndex: deps.vectorIndex ?? void 0,
+ pipeline: deps.pipeline,
  log: deps.log
  },
  deps.llmProvider,
@@ -3250,47 +3328,1412 @@ function handleGetSessions(index) {
  return { body: { sessions, dates } };
  }

- // src/daemon/main.ts
+ // src/daemon/api/pipeline.ts
+ var RetryItemBody = external_exports.object({
+ type: external_exports.string(),
+ stage: external_exports.string()
+ });
+ function handlePipelineHealth(pipeline) {
+ return async () => {
+ const health = pipeline.health();
+ return { body: health };
+ };
+ }
+ function handlePipelineItems(pipeline) {
+ return async (req) => {
+ const { stage, status, type, limit, offset } = req.query;
+ const parsedLimit = limit ? parseInt(limit, 10) : void 0;
+ const parsedOffset = offset ? parseInt(offset, 10) : void 0;
+ if (limit && (isNaN(parsedLimit) || parsedLimit < 0)) {
+ return { status: 400, body: { error: "invalid_limit", message: "limit must be a non-negative integer" } };
+ }
+ if (offset && (isNaN(parsedOffset) || parsedOffset < 0)) {
+ return { status: 400, body: { error: "invalid_offset", message: "offset must be a non-negative integer" } };
+ }
+ const result = pipeline.listItems({
+ stage,
+ status,
+ type,
+ limit: parsedLimit,
+ offset: parsedOffset
+ });
+ return { body: result };
+ };
+ }
+ function handlePipelineItemDetail(pipeline) {
+ return async (req) => {
+ const { id } = req.params;
+ const { type } = req.query;
+ if (!type) {
+ return { status: 400, body: { error: "missing_type", message: 'query param "type" is required' } };
+ }
+ const stages = pipeline.getItemStatus(id, type);
+ if (stages.length === 0) {
+ return { status: 404, body: { error: "not_found", message: `No work item found: ${id} (${type})` } };
+ }
+ const history = pipeline.getTransitionHistory(id, type);
+ return { body: { id, type, stages, history } };
+ };
+ }
+ function handlePipelineCircuits(pipeline) {
+ return async () => {
+ const circuits = pipeline.listCircuits();
+ return { body: circuits };
+ };
+ }
+ function handlePipelineRetry(pipeline) {
+ return async (req) => {
+ const { id } = req.params;
+ const parsed = RetryItemBody.safeParse(req.body);
+ if (!parsed.success) {
+ return { status: 400, body: { error: "validation_failed", issues: parsed.error.issues } };
+ }
+ const { type, stage } = parsed.data;
+ const retried = pipeline.retryItem(id, type, stage);
+ if (!retried) {
+ return { status: 404, body: { error: "not_poisoned", message: `Item ${id} is not poisoned at stage ${stage}` } };
+ }
+ return { body: { retried: true, id, type, stage } };
+ };
+ }
+ function handlePipelineSkip(pipeline) {
+ return async (req) => {
+ const { id } = req.params;
+ const parsed = RetryItemBody.safeParse(req.body);
+ if (!parsed.success) {
+ return { status: 400, body: { error: "validation_failed", issues: parsed.error.issues } };
+ }
+ const { type, stage } = parsed.data;
+ const skipped = pipeline.skipItem(id, type, stage);
+ if (!skipped) {
+ return { status: 404, body: { error: "not_skippable", message: `Item ${id} is not failed or poisoned at stage ${stage}` } };
+ }
+ return { body: { skipped: true, id, type, stage } };
+ };
+ }
+ function handlePipelineRetryAll(pipeline) {
+ return async () => {
+ const count = pipeline.retryAllPoisoned();
+ return { body: { retried: count } };
+ };
+ }
+ function handlePipelineCircuitReset(pipeline) {
+ return async (req) => {
+ const { provider } = req.params;
+ if (!PIPELINE_PROVIDER_ROLES.includes(provider)) {
+ return {
+ status: 400,
+ body: {
+ error: "unknown_provider",
+ message: `Unknown provider role '${provider}'. Valid roles: ${PIPELINE_PROVIDER_ROLES.join(", ")}`
+ }
+ };
+ }
+ pipeline.resetCircuit(provider);
+ const unblocked = pipeline.unblockItemsForCircuit(provider);
+ return { body: { reset: true, provider, unblocked } };
+ };
+ }
+
+ // src/daemon/api/digest.ts
  var import_yaml = __toESM(require_dist(), 1);
  import fs6 from "fs";
  import path9 from "path";
- function indexAndEmbed(relativePath, noteId, embeddingText, metadata, deps) {
- indexNote(deps.index, deps.vaultDir, relativePath);
- if (deps.vectorIndex && embeddingText) {
- generateEmbedding(deps.embeddingProvider, embeddingText.slice(0, EMBEDDING_INPUT_LIMIT)).then((emb) => deps.vectorIndex.upsert(noteId, emb.embedding, metadata)).catch((err) => deps.logger.debug("embeddings", "Embedding failed", { id: noteId, error: err.message }));
+ function handleForceDigest(setForceDigest) {
+ return async () => {
+ setForceDigest();
+ return {
+ body: {
+ status: "queued",
+ message: "Digest will run on next pipeline tick (upstream must be clear)"
+ }
+ };
+ };
+ }
+ function readExtractTimestamp(extractPath) {
+ try {
+ const content = fs6.readFileSync(extractPath, "utf-8");
+ const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
+ if (!fmMatch) return null;
+ const fm = import_yaml.default.parse(fmMatch[1]);
+ return typeof fm.generated === "string" ? fm.generated : null;
+ } catch {
+ return null;
  }
  }
- function writeObservations(observations, sessionId, deps) {
- const written = writeObservationNotes(observations, sessionId, deps.vault, deps.index, deps.vaultDir);
- for (const note of written) {
- indexAndEmbed(
- note.path,
- note.id,
- `${note.observation.title}
- ${note.observation.content}`,
- { type: "spore", importance: "high", session_id: sessionId },
- deps
+ function handleDigestHealth(deps) {
+ return async () => {
+ const tracePath = path9.join(deps.vaultDir, "digest", "trace.jsonl");
+ const lastCycle = readLastRecord(tracePath);
+ let lastCycleTimestamp = lastCycle?.timestamp ?? null;
+ const digestDir = path9.join(deps.vaultDir, "digest");
+ try {
+ for (const file of fs6.readdirSync(digestDir)) {
+ if (!file.startsWith("extract-") || !file.endsWith(".md")) continue;
+ const extractTs = readExtractTimestamp(path9.join(digestDir, file));
+ if (extractTs && (!lastCycleTimestamp || extractTs > lastCycleTimestamp)) {
+ lastCycleTimestamp = extractTs;
+ }
+ }
+ } catch {
+ }
+ const substrateReady = deps.pipeline.newSubstrateSinceLastDigest();
+ return {
+ body: {
+ last_cycle: lastCycleTimestamp ? {
+ cycle_id: lastCycle?.cycleId ?? null,
+ timestamp: lastCycleTimestamp,
+ substrate_count: lastCycle ? Object.values(lastCycle.substrate).flat().length : null,
+ tiers_generated: lastCycle?.tiersGenerated ?? null,
+ duration_ms: lastCycle?.durationMs ?? null,
+ model: lastCycle?.model ?? null
+ } : null,
+ substrate_ready: substrateReady,
+ substrate_threshold: deps.minNotesForCycle,
+ metabolism_state: deps.metabolismState(),
+ digest_ready: deps.digestReady(),
+ cycle_in_progress: deps.cycleInProgress()
+ }
+ };
+ };
+ }
+
+ // src/daemon/pipeline.ts
+ import Database from "better-sqlite3";
+ import fs7 from "fs";
+ import path10 from "path";
+
+ // src/daemon/pipeline-classify.ts
+ var WELL_KNOWN_API_HOSTS = ["api.anthropic.com", "api.openai.com"];
+ var TRANSIENT_STATUS_PATTERNS = [" 429 ", " 500 ", " 503 "];
+ var MODEL_NOT_FOUND_PATTERNS = [
+ "model not found",
+ "model not loaded",
+ "no model loaded"
+ ];
+ var RESOURCE_FAILURE_PATTERNS = [
+ "model_load_failed",
+ "insufficient system resources",
+ "unsupported",
+ "not compatible"
+ ];
+ var TRANSIENT_MESSAGE_PATTERNS = [
+ "socket hang up"
+ ];
+ var CONFIG_ERROR_CODES = /* @__PURE__ */ new Set(["ECONNREFUSED"]);
+ var TRANSIENT_ERROR_CODES = /* @__PURE__ */ new Set(["ETIMEDOUT", "ECONNRESET"]);
+ function hasCode(err, code) {
+ return err.code === code;
+ }
+ function hasAnyCode(err, codes) {
+ const code = err.code;
+ return code !== void 0 && codes.has(code);
+ }
+ function messageContains(err, patterns) {
+ const msg = err.message.toLowerCase();
+ return patterns.some((p) => msg.includes(p.toLowerCase()));
+ }
+ function statusMatch(err, patterns) {
+ const msg = " " + err.message + " ";
+ return patterns.some((p) => msg.includes(p));
+ }
+ function connectionRefusedAction(ctx) {
+ const provider = ctx.providerName ?? "the provider";
+ const url = ctx.baseUrl ? ` at ${ctx.baseUrl}` : "";
+ return `${provider} is not reachable${url}. Check that ${provider} is running and the baseUrl is correct.`;
+ }
+ function modelNotFoundAction(ctx) {
+ const model = ctx.modelName ? `'${ctx.modelName}'` : "the configured model";
+ const provider = ctx.providerName ?? "the provider";
+ return `Model ${model} was not found in ${provider}. Ensure the model is downloaded and loaded.`;
+ }
+ function resourceExhaustionAction(ctx) {
+ const provider = ctx.providerName ?? "the provider";
+ return `${provider} could not load the model due to insufficient resources. Try a smaller model or free up memory.`;
+ }
+ function authFailureAction(status, ctx) {
+ const provider = ctx.providerName ?? "the provider";
+ if (status === "401") {
+ return `Authentication failed for ${provider}. Check that the API key is set and valid.`;
+ }
+ return `Access denied (403) for ${provider}. Check API key permissions and account status.`;
+ }
+ function dnsFailureAction(ctx) {
+ const provider = ctx.providerName ?? "the provider";
+ const host = ctx.configuredHost ?? "the configured host";
+ return `Cannot resolve host '${host}' for ${provider}. Check the baseUrl hostname.`;
+ }
+ function classifyError(error, context) {
+ const ctx = context ?? {};
+ if (error instanceof SyntaxError) {
+ return { type: "parse", suggestedAction: "LLM returned unparseable JSON. The response may be malformed or truncated." };
+ }
+ if (error.name === "ParseError") {
+ return { type: "parse", suggestedAction: "LLM returned an empty or invalid response. Check model health and prompt." };
+ }
+ const msgLower = error.message.toLowerCase();
+ if (msgLower.includes("empty content") || msgLower.includes("schema validation")) {
+ return { type: "parse", suggestedAction: "LLM returned an empty or schema-invalid response. Check model health and prompt." };
+ }
+ if (msgLower.includes("only reasoning") || msgLower.includes("empty after strip") || msgLower.includes("observation extraction failed")) {
+ return { type: "parse", suggestedAction: "LLM returned only reasoning tokens with no usable content. Check model reasoning settings." };
+ }
+ if (msgLower.includes("missing output") || msgLower.includes("no content in response") || msgLower.includes("summarization failed")) {
+ return { type: "parse", suggestedAction: "LLM response is missing expected fields. Check provider health and model compatibility." };
+ }
+ if (hasCode(error, "ECONNREFUSED") || hasAnyCode(error, CONFIG_ERROR_CODES)) {
+ return { type: "config", suggestedAction: connectionRefusedAction(ctx) };
+ }
+ if (messageContains(error, MODEL_NOT_FOUND_PATTERNS)) {
+ return { type: "config", suggestedAction: modelNotFoundAction(ctx) };
+ }
+ if (messageContains(error, RESOURCE_FAILURE_PATTERNS)) {
+ return { type: "config", suggestedAction: resourceExhaustionAction(ctx) };
+ }
+ if (statusMatch(error, [" 401 "])) {
+ return { type: "config", suggestedAction: authFailureAction("401", ctx) };
+ }
+ if (statusMatch(error, [" 403 "])) {
+ return { type: "config", suggestedAction: authFailureAction("403", ctx) };
+ }
+ if (/ 401$/.test(error.message) || error.message.endsWith(" 401")) {
+ return { type: "config", suggestedAction: authFailureAction("401", ctx) };
+ }
+ if (/ 403$/.test(error.message) || error.message.endsWith(" 403")) {
3603
+ return { type: "config", suggestedAction: authFailureAction("403", ctx) };
3604
+ }
3605
+ if (hasCode(error, "ENOTFOUND")) {
3606
+ if (ctx.configuredHost && error.message.includes(ctx.configuredHost)) {
3607
+ return { type: "config", suggestedAction: dnsFailureAction(ctx) };
3608
+ }
3609
+ if (WELL_KNOWN_API_HOSTS.some((host) => error.message.includes(host))) {
3610
+ return { type: "transient" };
3611
+ }
3612
+ return { type: "config", suggestedAction: dnsFailureAction(ctx) };
3613
+ }
3614
+ if (error.name === "AbortError") {
3615
+ return { type: "transient" };
3616
+ }
3617
+ if (hasAnyCode(error, TRANSIENT_ERROR_CODES)) {
3618
+ return { type: "transient" };
3619
+ }
3620
+ if (statusMatch(error, TRANSIENT_STATUS_PATTERNS)) {
3621
+ return { type: "transient" };
3622
+ }
3623
+ if (messageContains(error, TRANSIENT_MESSAGE_PATTERNS)) {
3624
+ return { type: "transient" };
3625
+ }
3626
+ return { type: "transient" };
3627
+ }
3628
+
3629
+ // src/daemon/pipeline.ts
3630
+ var PIPELINE_DB_FILENAME = "pipeline.db";
3631
+ var SCHEMA_SQL = `
3632
+ -- work_items: every piece of content in the pipeline
3633
+ CREATE TABLE IF NOT EXISTS work_items (
3634
+ id TEXT NOT NULL,
3635
+ item_type TEXT NOT NULL,
3636
+ source_path TEXT,
3637
+ created_at TEXT NOT NULL,
3638
+ updated_at TEXT NOT NULL,
3639
+ PRIMARY KEY (id, item_type)
3640
+ );
3641
+
3642
+ -- stage_transitions: append-only audit trail
3643
+ CREATE TABLE IF NOT EXISTS stage_transitions (
3644
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
3645
+ work_item_id TEXT NOT NULL,
3646
+ item_type TEXT NOT NULL,
3647
+ stage TEXT NOT NULL,
3648
+ status TEXT NOT NULL,
3649
+ attempt INTEGER DEFAULT 1,
3650
+ error_type TEXT,
3651
+ error_message TEXT,
3652
+ started_at TEXT,
3653
+ completed_at TEXT,
3654
+ output TEXT,
3655
+ created_at TEXT NOT NULL,
3656
+ FOREIGN KEY (work_item_id, item_type) REFERENCES work_items(id, item_type)
3657
+ );
3658
+
3659
+ -- stage_history: compacted transitions older than retention window
3660
+ CREATE TABLE IF NOT EXISTS stage_history (
3661
+ work_item_id TEXT NOT NULL,
3662
+ item_type TEXT NOT NULL,
3663
+ stage TEXT NOT NULL,
3664
+ total_attempts INTEGER,
3665
+ final_status TEXT NOT NULL,
3666
+ first_attempt TEXT NOT NULL,
3667
+ last_attempt TEXT NOT NULL,
3668
+ last_error TEXT,
3669
+ error_types TEXT,
3670
+ last_output TEXT,
3671
+ PRIMARY KEY (work_item_id, item_type, stage)
3672
+ );
3673
+
3674
+ -- circuit_breakers: per-provider-role state
3675
+ CREATE TABLE IF NOT EXISTS circuit_breakers (
3676
+ provider_role TEXT PRIMARY KEY,
3677
+ state TEXT NOT NULL DEFAULT 'closed',
3678
+ failure_count INTEGER DEFAULT 0,
3679
+ last_failure TEXT,
3680
+ last_error TEXT,
3681
+ opens_at TEXT,
3682
+ updated_at TEXT NOT NULL
3683
+ );
3684
+ `;
+ var INDEXES_SQL = `
+ CREATE INDEX IF NOT EXISTS idx_transitions_item_stage
+ ON stage_transitions(work_item_id, item_type, stage);
+ CREATE INDEX IF NOT EXISTS idx_transitions_status
+ ON stage_transitions(status);
+ CREATE INDEX IF NOT EXISTS idx_items_type
+ ON work_items(item_type);
+ `;
+ var VIEW_SQL = `
+ CREATE VIEW IF NOT EXISTS pipeline_status AS
+ WITH ranked AS (
+ SELECT st.*,
+ ROW_NUMBER() OVER (
+ PARTITION BY st.work_item_id, st.item_type, st.stage
+ ORDER BY st.id DESC
+ ) AS rn
+ FROM stage_transitions st
+ )
+ SELECT
+ wi.id, wi.item_type, wi.source_path,
+ r.stage, r.status, r.attempt,
+ r.error_type, r.error_message,
+ r.started_at, r.completed_at,
+ r.output
+ FROM work_items wi
+ JOIN ranked r ON r.work_item_id = wi.id AND r.item_type = wi.item_type
+ WHERE r.rn = 1;
+ `;
+ var HEALTH_STAGE_STATUS_SQL = `
+ SELECT stage, status, COUNT(*) as count
+ FROM pipeline_status
+ GROUP BY stage, status
+ `;
+ var HEALTH_CIRCUITS_SQL = `
+ SELECT provider_role, state, failure_count, last_error
+ FROM circuit_breakers
+ `;
+ var PipelineManager = class {
+ db;
+ config;
+ constructor(vaultDir, config) {
+ const dbPath = path10.join(vaultDir, PIPELINE_DB_FILENAME);
+ this.db = new Database(dbPath);
+ this.db.pragma("journal_mode = WAL");
+ this.db.pragma("foreign_keys = ON");
+ this.config = config;
+ this.init();
+ }
+ init() {
+ this.db.exec("DROP VIEW IF EXISTS pipeline_status");
+ this.db.exec(SCHEMA_SQL);
+ try {
+ this.db.exec("ALTER TABLE stage_transitions ADD COLUMN output TEXT");
+ } catch {
+ }
+ try {
+ this.db.exec("ALTER TABLE stage_history ADD COLUMN last_output TEXT");
+ } catch {
+ }
+ this.db.exec(INDEXES_SQL);
+ this.db.exec(VIEW_SQL);
+ }
+ /** Expose the underlying database for direct queries (used in tests and by higher-level methods). */
+ getDb() {
+ return this.db;
+ }
+ /** Read a PRAGMA value (used in tests to verify WAL mode and foreign keys). */
+ getPragma(name) {
+ return this.db.pragma(name, { simple: true });
+ }
+ /** Quick check whether the pipeline has any registered work items. */
+ isEmpty() {
+ const row = this.db.prepare("SELECT 1 FROM work_items LIMIT 1").get();
+ return !row;
+ }
+ /** Aggregate pipeline health: stage/status counts, circuit states, totals. */
+ health() {
+ const stageRows = this.db.prepare(HEALTH_STAGE_STATUS_SQL).all();
+ const stages = {};
+ const totals = {
+ pending: 0,
+ processing: 0,
+ failed: 0,
+ blocked: 0,
+ poisoned: 0,
+ succeeded: 0
+ };
+ for (const row of stageRows) {
+ if (!stages[row.stage]) {
+ stages[row.stage] = {};
+ }
+ stages[row.stage][row.status] = row.count;
+ if (row.status in totals) {
+ totals[row.status] += row.count;
+ }
+ }
+ const circuitRows = this.db.prepare(HEALTH_CIRCUITS_SQL).all();
+ const circuits = circuitRows.map((r) => ({
+ provider_role: r.provider_role,
+ state: r.state,
+ failure_count: r.failure_count,
+ last_error: r.last_error
+ }));
+ return { stages, circuits, totals };
+ }
+ // -------------------------------------------------------------------------
+ // Work item registration
+ // -------------------------------------------------------------------------
+ /**
+ * Register a work item in the pipeline. Creates the work_items row and
+ * initial stage_transitions for all applicable stages.
+ *
+ * Uses INSERT OR IGNORE for work_items (idempotency — re-registering is a no-op).
+ * Checks if transitions already exist before inserting.
+ */
+ register(itemId, itemType, sourcePath) {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ this.db.prepare(
+ "INSERT OR IGNORE INTO work_items (id, item_type, source_path, created_at, updated_at) VALUES (?, ?, ?, ?, ?)"
+ ).run(itemId, itemType, sourcePath ?? null, now, now);
+ const existingCount = this.db.prepare(
+ "SELECT COUNT(*) as cnt FROM stage_transitions WHERE work_item_id = ? AND item_type = ?"
+ ).get(itemId, itemType);
+ if (existingCount.cnt > 0) {
+ return;
+ }
+ const applicableStages = ITEM_STAGE_MAP[itemType] ?? [];
+ const insertStmt = this.db.prepare(
+ "INSERT INTO stage_transitions (work_item_id, item_type, stage, status, attempt, created_at) VALUES (?, ?, ?, ?, ?, ?)"
  );
- deps.logger.info("processor", "Observation written", { type: note.observation.type, title: note.observation.title, session_id: sessionId });
+ const insertAll = this.db.transaction(() => {
+ for (const stage of PIPELINE_STAGES) {
+ const status = applicableStages.includes(stage) ? "pending" : "skipped";
+ insertStmt.run(itemId, itemType, stage, status, 1, now);
+ }
+ });
+ insertAll();
  }
- if (written.length > 0) {
- const curationDeps = {
- index: deps.index,
- vectorIndex: deps.vectorIndex,
- embeddingProvider: deps.embeddingProvider,
- llmProvider: deps.llmProvider,
- vaultDir: deps.vaultDir,
- log: ((level, msg, data) => deps.logger[level]("curation", msg, data))
+ // -------------------------------------------------------------------------
+ // Stage transitions
+ // -------------------------------------------------------------------------
+ /**
+ * Record a stage transition. Append-only — never updates existing rows.
+ *
+ * When status is 'failed': checks retry limits and may auto-poison.
+ * When error_type is 'config': blocks all downstream stages.
+ */
+ advance(itemId, itemType, stage, status, error, output) {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const priorCount = this.db.prepare(
+ `SELECT COUNT(*) as cnt FROM stage_transitions
+ WHERE work_item_id = ? AND item_type = ? AND stage = ?
+ AND status IN ('failed', 'processing')`
+ ).get(itemId, itemType, stage);
+ const attempt = status === "failed" || status === "processing" ? Math.ceil((priorCount.cnt + 1) / 2) || 1 : priorCount.cnt > 0 ? Math.ceil(priorCount.cnt / 2) || 1 : 1;
+ let resolvedStatus = status;
+ if (status === "failed" && error?.errorType) {
+ const maxRetries = error.errorType === "transient" ? this.config.retry.transient_max : PIPELINE_PARSE_MAX_RETRIES;
+ const failedCount = this.db.prepare(
+ `SELECT COUNT(*) as cnt FROM stage_transitions
+ WHERE work_item_id = ? AND item_type = ? AND stage = ? AND status = 'failed'`
+ ).get(itemId, itemType, stage);
+ if (failedCount.cnt >= maxRetries) {
+ resolvedStatus = "poisoned";
+ }
+ }
+ const startedAt = status === "processing" ? now : null;
+ const completedAt = ["succeeded", "failed", "poisoned", "blocked", "skipped"].includes(resolvedStatus) ? now : null;
+ const outputJson = status === "succeeded" && output ? JSON.stringify(output) : null;
+ this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, output, created_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
+ ).run(
+ itemId,
+ itemType,
+ stage,
+ resolvedStatus,
+ attempt,
+ error?.errorType ?? null,
+ error?.errorMessage ?? null,
+ startedAt,
+ completedAt,
+ outputJson,
+ now
+ );
+ this.db.prepare("UPDATE work_items SET updated_at = ? WHERE id = ? AND item_type = ?").run(now, itemId, itemType);
+ if (status === "failed" && error?.errorType === "config") {
+ const stageIdx = PIPELINE_STAGES.indexOf(stage);
+ if (stageIdx >= 0) {
+ const downstreamStages = PIPELINE_STAGES.slice(stageIdx + 1);
+ for (const downstream of downstreamStages) {
+ const currentStatus = this.db.prepare(
+ `SELECT status FROM stage_transitions
+ WHERE work_item_id = ? AND item_type = ? AND stage = ?
+ ORDER BY id DESC LIMIT 1`
+ ).get(itemId, itemType, downstream);
+ if (currentStatus && currentStatus.status === "pending") {
+ this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'blocked', 1, 'config', ?, NULL, ?, ?)`
+ ).run(
+ itemId,
+ itemType,
+ downstream,
+ `blocked by ${stage} config failure`,
+ now,
+ now
+ );
+ }
+ }
+ }
+ }
+ }
+ // -------------------------------------------------------------------------
+ // Status queries
+ // -------------------------------------------------------------------------
+ /**
+ * Get current status for all stages of a work item.
+ * Queries the pipeline_status view filtered by work_item_id and item_type.
+ */
+ getItemStatus(itemId, itemType) {
+ return this.db.prepare(
+ `SELECT stage, status, attempt, error_type, error_message, started_at, completed_at, output
+ FROM pipeline_status
+ WHERE id = ? AND item_type = ?`
+ ).all(itemId, itemType);
+ }
+ // -------------------------------------------------------------------------
+ // Batch queries
+ // -------------------------------------------------------------------------
+ /**
+ * Get pending work items ready for processing at a given stage.
+ *
+ * Requirements:
+ * - Only items where the requested stage is 'pending'
+ * - Only items whose PREVIOUS stage (in PIPELINE_STAGES order) is 'succeeded' or 'skipped'
+ * - Exclude items in backoff window
+ * - Ordered by work_items.created_at ASC (oldest first)
+ */
+ nextBatch(stage, limit) {
+ const stageIdx = PIPELINE_STAGES.indexOf(stage);
+ const prevStage = stageIdx > 0 ? PIPELINE_STAGES[stageIdx - 1] : null;
+ const upstreamCheck = prevStage ? `AND EXISTS (
+ SELECT 1 FROM pipeline_status ps2
+ WHERE ps2.id = ps.id AND ps2.item_type = ps.item_type
+ AND ps2.stage = ? AND ps2.status IN ('succeeded', 'skipped')
+ )` : "";
+ const sql = `
+ SELECT ps.id, ps.item_type, ps.source_path, wi.created_at
+ FROM pipeline_status ps
+ JOIN work_items wi ON wi.id = ps.id AND wi.item_type = ps.item_type
+ WHERE ps.stage = ? AND ps.status IN ('pending', 'failed')
+ ${upstreamCheck}
+ AND NOT EXISTS (
+ SELECT 1 FROM stage_transitions st2
+ WHERE st2.work_item_id = ps.id AND st2.item_type = ps.item_type
+ AND st2.stage = ? AND st2.status = 'failed'
+ AND st2.completed_at IS NOT NULL
+ AND (julianday('now') - julianday(st2.completed_at)) * 86400000 <
+ ? * POWER(?, (
+ SELECT COUNT(*) FROM stage_transitions st3
+ WHERE st3.work_item_id = ps.id AND st3.item_type = ps.item_type
+ AND st3.stage = ? AND st3.status = 'failed'
+ ) - 1)
+ )
+ ORDER BY wi.created_at ASC
+ LIMIT ?`;
+ const params = [stage];
+ if (prevStage) {
+ params.push(prevStage);
+ }
+ params.push(stage, this.config.retry.backoff_base_seconds * 1e3, PIPELINE_BACKOFF_MULTIPLIER, stage, limit);
+ return this.db.prepare(sql).all(...params);
+ }
+ // -------------------------------------------------------------------------
+ // Circuit breakers
+ // -------------------------------------------------------------------------
+ /**
+ * Get current state of a circuit breaker for the given provider role.
+ * Returns the persisted row, or a synthetic default (closed, 0 failures)
+ * if no row exists yet.
+ */
+ circuitState(providerRole) {
+ const row = this.db.prepare("SELECT * FROM circuit_breakers WHERE provider_role = ?").get(providerRole);
+ if (row) {
+ return row;
+ }
+ return {
+ provider_role: providerRole,
+ state: "closed",
+ failure_count: 0,
+ last_failure: null,
+ last_error: null,
+ opens_at: null,
+ updated_at: (/* @__PURE__ */ new Date()).toISOString()
  };
- (async () => {
- for (const note of written) {
- try {
- await checkSupersession(note.id, curationDeps);
- } catch (err) {
- deps.logger.debug("curation", "Supersession check failed", { id: note.id, error: err.message });
+ }
+ /**
+ * Record a failure against a circuit breaker. Increments failure_count and
+ * updates last_error / last_failure. If failure_count reaches the configured
+ * failure_threshold, sets state to 'open' and calculates
+ * opens_at = now + configured cooldown_seconds.
+ */
+ tripCircuit(providerRole, errorMessage) {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const current = this.circuitState(providerRole);
+ const newFailureCount = current.failure_count + 1;
+ const shouldOpen = newFailureCount >= this.config.circuit_breaker.failure_threshold;
+ const newState = shouldOpen ? "open" : "closed";
+ const opensAt = shouldOpen ? new Date(Date.now() + this.config.circuit_breaker.cooldown_seconds * 1e3).toISOString() : null;
+ this.db.prepare(
+ `INSERT INTO circuit_breakers
+ (provider_role, state, failure_count, last_failure, last_error, opens_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?)
+ ON CONFLICT(provider_role) DO UPDATE SET
+ state = excluded.state,
+ failure_count = excluded.failure_count,
+ last_failure = excluded.last_failure,
+ last_error = excluded.last_error,
+ opens_at = excluded.opens_at,
+ updated_at = excluded.updated_at`
+ ).run(providerRole, newState, newFailureCount, now, errorMessage, opensAt, now);
+ }
+ /**
+ * Re-open a circuit after a failed half-open probe with doubled cooldown.
+ *
+ * When a half-open probe fails, the circuit should re-open with a cooldown
+ * of `previousCooldown * 2`, capped at the configured max_cooldown_seconds.
+ * This implements exponential backoff for repeated probe failures.
+ */
+ reopenCircuit(providerRole, errorMessage) {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const current = this.circuitState(providerRole);
+ let previousCooldown = this.config.circuit_breaker.cooldown_seconds * 1e3;
+ if (current.last_failure && current.opens_at) {
+ const lastFailureMs = new Date(current.last_failure).getTime();
+ const opensAtMs = new Date(current.opens_at).getTime();
+ const storedCooldown = opensAtMs - lastFailureMs;
+ if (storedCooldown > 0) {
+ previousCooldown = storedCooldown;
+ }
+ }
+ const doubledCooldown = Math.min(
+ previousCooldown * 2,
+ this.config.circuit_breaker.max_cooldown_seconds * 1e3
+ );
+ const opensAt = new Date(Date.now() + doubledCooldown).toISOString();
+ this.db.prepare(
+ `INSERT INTO circuit_breakers
+ (provider_role, state, failure_count, last_failure, last_error, opens_at, updated_at)
+ VALUES (?, 'open', ?, ?, ?, ?, ?)
+ ON CONFLICT(provider_role) DO UPDATE SET
+ state = 'open',
+ failure_count = excluded.failure_count,
+ last_failure = excluded.last_failure,
+ last_error = excluded.last_error,
+ opens_at = excluded.opens_at,
+ updated_at = excluded.updated_at`
+ ).run(providerRole, current.failure_count + 1, now, errorMessage, opensAt, now);
+ }
+ /**
+ * Manually reset a circuit breaker to closed state.
+ * Sets state='closed', failure_count=0, clears opens_at.
+ */
+ resetCircuit(providerRole) {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ this.db.prepare(
+ `INSERT INTO circuit_breakers
+ (provider_role, state, failure_count, last_failure, last_error, opens_at, updated_at)
+ VALUES (?, 'closed', 0, NULL, NULL, NULL, ?)
+ ON CONFLICT(provider_role) DO UPDATE SET
+ state = 'closed',
+ failure_count = 0,
+ opens_at = NULL,
+ updated_at = excluded.updated_at`
+ ).run(providerRole, now);
+ }
+ /**
+ * Check if an open circuit's cooldown has expired and is ready for a
+ * half-open probe. If state is 'open' and current time >= opens_at,
+ * transitions state to 'half-open' and returns true. Otherwise returns false.
+ */
+ probeCircuit(providerRole) {
+ const current = this.circuitState(providerRole);
+ if (current.state !== "open") {
+ return false;
+ }
+ if (!current.opens_at) {
+ return false;
+ }
+ const now = Date.now();
+ const opensAt = new Date(current.opens_at).getTime();
+ if (now < opensAt) {
+ return false;
+ }
+ this.db.prepare(
+ `UPDATE circuit_breakers
+ SET state = 'half-open', updated_at = ?
+ WHERE provider_role = ?`
+ ).run((/* @__PURE__ */ new Date()).toISOString(), providerRole);
+ return true;
+ }
+ /**
+ * When a circuit opens, find all stages that use this provider role and
+ * insert new 'blocked' transitions for all items that currently have
+ * 'pending' status at those stages.
+ *
+ * Returns the count of blocked items.
+ */
+ blockItemsForCircuit(providerRole) {
+ return this._transitionItemsForCircuit(providerRole, "pending", "blocked");
+ }
+ /**
+ * When a circuit closes, find all stages that use this provider role and
+ * insert new 'pending' transitions for all items that currently have
+ * 'blocked' status at those stages.
+ *
+ * Returns the count of unblocked items.
+ */
+ unblockItemsForCircuit(providerRole) {
+ return this._transitionItemsForCircuit(providerRole, "blocked", "pending");
+ }
+ /**
+ * Shared implementation for blockItemsForCircuit / unblockItemsForCircuit.
+ * Finds all stages mapped to providerRole, selects items at fromStatus,
+ * and inserts a new transition at toStatus.
+ */
+ _transitionItemsForCircuit(providerRole, fromStatus, toStatus) {
+ const affectedStages = Object.keys(STAGE_PROVIDER_MAP).filter((stage) => STAGE_PROVIDER_MAP[stage] === providerRole);
+ if (affectedStages.length === 0) {
+ return 0;
+ }
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ let transitionedCount = 0;
+ const isBlocking = toStatus === "blocked";
+ const doTransition = this.db.transaction(() => {
+ for (const stage of affectedStages) {
+ const items = this.db.prepare(
+ `SELECT id, item_type FROM pipeline_status
+ WHERE stage = ? AND status = ?`
+ ).all(stage, fromStatus);
+ for (const item of items) {
+ if (isBlocking) {
+ this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'blocked', 1, 'config', ?, NULL, ?, ?)`
+ ).run(item.id, item.item_type, stage, `circuit open: ${providerRole}`, now, now);
+ } else {
+ this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'pending', 1, NULL, NULL, NULL, NULL, ?)`
+ ).run(item.id, item.item_type, stage, now);
+ }
+ transitionedCount++;
+ }
+ }
+ });
+ doTransition();
+ return transitionedCount;
+ }
+ // -------------------------------------------------------------------------
+ // Compaction
+ // -------------------------------------------------------------------------
+ /**
+ * Compact stage_transitions older than retentionDays into stage_history rows.
+ *
+ * For each (work_item_id, item_type, stage) group whose transitions are older
+ * than the cutoff, inserts or replaces a stage_history row aggregating those
+ * transitions, then deletes the original rows.
+ *
+ * Returns `{ compacted, deleted }`: compacted = number of groups written to
+ * stage_history; deleted = number of transition rows removed.
+ */
+ compact(retentionDays = this.config.retention_days) {
+ const cutoff = new Date(Date.now() - retentionDays * MS_PER_DAY).toISOString();
+ const oldRows = this.db.prepare(
+ `SELECT id, work_item_id, item_type, stage, status, attempt, error_type, output, created_at
+ FROM stage_transitions
+ WHERE created_at < ?
+ ORDER BY id ASC`
+ ).all(cutoff);
+ if (oldRows.length === 0) {
+ return { compacted: 0, deleted: 0 };
+ }
+ const groups = /* @__PURE__ */ new Map();
+ for (const row of oldRows) {
+ const key = `${row.work_item_id}\0${row.item_type}\0${row.stage}`;
+ const existing = groups.get(key);
+ if (existing) {
+ existing.rows.push(row);
+ } else {
+ groups.set(key, { work_item_id: row.work_item_id, item_type: row.item_type, stage: row.stage, rows: [row] });
+ }
+ }
+ const upsertHistory = this.db.prepare(
+ `INSERT OR REPLACE INTO stage_history
+ (work_item_id, item_type, stage, total_attempts, final_status, first_attempt, last_attempt, last_error, error_types, last_output)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
+ );
+ const deleteTransitions = this.db.prepare(
+ "DELETE FROM stage_transitions WHERE work_item_id = ? AND item_type = ? AND stage = ? AND created_at < ?"
+ );
+ let compacted = 0;
+ let deleted = 0;
+ const doCompact = this.db.transaction(() => {
+ for (const group of groups.values()) {
+ const latestRow = group.rows[group.rows.length - 1];
+ const earliestRow = group.rows[0];
+ const errorTypeCounts = {};
+ for (const row of group.rows) {
+ if (row.error_type) {
+ errorTypeCounts[row.error_type] = (errorTypeCounts[row.error_type] ?? 0) + 1;
+ }
+ }
+ const lastErrorRow = this.db.prepare(
+ `SELECT error_message FROM stage_transitions
+ WHERE work_item_id = ? AND item_type = ? AND stage = ?
+ ORDER BY id DESC LIMIT 1`
+ ).get(group.work_item_id, group.item_type, group.stage);
+ const lastSucceeded = group.rows.filter((r) => r.status === "succeeded").pop();
+ const lastOutput = lastSucceeded?.output ?? null;
+ upsertHistory.run(
+ group.work_item_id,
+ group.item_type,
+ group.stage,
+ group.rows.length,
+ latestRow.status,
+ earliestRow.created_at,
+ latestRow.created_at,
+ lastErrorRow?.error_message ?? null,
+ JSON.stringify(errorTypeCounts),
+ lastOutput
+ );
+ const deleteResult = deleteTransitions.run(
+ group.work_item_id,
+ group.item_type,
+ group.stage,
+ cutoff
+ );
+ compacted++;
+ deleted += deleteResult.changes;
+ }
+ });
+ doCompact();
+ return { compacted, deleted };
+ }
+ // -------------------------------------------------------------------------
+ // Recovery
+ // -------------------------------------------------------------------------
+ /**
+ * Recover stuck items on daemon startup.
+ *
+ * Finds all items currently in 'processing' status (via the pipeline_status view)
+ * and inserts a new 'pending' transition for each, effectively resetting them
+ * for reprocessing.
+ *
+ * Returns the count of recovered items.
+ */
+ recoverStuck() {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const stuckItems = this.db.prepare(
+ `SELECT id, item_type, stage FROM pipeline_status WHERE status = 'processing'`
+ ).all();
+ if (stuckItems.length === 0) {
+ return 0;
+ }
+ const insertPending = this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'pending', 1, NULL, NULL, NULL, NULL, ?)`
+ );
+ const doRecover = this.db.transaction(() => {
+ for (const item of stuckItems) {
+ insertPending.run(item.id, item.item_type, item.stage, now);
+ }
+ });
+ doRecover();
+ return stuckItems.length;
+ }
+ // -------------------------------------------------------------------------
+ // API query helpers
+ // -------------------------------------------------------------------------
+ /**
+ * List work items from the pipeline_status view with optional filters and pagination.
+ *
+ * Filters: stage, status, item_type. All optional.
+ * Returns rows ordered by work_items.created_at DESC (newest first).
+ */
+ listItems(filters) {
+ const conditions = [];
+ const params = [];
+ if (filters.stage) {
+ conditions.push("ps.stage = ?");
+ params.push(filters.stage);
+ }
+ if (filters.status) {
+ conditions.push("ps.status = ?");
+ params.push(filters.status);
+ }
+ if (filters.type) {
+ conditions.push("ps.item_type = ?");
+ params.push(filters.type);
+ }
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ const countSql = `SELECT COUNT(*) as total FROM pipeline_status ps ${where}`;
+ const countRow = this.db.prepare(countSql).get(...params);
+ const limit = filters.limit ?? PIPELINE_ITEMS_DEFAULT_LIMIT;
+ const offset = filters.offset ?? 0;
+ const querySql = `
+ SELECT ps.id, ps.item_type, ps.source_path, ps.stage, ps.status,
+ ps.attempt, ps.error_type, ps.error_message, ps.started_at, ps.completed_at
+ FROM pipeline_status ps
+ JOIN work_items wi ON wi.id = ps.id AND wi.item_type = ps.item_type
+ ${where}
+ ORDER BY wi.created_at DESC
+ LIMIT ? OFFSET ?
+ `;
+ const items = this.db.prepare(querySql).all(...params, limit, offset);
+ return { items, total: countRow.total };
+ }
+ /**
+ * Get the full transition history for a single work item.
+ * Returns all stage_transitions rows ordered by id ASC (oldest first).
+ */
+ getTransitionHistory(itemId, itemType) {
+ return this.db.prepare(
+ `SELECT id, work_item_id, item_type, stage, status, attempt,
+ error_type, error_message, started_at, completed_at, output, created_at
+ FROM stage_transitions
+ WHERE work_item_id = ? AND item_type = ?
+ ORDER BY id ASC`
+ ).all(itemId, itemType);
+ }
+ /**
+ * Retry a single poisoned work item by inserting a new 'pending' transition
+ * at the specified stage. Returns true if the item was poisoned and retried,
+ * false if the item was not found or not poisoned at that stage.
+ */
+ retryItem(itemId, itemType, stage) {
+ const current = this.db.prepare(
+ `SELECT status FROM pipeline_status
+ WHERE id = ? AND item_type = ? AND stage = ?`
+ ).get(itemId, itemType, stage);
+ if (!current || current.status !== "poisoned") {
+ return false;
+ }
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'pending', 1, NULL, NULL, NULL, NULL, ?)`
+ ).run(itemId, itemType, stage, now);
+ return true;
+ }
+ /**
+ * Skip a poisoned or failed work item at the specified stage.
+ * Inserts a 'skipped' transition so the item no longer counts as an error.
+ * Returns true if the item was in an error state and was skipped.
+ */
+ skipItem(itemId, itemType, stage) {
+ const current = this.db.prepare(
+ `SELECT status FROM pipeline_status
+ WHERE id = ? AND item_type = ? AND stage = ?`
+ ).get(itemId, itemType, stage);
+ if (!current || current.status !== "poisoned" && current.status !== "failed") {
+ return false;
+ }
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'skipped', 1, NULL, NULL, NULL, ?, ?)`
+ ).run(itemId, itemType, stage, now, now);
+ return true;
+ }
+ /**
+ * Retry all poisoned items by inserting new 'pending' transitions.
+ * Returns the count of items retried.
+ */
+ retryAllPoisoned() {
+ const poisonedItems = this.db.prepare(
+ `SELECT id, item_type, stage FROM pipeline_status WHERE status = 'poisoned'`
+ ).all();
+ if (poisonedItems.length === 0) {
+ return 0;
+ }
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const insertPending = this.db.prepare(
+ `INSERT INTO stage_transitions
+ (work_item_id, item_type, stage, status, attempt, error_type, error_message, started_at, completed_at, created_at)
+ VALUES (?, ?, ?, 'pending', 1, NULL, NULL, NULL, NULL, ?)`
+ );
+ const doRetry = this.db.transaction(() => {
+ for (const item of poisonedItems) {
+ insertPending.run(item.id, item.item_type, item.stage, now);
+ }
+ });
+ doRetry();
+ return poisonedItems.length;
+ }
+ /**
+ * List all circuit breaker rows from the database.
+ */
+ listCircuits() {
+ return this.db.prepare("SELECT * FROM circuit_breakers ORDER BY provider_role ASC").all();
+ }
+ // -------------------------------------------------------------------------
+ // Tick processing
+ // -------------------------------------------------------------------------
+ handlers = null;
+ tickInProgress = false;
+ tickLogger = null;
+ /** Register stage handlers called by tick(). Must be set before tick() is useful. */
+ setHandlers(handlers) {
+ this.handlers = handlers;
+ }
+ /** Set a logger for tick diagnostics. */
+ setLogger(logger) {
+ this.tickLogger = logger;
+ }
+ /**
+ * Process one tick of the pipeline: for each tick-processable stage
+ * (extraction, embedding, consolidation), fetch a batch of pending items
+ * and run the corresponding handler.
+ *
+ * Stages are processed sequentially; items within a batch run concurrently.
+ *
+ * Guarded by tickInProgress — if a tick is already running, returns immediately.
+ */
+ async tick(batchSize) {
+ if (this.tickInProgress) {
+ return;
+ }
+ if (!this.handlers) {
+ return;
+ }
+ this.tickInProgress = true;
+ try {
+ for (const stage of PIPELINE_TICK_STAGES) {
+ const providerRole = STAGE_PROVIDER_MAP[stage];
+ if (providerRole) {
+ const circuit = this.circuitState(providerRole);
+ if (circuit.state === "open") {
+ const canProbe = this.probeCircuit(providerRole);
+ if (!canProbe) {
+ const blocked = this.blockItemsForCircuit(providerRole);
+ if (blocked > 0) {
+ this.tickLogger?.("debug", "pipeline", `Circuit open for ${providerRole}, blocked ${blocked} items`, { stage, providerRole });
+ }
+ continue;
+ }
+ this.tickLogger?.("debug", "pipeline", `Circuit half-open probe for ${providerRole}`, { stage, providerRole });
+ }
+ }
+ const batch = this.nextBatch(stage, batchSize);
+ if (batch.length === 0) {
+ continue;
  }
+ const handler = this.handlers[stage];
+ if (!handler) {
+ continue;
+ }
+ await Promise.all(
+ batch.map(async (item) => {
+ this.advance(item.id, item.item_type, stage, "processing");
+ try {
+ const output = await handler(item.id, item.item_type, item.source_path);
+ this.advance(item.id, item.item_type, stage, "succeeded", void 0, output ?? void 0);
+ if (providerRole) {
+ const circuitAfter = this.circuitState(providerRole);
+ if (circuitAfter.state === "half-open") {
+ this.resetCircuit(providerRole);
+ const unblocked = this.unblockItemsForCircuit(providerRole);
+ this.tickLogger?.("info", "pipeline", `Circuit closed after successful probe, unblocked ${unblocked} items`, { stage, providerRole });
+ }
+ }
+ } catch (err) {
+ const error = err instanceof Error ? err : new Error(String(err));
+ const classified = classifyError(error);
+ this.advance(item.id, item.item_type, stage, "failed", {
+ errorType: classified.type,
+ errorMessage: error.message
+ });
+ this.tickLogger?.("warn", "pipeline", `Stage handler failed: ${stage}`, {
+ itemId: item.id,
+ itemType: item.item_type,
+ errorType: classified.type,
+ error: error.message
+ });
+ if (classified.type === "config" && providerRole) {
+ const circuitBefore = this.circuitState(providerRole);
+ if (circuitBefore.state === "half-open") {
+ this.reopenCircuit(providerRole, error.message);
+ const blocked = this.blockItemsForCircuit(providerRole);
+ this.tickLogger?.("warn", "pipeline", `Half-open probe failed for ${providerRole}, re-opened with doubled cooldown, blocked ${blocked} items`, { stage, providerRole });
+ } else {
+ this.tripCircuit(providerRole, error.message);
+ const circuitAfter = this.circuitState(providerRole);
+ if (circuitAfter.state === "open") {
+ const blocked = this.blockItemsForCircuit(providerRole);
+ this.tickLogger?.("warn", "pipeline", `Circuit opened for ${providerRole}, blocked ${blocked} items`, { stage, providerRole });
+ }
+ }
+ }
+ }
+ })
+ );
  }
- })();
+ } finally {
+ this.tickInProgress = false;
+ }
+ }
+ // -------------------------------------------------------------------------
+ // Digest gating helpers
+ // -------------------------------------------------------------------------
+ /**
+ * Check if any upstream stages (extraction, embedding, consolidation) have
+ * active work items (pending, processing, or blocked — but not failed/poisoned).
+ *
+ * Used to gate the digest engine: digest should not run while upstream
+ * stages still have work in flight.
+ */
+ hasUpstreamWork() {
+ const active = this.db.prepare(
+ `SELECT COUNT(*) as cnt FROM pipeline_status
+ WHERE stage IN ('extraction', 'embedding', 'consolidation')
+ AND status IN ('pending', 'processing', 'blocked')`
+ ).get();
+ if (active.cnt > 0) return true;
+ const midPipeline = this.db.prepare(
+ `SELECT COUNT(*) as cnt FROM pipeline_status ps1
+ WHERE ps1.stage = 'digest' AND ps1.status = 'pending'
+ AND EXISTS (
+ SELECT 1 FROM pipeline_status ps2
+ WHERE ps2.id = ps1.id AND ps2.item_type = ps1.item_type
+ AND ps2.stage != 'digest'
+ AND ps2.status NOT IN ('succeeded', 'skipped', 'failed', 'poisoned')
+ )`
+ ).get();
+ return midPipeline.cnt > 0;
+ }
+ /**
+ * Count items at digest:pending where ALL other stages for that item
+ * are in a terminal state (succeeded, skipped, failed, or poisoned).
+ *
+ * Used to gate digest: only run when enough processed substrate has accumulated.
+ */
+ newSubstrateSinceLastDigest() {
+ const result = this.db.prepare(
+ `SELECT COUNT(*) as cnt FROM pipeline_status ps1
+ WHERE ps1.stage = 'digest' AND ps1.status = 'pending'
+ AND NOT EXISTS (
+ SELECT 1 FROM pipeline_status ps2
+ WHERE ps2.id = ps1.id AND ps2.item_type = ps1.item_type
+ AND ps2.stage != 'digest'
+ AND ps2.status NOT IN ('succeeded', 'skipped', 'failed', 'poisoned')
+ )`
+ ).get();
+ return result.cnt;
+ }
+ /**
+ * Mark all digest:pending items as digest:succeeded after a successful cycle.
+ * Returns the number of items advanced.
+ */
+ advanceDigestItems(output) {
+ const items = this.db.prepare(
+ `SELECT id, item_type FROM pipeline_status
+ WHERE stage = 'digest' AND status = 'pending'`
+ ).all();
+ const advanceAll = this.db.transaction(() => {
+ for (const item of items) {
+ this.advance(item.id, item.item_type, "digest", "succeeded", void 0, output);
+ }
+ });
+ advanceAll();
+ return items.length;
+ }
+ // -------------------------------------------------------------------------
+ // Rebuild from vault
+ // -------------------------------------------------------------------------
+ /**
+ * Walk the vault and infer pipeline stage completion from existing data.
+ *
+ * Used for first-run migration: when pipeline.db is empty but the vault
+ * already has session, spore, and artifact files from prior processing.
+ *
+ * Algorithm:
+ * 1. Walk sessions/, spores/, artifacts/ directories for .md files
+ * 2. Register each as a work item with inferred stage statuses
+ * 3. Check vector index for embedding status
+ * 4. Check digest trace for digest status
+ * 5. Infer extraction status for sessions (check if spores reference them)
+ * 6. Mark spore consolidation as pending (cannot reliably infer)
+ */
+ rebuild(vaultDir, vectorIndex, digestTracePath) {
+ const digestedIds = this.loadDigestedIds(digestTracePath);
+ const sporeSessionIds = this.collectSporeSessionIds(vaultDir);
+ const stages = {};
+ let registered = 0;
+ const bumpStage = (key) => {
+ stages[key] = (stages[key] ?? 0) + 1;
+ };
+ const runRebuild2 = this.db.transaction(() => {
+ const sessionsDir = path10.join(vaultDir, "sessions");
+ for (const filePath of walkMarkdownFiles(sessionsDir)) {
+ const filename = path10.basename(filePath, ".md");
+ const itemId = filename.startsWith("session-") ? filename.slice("session-".length) : filename;
+ const relativePath = path10.relative(vaultDir, filePath);
+ this.register(itemId, "session", relativePath);
+ registered++;
+ this.advance(itemId, "session", "capture", "succeeded");
+ bumpStage("capture:succeeded");
+ if (sporeSessionIds.has(itemId) || sporeSessionIds.has(`session-${itemId}`)) {
+ this.advance(itemId, "session", "extraction", "succeeded");
+ bumpStage("extraction:succeeded");
+ } else {
+ bumpStage("extraction:pending");
+ }
+ if (vectorIndex?.has(itemId) || vectorIndex?.has(`session-${itemId}`)) {
+ this.advance(itemId, "session", "embedding", "succeeded");
+ bumpStage("embedding:succeeded");
+ } else {
+ bumpStage("embedding:pending");
+ }
+ if (digestedIds.has(itemId) || digestedIds.has(`session-${itemId}`)) {
+ this.advance(itemId, "session", "digest", "succeeded");
+ bumpStage("digest:succeeded");
+ } else {
+ bumpStage("digest:pending");
+ }
+ }
+ const sporesDir = path10.join(vaultDir, "spores");
+ for (const filePath of walkMarkdownFiles(sporesDir)) {
+ const itemId = path10.basename(filePath, ".md");
+ const relativePath = path10.relative(vaultDir, filePath);
+ this.register(itemId, "spore", relativePath);
+ registered++;
+ this.advance(itemId, "spore", "capture", "succeeded");
+ bumpStage("capture:succeeded");
+ if (vectorIndex?.has(itemId)) {
+ this.advance(itemId, "spore", "embedding", "succeeded");
+ bumpStage("embedding:succeeded");
+ } else {
+ bumpStage("embedding:pending");
+ }
+ bumpStage("consolidation:pending");
+ if (digestedIds.has(itemId)) {
+ this.advance(itemId, "spore", "digest", "succeeded");
+ bumpStage("digest:succeeded");
+ } else {
+ bumpStage("digest:pending");
+ }
+ }
+ const artifactsDir = path10.join(vaultDir, "artifacts");
+ for (const filePath of walkMarkdownFiles(artifactsDir)) {
+ const itemId = path10.basename(filePath, ".md");
+ const relativePath = path10.relative(vaultDir, filePath);
+ this.register(itemId, "artifact", relativePath);
+ registered++;
+ this.advance(itemId, "artifact", "capture", "succeeded");
+ bumpStage("capture:succeeded");
+ if (vectorIndex?.has(itemId)) {
+ this.advance(itemId, "artifact", "embedding", "succeeded");
+ bumpStage("embedding:succeeded");
+ } else {
+ bumpStage("embedding:pending");
+ }
+ if (digestedIds.has(itemId)) {
+ this.advance(itemId, "artifact", "digest", "succeeded");
+ bumpStage("digest:succeeded");
+ } else {
+ bumpStage("digest:pending");
+ }
+ }
+ });
+ runRebuild2();
+ return { registered, stages };
+ }
+ /**
+ * Read digest trace JSONL and collect all note IDs that appear in any
+ * substrate array. Returns an empty Set if the trace file doesn't exist.
+ */
+ loadDigestedIds(tracePath) {
+ const ids = /* @__PURE__ */ new Set();
+ if (!tracePath) return ids;
+ let content;
+ try {
+ content = fs7.readFileSync(tracePath, "utf-8").trim();
+ } catch {
+ return ids;
+ }
+ if (!content) return ids;
+ for (const line of content.split("\n")) {
+ try {
+ const record = JSON.parse(line);
+ if (record.substrate) {
+ for (const arr of Object.values(record.substrate)) {
+ if (Array.isArray(arr)) {
+ for (const id of arr) {
+ ids.add(id);
+ }
+ }
+ }
+ }
+ } catch {
+ }
+ }
+ return ids;
+ }
+ /**
+ * Walk spore files and collect session IDs they reference.
+ * Used to infer whether extraction has been completed for a session.
+ */
+ collectSporeSessionIds(vaultDir) {
+ const sessionIds = /* @__PURE__ */ new Set();
+ const sporesDir = path10.join(vaultDir, "spores");
+ for (const filePath of walkMarkdownFiles(sporesDir)) {
+ try {
+ const content = fs7.readFileSync(filePath, "utf-8");
+ const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
+ if (!fmMatch) continue;
+ const sessionMatch = fmMatch[1].match(/^session:\s*(?:"\[\[)?([^"\]]+)/m);
+ if (sessionMatch) {
+ const rawSession = sessionMatch[1].trim();
+ sessionIds.add(rawSession);
+ if (rawSession.startsWith("session-")) {
+ sessionIds.add(rawSession.slice("session-".length));
+ }
+ }
+ } catch {
+ }
+ }
+ return sessionIds;
+ }
+ /** Close the database connection. */
+ close() {
+ this.db.close();
+ }
+ };
+
+ // src/daemon/main.ts
+ var import_yaml2 = __toESM(require_dist(), 1);
+ import fs8 from "fs";
+ import path11 from "path";
+ function indexAndEmbed(relativePath, noteId, embeddingText, metadata, deps) {
+ indexNote(deps.index, deps.vaultDir, relativePath);
+ if (deps.vectorIndex && embeddingText) {
+ generateEmbedding(deps.embeddingProvider, embeddingText.slice(0, EMBEDDING_INPUT_LIMIT)).then((emb) => deps.vectorIndex.upsert(noteId, emb.embedding, metadata)).catch((err) => deps.logger.debug("embeddings", "Embedding failed", { id: noteId, error: err.message }));
  }
  }
  async function captureArtifacts(candidates, classified, sessionId, deps, lineage) {
@@ -3325,28 +4768,28 @@ ${candidate.content}`,
3325
4768
  }
3326
4769
  }
3327
4770
  function migrateSporeFiles(vaultDir) {
3328
- const sporesDir = path9.join(vaultDir, "spores");
3329
- if (!fs6.existsSync(sporesDir)) return 0;
4771
+ const sporesDir = path11.join(vaultDir, "spores");
4772
+ if (!fs8.existsSync(sporesDir)) return 0;
3330
4773
  let moved = 0;
3331
- const entries = fs6.readdirSync(sporesDir);
4774
+ const entries = fs8.readdirSync(sporesDir);
3332
4775
  for (const entry of entries) {
3333
- const fullPath = path9.join(sporesDir, entry);
4776
+ const fullPath = path11.join(sporesDir, entry);
3334
4777
  if (!entry.endsWith(".md")) continue;
3335
- if (fs6.statSync(fullPath).isDirectory()) continue;
4778
+ if (fs8.statSync(fullPath).isDirectory()) continue;
3336
4779
  try {
3337
- const content = fs6.readFileSync(fullPath, "utf-8");
4780
+ const content = fs8.readFileSync(fullPath, "utf-8");
3338
4781
  const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
3339
4782
  if (!fmMatch) continue;
3340
- const parsed = import_yaml.default.parse(fmMatch[1]);
4783
+ const parsed = import_yaml2.default.parse(fmMatch[1]);
3341
4784
  const obsType = parsed.observation_type;
3342
4785
  if (!obsType) continue;
3343
4786
  const normalizedType = obsType.replace(/_/g, "-");
3344
- const targetDir = path9.join(sporesDir, normalizedType);
3345
- fs6.mkdirSync(targetDir, { recursive: true });
3346
- const targetPath = path9.join(targetDir, entry);
3347
- fs6.renameSync(fullPath, targetPath);
4787
+ const targetDir = path11.join(sporesDir, normalizedType);
4788
+ fs8.mkdirSync(targetDir, { recursive: true });
4789
+ const targetPath = path11.join(targetDir, entry);
4790
+ fs8.renameSync(fullPath, targetPath);
3348
4791
  const now = /* @__PURE__ */ new Date();
3349
- fs6.utimesSync(targetPath, now, now);
4792
+ fs8.utimesSync(targetPath, now, now);
3350
4793
  moved++;
3351
4794
  } catch {
3352
4795
  }
@@ -3359,22 +4802,28 @@ async function main() {
3359
4802
  process.stderr.write("Usage: mycod --vault <path>\n");
3360
4803
  process.exit(1);
3361
4804
  }
3362
- const vaultDir = path9.resolve(vaultArg);
4805
+ const vaultDir = path11.resolve(vaultArg);
3363
4806
  const config = loadConfig(vaultDir);
3364
- const logger = new DaemonLogger(path9.join(vaultDir, "logs"), {
4807
+ const logger = new DaemonLogger(path11.join(vaultDir, "logs"), {
3365
4808
  level: config.daemon.log_level,
3366
4809
  maxSize: config.daemon.max_log_size
3367
4810
  });
4811
+ logger.info("daemon", "Config loaded", {
4812
+ vault: vaultDir,
4813
+ digest_enabled: config.digest.enabled,
4814
+ intelligence_provider: config.intelligence.llm.provider,
4815
+ embedding_provider: config.intelligence.embedding.provider
4816
+ });
3368
4817
  let uiDir = null;
3369
4818
  {
3370
- let dir = path9.dirname(new URL(import.meta.url).pathname);
4819
+ let dir = path11.dirname(new URL(import.meta.url).pathname);
3371
4820
  for (let i = 0; i < 5; i++) {
3372
- const candidate = path9.join(dir, "dist", "ui");
3373
- if (fs6.existsSync(path9.join(dir, "package.json")) && fs6.existsSync(candidate)) {
4821
+ const candidate = path11.join(dir, "dist", "ui");
4822
+ if (fs8.existsSync(path11.join(dir, "package.json")) && fs8.existsSync(candidate)) {
3374
4823
  uiDir = candidate;
3375
4824
  break;
3376
4825
  }
3377
- dir = path9.dirname(dir);
4826
+ dir = path11.dirname(dir);
3378
4827
  }
3379
4828
  }
3380
4829
  if (uiDir) {
@@ -3382,16 +4831,8 @@ async function main() {
3382
4831
  }
3383
4832
  const server = new DaemonServer({ vaultDir, logger, uiDir: uiDir ?? void 0 });
3384
4833
  const registry = new SessionRegistry({
3385
- gracePeriod: config.daemon.grace_period,
3386
- onEmpty: async () => {
3387
- logger.info("daemon", "Grace period expired, shutting down");
3388
- metabolism?.stop();
3389
- planWatcher.stopFileWatcher();
3390
- await server.stop();
3391
- vectorIndex?.close();
3392
- index.close();
3393
- logger.close();
3394
- process.exit(0);
4834
+ gracePeriod: 0,
4835
+ onEmpty: () => {
3395
4836
  }
3396
4837
  });
3397
4838
  const llmProvider = createLlmProvider(config.intelligence.llm);
@@ -3399,15 +4840,198 @@ async function main() {
  let vectorIndex = null;
  try {
  const testEmbed = await embeddingProvider.embed("test");
- vectorIndex = new VectorIndex(path9.join(vaultDir, "vectors.db"), testEmbed.dimensions);
+ vectorIndex = new VectorIndex(path11.join(vaultDir, "vectors.db"), testEmbed.dimensions);
  logger.info("embeddings", "Vector index initialized", { dimensions: testEmbed.dimensions });
  } catch (error) {
  logger.warn("embeddings", "Vector index unavailable", { error: error.message });
  }
  const processor = new BufferProcessor(llmProvider, config.intelligence.llm.context_window, config.capture);
  const vault = new VaultWriter(vaultDir);
- const index = new MycoIndex(path9.join(vaultDir, "index.db"));
+ const index = new MycoIndex(path11.join(vaultDir, "index.db"));
  const lineageGraph = new LineageGraph(vaultDir);
+ const pipeline = new PipelineManager(vaultDir, config.pipeline);
+ const recoveredCount = pipeline.recoverStuck();
+ if (recoveredCount > 0) {
+ logger.info("pipeline", "Recovered stuck pipeline items", { count: recoveredCount });
+ }
+ if (pipeline.isEmpty()) {
+ logger.info("pipeline", "First-run migration: rebuilding pipeline from vault");
+ const result = pipeline.rebuild(vaultDir, vectorIndex, path11.join(vaultDir, "digest", "trace.jsonl"));
+ logger.info("pipeline", "Pipeline rebuild complete", { registered: result.registered, stages: result.stages });
+ }
+ let consolidationEngine = null;
+ let consolidationPassRanThisTick = false;
+ pipeline.setHandlers({
+ extraction: async (itemId, itemType, sourcePath) => {
+ if (itemType !== "session") return;
+ logger.info("pipeline", "Extraction started", { session_id: itemId });
+ const fullPath = sourcePath ? path11.join(vaultDir, sourcePath) : null;
+ if (!fullPath || !fs8.existsSync(fullPath)) {
+ throw new Error(`Session note not found: ${sourcePath}`);
+ }
+ const fileContent = fs8.readFileSync(fullPath, "utf-8");
+ const { body, frontmatter } = stripFrontmatter(fileContent);
+ const conversationMarkdown = extractSection(body, CONVERSATION_HEADING);
+ const user = typeof frontmatter.user === "string" && frontmatter.user ? frontmatter.user : void 0;
+ if (!conversationMarkdown.trim()) {
+ throw new Error(`No conversation content in session note: ${sourcePath}`);
+ }
+ const extractionResult = await processor.process(conversationMarkdown, itemId);
+ if (extractionResult.degraded) {
+ throw new Error(`Observation extraction failed for session ${itemId}`);
+ }
+ const { summary: narrative, title } = await processor.summarizeSession(
+ conversationMarkdown,
+ itemId,
+ user
+ );
+ if (narrative.includes(SUMMARIZATION_FAILED_MARKER)) {
+ throw new Error(`Summarization failed for session ${itemId}: ${narrative}`);
+ }
+ const written = writeObservationNotes(
+ extractionResult.observations,
+ itemId,
+ vault,
+ index,
+ vaultDir
+ );
+ for (const note of written) {
+ indexAndEmbed(
+ note.path,
+ note.id,
+ `${note.observation.title}
+ ${note.observation.content}`,
+ { type: "spore", importance: "high", session_id: itemId },
+ indexDeps
+ );
+ logger.info("pipeline", "Spore written", { type: note.observation.type, title: note.observation.title, session_id: itemId });
+ }
+ const fmEnd = fileContent.indexOf("---", 4);
+ const parsedFm = import_yaml2.default.parse(fileContent.slice(4, fmEnd));
+ parsedFm.observations_count = written.length;
+ parsedFm.summary_tokens = estimateTokens(narrative);
+ parsedFm.extraction_model = config.intelligence.llm.model;
+ const fmYaml = import_yaml2.default.stringify(parsedFm, { defaultStringType: "QUOTE_DOUBLE", defaultKeyType: "PLAIN" }).trim();
+ const updatedBody = updateTitleAndSummary(body, title, narrative);
+ fs8.writeFileSync(fullPath, `---
+ ${fmYaml}
+ ---
+
+ ${updatedBody}`, "utf-8");
+ indexNote(index, vaultDir, sourcePath);
+ for (const note of written) {
+ pipeline.register(note.id, "spore", note.path);
+ pipeline.advance(note.id, "spore", "capture", "succeeded");
+ }
+ logger.info("pipeline", "Extraction completed", {
+ session_id: itemId,
+ observations: written.length,
+ title
+ });
+ logger.debug("pipeline", "Extraction detail", {
+ session_id: itemId,
+ types: written.map((n) => n.observation.type)
+ });
+ return {
+ observations: written.length,
+ observation_ids: written.map((n) => n.id),
+ summary_tokens: estimateTokens(narrative),
+ title,
+ model: config.intelligence.llm.model
+ };
+ },
+ embedding: async (itemId, itemType, sourcePath) => {
+ if (!vectorIndex || !embeddingProvider) {
+ throw new Error("Embedding provider or vector index not available");
+ }
+ if (!sourcePath) {
+ throw new Error(`No source path for ${itemType}/${itemId}`);
+ }
+ const fullPath = path11.join(vaultDir, sourcePath);
+ if (!fs8.existsSync(fullPath)) {
+ throw new Error(`Vault note not found: ${sourcePath}`);
+ }
+ const fileContent = fs8.readFileSync(fullPath, "utf-8");
+ const { body, frontmatter } = stripFrontmatter(fileContent);
+ let embeddableText;
+ if (itemType === "session") {
+ const title = typeof frontmatter.title === "string" ? frontmatter.title : "";
+ const summary = typeof frontmatter.summary === "string" ? frontmatter.summary : "";
+ const calloutMatch = body.match(/> \[!abstract\] Summary\n((?:> .*\n?)*)/);
+ const narrative = calloutMatch ? calloutMatch[1].replace(/^> /gm, "").trim() : "";
+ embeddableText = `${title}
+ ${narrative || summary}`.trim();
+ } else {
+ const titleMatch = body.match(/^#\s+(.+)$/m);
+ const title = titleMatch ? titleMatch[1] : "";
+ embeddableText = `${title}
+ ${body}`.trim();
+ }
+ if (!embeddableText) {
+ logger.debug("pipeline", "No embeddable content, skipping", { id: itemId, type: itemType });
+ return;
+ }
+ const embeddingStart = Date.now();
+ const result = await generateEmbedding(
+ embeddingProvider,
+ embeddableText.slice(0, EMBEDDING_INPUT_LIMIT)
+ );
+ const metadata = {
+ type: itemType,
+ file_path: sourcePath,
+ session_id: typeof frontmatter.session === "string" ? frontmatter.session : typeof frontmatter.id === "string" && itemType === "session" ? frontmatter.id : "",
+ branch: typeof frontmatter.branch === "string" ? frontmatter.branch : ""
+ };
+ vectorIndex.upsert(itemId, result.embedding, metadata);
+ if (itemType === "session" && sourcePath) {
+ vault.updateNoteFrontmatter(sourcePath, {
+ embedding_model: config.intelligence.embedding.model
+ }, true);
+ }
+ logger.info("pipeline", "Embedding stored", {
+ id: itemId,
+ type: itemType,
+ dimensions: result.dimensions,
+ duration_ms: Date.now() - embeddingStart
+ });
+ return {
+ model: result.model,
+ dimensions: result.dimensions
+ };
+ },
+ consolidation: async (itemId, itemType) => {
+ if (itemType !== "spore") return;
+ const supersededIds = await checkSupersession(itemId, {
+ index,
+ vectorIndex,
+ embeddingProvider,
+ llmProvider,
+ vaultDir,
+ log: ((level, msg, data) => logger[level]("curation", msg, data))
+ });
+ if (consolidationEngine && !consolidationPassRanThisTick) {
+ consolidationPassRanThisTick = true;
+ await consolidationEngine.runPass();
+ }
+ return {
+ superseded: supersededIds ?? [],
+ wisdom_created: null,
+ candidates_evaluated: 0
+ };
+ }
+ });
+ pipeline.setLogger((level, domain, message, data) => {
+ const fn = logger[level];
+ if (typeof fn === "function") fn.call(logger, domain, message, data);
+ });
+ const pipelineTickTimer = setInterval(async () => {
+ try {
+ consolidationPassRanThisTick = false;
+ await pipeline.tick(config.pipeline.batch_size);
+ } catch (err) {
+ logger.error("pipeline", "Pipeline tick failed", { error: err.message });
+ }
+ }, config.pipeline.tick_interval_seconds * 1e3);
  const transcriptMiner = new TranscriptMiner({
  additionalAdapters: config.capture.transcript_paths.map(
  (p) => createPerProjectAdapter(p, claudeCodeAdapter.parseTurns)
@@ -3415,21 +5039,28 @@ async function main() {
  });
  let activeStopProcessing = null;
  const indexDeps = { index, vaultDir, vectorIndex, embeddingProvider, llmProvider, logger };
- const bufferDir = path9.join(vaultDir, "buffer");
+ const bufferDir = path11.join(vaultDir, "buffer");
  const sessionBuffers = /* @__PURE__ */ new Map();
  const sessionFilePaths = /* @__PURE__ */ new Map();
  const capturedArtifactPaths = /* @__PURE__ */ new Map();
- if (fs6.existsSync(bufferDir)) {
+ let startupCleanedCount = 0;
+ if (fs8.existsSync(bufferDir)) {
  const cutoff = Date.now() - STALE_BUFFER_MAX_AGE_MS;
- for (const file of fs6.readdirSync(bufferDir)) {
- const filePath = path9.join(bufferDir, file);
- const stat4 = fs6.statSync(filePath);
+ for (const file of fs8.readdirSync(bufferDir)) {
+ const filePath = path11.join(bufferDir, file);
+ const stat4 = fs8.statSync(filePath);
  if (stat4.mtimeMs < cutoff) {
- fs6.unlinkSync(filePath);
+ fs8.unlinkSync(filePath);
+ startupCleanedCount++;
  logger.debug("daemon", "Cleaned stale buffer", { file });
  }
  }
  }
+ if (startupCleanedCount > 0) {
+ logger.info("daemon", "Buffer cleanup complete", {
+ stale_removed: startupCleanedCount
+ });
+ }
  const needsMigrationReindex = index.query({ type: "memory" }).length > 0;
  const migrated = migrateSporeFiles(vaultDir);
  if (migrated > 0) {
@@ -3462,10 +5093,10 @@ async function main() {
  logger.info("watcher", "Plan detected", { source: event.source, file: event.filePath });
  if (event.filePath) {
  try {
- const content = fs6.readFileSync(event.filePath, "utf-8");
- const relativePath = path9.relative(vaultDir, event.filePath);
- const title = content.match(/^#\s+(.+)$/m)?.[1] ?? path9.basename(event.filePath);
- const planId = `plan-${path9.basename(event.filePath, ".md")}`;
+ const content = fs8.readFileSync(event.filePath, "utf-8");
+ const relativePath = path11.relative(vaultDir, event.filePath);
+ const title = content.match(/^#\s+(.+)$/m)?.[1] ?? path11.basename(event.filePath);
+ const planId = `plan-${path11.basename(event.filePath, ".md")}`;
  indexAndEmbed(
  relativePath,
  planId,
@@ -3486,16 +5117,17 @@ ${content}`,
  }
  });
  planWatcher.startFileWatcher();
+ const digestLlmConfig = {
+ provider: config.digest.intelligence.provider ?? config.intelligence.llm.provider,
+ model: config.digest.intelligence.model ?? config.intelligence.llm.model,
+ base_url: config.digest.intelligence.base_url ?? config.intelligence.llm.base_url,
+ context_window: config.digest.intelligence.context_window
+ };
+ const digestLlm = config.digest.intelligence.model || config.digest.intelligence.provider ? createLlmProvider(digestLlmConfig) : llmProvider;
  let metabolism = null;
+ let triggerForceDigest;
  if (config.digest.enabled) {
- const digestLlmConfig = {
- provider: config.digest.intelligence.provider ?? config.intelligence.llm.provider,
- model: config.digest.intelligence.model ?? config.intelligence.llm.model,
- base_url: config.digest.intelligence.base_url ?? config.intelligence.llm.base_url,
- context_window: config.digest.intelligence.context_window
- };
  logger.debug("digest", "Digest LLM config", digestLlmConfig);
- const digestLlm = config.digest.intelligence.model || config.digest.intelligence.provider ? createLlmProvider(digestLlmConfig) : llmProvider;
  logger.debug("digest", `Using ${digestLlm.name} provider for digest`);
  const digestEngine = new DigestEngine({
  vaultDir,
@@ -3505,7 +5137,7 @@ ${content}`,
  log: (level, message, data) => logger[level]("digest", message, data)
  });
  if (config.digest.consolidation.enabled) {
- const consolidationEngine = new ConsolidationEngine({
+ consolidationEngine = new ConsolidationEngine({
  vaultDir,
  index,
  vectorIndex,
@@ -3514,55 +5146,77 @@ ${content}`,
  maxTokens: config.digest.consolidation.max_tokens,
  log: (level, message, data) => logger[level]("consolidation", message, data)
  });
- digestEngine.registerPrePass("consolidation", async () => {
- const result = await consolidationEngine.runPass();
- if (result && result.consolidated > 0) {
- logger.info("consolidation", `Consolidation pass: ${result.consolidated} wisdom notes, ${result.sporesSuperseded} spores superseded`);
- }
- });
- logger.info("consolidation", "Auto-consolidation enabled as digest pre-pass");
+ logger.info("consolidation", "Auto-consolidation enabled as pipeline stage");
  }
  metabolism = new Metabolism(config.digest.metabolism);
- logger.debug("digest", "Firing initial digest cycle (background)");
- digestEngine.runCycle().then((result) => {
- if (result) {
- metabolism.onSubstrateFound();
- logger.info("digest", `Initial digest cycle: ${result.tiersGenerated.length} tiers, ${result.durationMs}ms`);
- }
- }).catch((err) => {
- logger.warn("digest", "Initial digest cycle failed", { error: err.message });
- metabolism.onEmptyCycle();
- });
+ let digestReady = true;
+ let forceDigest = false;
+ triggerForceDigest = () => {
+ forceDigest = true;
+ };
  metabolism.start(async () => {
+ digestReady = true;
+ });
+ const originalTick = pipeline.tick.bind(pipeline);
+ pipeline.tick = async (batchSize) => {
+ await originalTick(batchSize);
+ if (!digestReady) return;
+ if (pipeline.hasUpstreamWork()) {
+ logger.debug("digest", "Digest deferred \u2014 upstream stages pending");
+ return;
+ }
+ if (!forceDigest) {
+ const ready = pipeline.newSubstrateSinceLastDigest();
+ if (ready < config.digest.substrate.min_notes_for_cycle) {
+ logger.debug("digest", "Digest deferred \u2014 insufficient substrate", {
+ ready,
+ threshold: config.digest.substrate.min_notes_for_cycle
+ });
+ return;
+ }
+ }
+ digestReady = false;
+ forceDigest = false;
  try {
  const cycleResult = await digestEngine.runCycle();
  if (cycleResult) {
  metabolism.onSubstrateFound();
+ const digestOutput = {
+ included_in_cycle: cycleResult.cycleId,
+ tiers_generated: cycleResult.tiersGenerated
+ };
+ const advanced = pipeline.advanceDigestItems(digestOutput);
+ if (advanced > 0) {
+ logger.debug("digest", `Advanced ${advanced} pipeline items to digest:succeeded`);
+ }
  logger.info("digest", `Digest cycle ${cycleResult.cycleId}: ${cycleResult.tiersGenerated.length} tiers`);
  } else {
  metabolism.onEmptyCycle();
- logger.debug("digest", "No substrate, backing off");
  }
  } catch (err) {
  logger.warn("digest", "Digest cycle failed", { error: err.message });
  metabolism.onEmptyCycle();
  }
- });
- logger.info("digest", "Digest enabled \u2014 starting metabolism");
+ };
+ server.registerRoute("POST", "/api/pipeline/digest/force", handleForceDigest(() => {
+ forceDigest = true;
+ }));
+ server.registerRoute("GET", "/api/pipeline/digest-health", handleDigestHealth({
+ vaultDir,
+ pipeline,
+ minNotesForCycle: config.digest.substrate.min_notes_for_cycle,
+ metabolismState: () => metabolism?.state ?? "disabled",
+ digestReady: () => digestReady,
+ cycleInProgress: () => digestEngine.isCycleInProgress
+ }));
+ logger.info("digest", "Digest enabled \u2014 controlled by pipeline tick");
  }
  const batchManager = new BatchManager(async (closedBatch) => {
  if (closedBatch.length === 0) return;
  const sessionId = closedBatch[0].session_id;
- const asRecords = closedBatch;
- const result = await processor.process(asRecords, sessionId);
- if (!result.degraded) {
- writeObservations(result.observations, sessionId, { vault, ...indexDeps });
- }
- logger.debug("processor", "Batch processed", {
+ logger.debug("processor", "Batch closed (extraction deferred to pipeline)", {
  session_id: sessionId,
- events: closedBatch.length,
- observations: result.observations.length,
- degraded: result.degraded
+ events: closedBatch.length
  });
  const allPaths = sessionFilePaths.get(sessionId);
  const alreadyCaptured = capturedArtifactPaths.get(sessionId) ?? /* @__PURE__ */ new Set();
@@ -3580,7 +5234,7 @@ ${content}`,
  }
  const captured = capturedArtifactPaths.get(sessionId);
  for (const c of candidates) {
- const absPath = path9.resolve(process.cwd(), c.path);
+ const absPath = path11.resolve(process.cwd(), c.path);
  captured.add(absPath);
  }
  }).catch((err) => logger.warn("processor", "Incremental artifact capture failed", {
@@ -3612,7 +5266,7 @@ ${content}`,
  logger.debug("lineage", "Heuristic detection failed", { error: err.message });
  }
  metabolism?.activate();
- logger.info("lifecycle", "Session registered", { session_id, branch });
+ logger.info("lifecycle", "Session registered", { session_id, branch, started_at: started_at ?? null });
  return { body: { ok: true, sessions: registry.sessions } };
  });
  server.registerRoute("POST", "/sessions/unregister", async (req) => {
@@ -3620,14 +5274,14 @@ ${content}`,
  registry.unregister(session_id);
  try {
  const cutoff = Date.now() - STALE_BUFFER_MAX_AGE_MS;
- for (const file of fs6.readdirSync(bufferDir)) {
+ for (const file of fs8.readdirSync(bufferDir)) {
  if (!file.endsWith(".jsonl")) continue;
  const bufferSessionId = file.replace(".jsonl", "");
  if (bufferSessionId === session_id) continue;
- const filePath = path9.join(bufferDir, file);
- const stat4 = fs6.statSync(filePath);
+ const filePath = path11.join(bufferDir, file);
+ const stat4 = fs8.statSync(filePath);
  if (stat4.mtimeMs < cutoff) {
- fs6.unlinkSync(filePath);
+ fs8.unlinkSync(filePath);
  logger.debug("daemon", "Cleaned stale buffer", { file });
  }
  }
@@ -3644,6 +5298,15 @@ ${content}`,
  const validated = EventBody.parse(req.body);
  const event = { ...validated, timestamp: validated.timestamp ?? (/* @__PURE__ */ new Date()).toISOString() };
  logger.debug("hooks", "Event received", { type: event.type, session_id: event.session_id });
+ if (validated.type === "user_prompt") {
+ const v = validated;
+ const promptText = String(v.prompt ?? "");
+ logger.info("hooks", "User prompt received", {
+ session_id: validated.session_id,
+ prompt_preview: promptText.slice(0, LOG_PROMPT_PREVIEW_CHARS),
+ prompt_length: promptText.length
+ });
+ }
  if (!registry.getSession(event.session_id)) {
  registry.register(event.session_id, { started_at: event.timestamp });
  logger.debug("lifecycle", "Auto-registered session from event", { session_id: event.session_id });
@@ -3655,6 +5318,10 @@ ${content}`,
  batchManager.addEvent(event);
  if (validated.type === "tool_use") {
  const v = validated;
+ logger.debug("hooks", "Tool use event", {
+ session_id: validated.session_id,
+ tool_name: String(v.tool_name ?? "")
+ });
  planWatcher.checkToolEvent({ tool_name: String(v.tool_name ?? ""), tool_input: v.tool_input, session_id: validated.session_id });
  const toolName = String(v.tool_name ?? "");
  if (toolName === "Write" || toolName === "Edit") {
@@ -3677,7 +5344,16 @@ ${content}`,
  logger.debug("lifecycle", "Auto-registered session from stop event", { session_id: sessionId });
  }
  const sessionMeta = registry.getSession(sessionId);
- logger.info("hooks", "Stop received", { session_id: sessionId, has_transcript_path: !!hookTranscriptPath, has_response: !!lastAssistantMessage });
+ logger.info("hooks", "Stop received", {
+ session_id: sessionId,
+ has_transcript_path: !!hookTranscriptPath,
+ has_response: !!lastAssistantMessage
+ });
+ logger.debug("hooks", "Stop event detail", {
+ session_id: sessionId,
+ transcript_path: hookTranscriptPath ?? null,
+ last_message_preview: lastAssistantMessage?.slice(0, LOG_MESSAGE_PREVIEW_CHARS) ?? null
+ });
  const run = () => processStopEvent(sessionId, user, sessionMeta, hookTranscriptPath, lastAssistantMessage).catch((err) => {
  logger.error("processor", "Stop processing failed", { session_id: sessionId, error: err.message });
  });
@@ -3687,6 +5363,32 @@ ${content}`,
  });
  return { body: { ok: true } };
  });
+ function enrichTurnsWithToolMetadata(turns, events) {
+ if (events.length === 0 || turns.length === 0) return;
+ const toolEvents = events.filter((e) => e.type === "tool_use");
+ if (toolEvents.length === 0) return;
+ let cursor = 0;
+ for (let i = 0; i < turns.length; i++) {
+ const turnEnd = i + 1 < turns.length ? turns[i + 1].timestamp : null;
+ const breakdown = {};
+ const files = /* @__PURE__ */ new Set();
+ while (cursor < toolEvents.length) {
+ const ts = String(toolEvents[cursor].timestamp ?? "");
+ if (turnEnd !== null && ts >= turnEnd) break;
+ const evt = toolEvents[cursor];
+ const toolName = String(evt.tool_name ?? evt.tool ?? "unknown");
+ breakdown[toolName] = (breakdown[toolName] ?? 0) + 1;
+ const input = evt.tool_input;
+ const filePath = input?.file_path ?? input?.path;
+ if (typeof filePath === "string") files.add(filePath);
+ cursor++;
+ }
+ if (Object.keys(breakdown).length > 0) {
+ turns[i].toolBreakdown = breakdown;
+ if (files.size > 0) turns[i].files = [...files];
+ }
+ }
+ }
  async function processStopEvent(sessionId, user, sessionMeta, hookTranscriptPath, lastAssistantMessage) {
  const lastBatch = batchManager.finalize(sessionId);
  const transcriptResult = transcriptMiner.getAllTurnsWithSource(sessionId, hookTranscriptPath);
@@ -3720,17 +5422,24 @@ ${content}`,
  lastTurn.aiResponse = lastAssistantMessage;
  }
  }
+ enrichTurnsWithToolMetadata(allTurns, bufferEvents);
+ const imageCount = allTurns.reduce((sum, t) => sum + (t.images?.length ?? 0), 0);
+ logger.debug("processor", "Transcript parsed", {
+ session_id: sessionId,
+ turn_count: allTurns.length,
+ image_count: imageCount
+ });
  const ended = (/* @__PURE__ */ new Date()).toISOString();
  let started = allTurns.length > 0 && allTurns[0].timestamp ? allTurns[0].timestamp : ended;
- const sessionsDir = path9.join(vaultDir, "sessions");
+ const sessionsDir = path11.join(vaultDir, "sessions");
  const sessionFileName = `${sessionNoteId(sessionId)}.md`;
  let existingContent;
  const duplicatePaths = [];
  try {
- for (const dateDir of fs6.readdirSync(sessionsDir)) {
- const candidate = path9.join(sessionsDir, dateDir, sessionFileName);
+ for (const dateDir of fs8.readdirSync(sessionsDir)) {
+ const candidate = path11.join(sessionsDir, dateDir, sessionFileName);
  try {
- const content = fs6.readFileSync(candidate, "utf-8");
+ const content = fs8.readFileSync(candidate, "utf-8");
  if (!existingContent || content.length > existingContent.length) {
  existingContent = content;
  }
@@ -3741,11 +5450,18 @@ ${content}`,
  } catch {
  }
  let existingTurnCount = 0;
+ let existingExtractionFields = null;
  if (existingContent) {
  const fmMatch = existingContent.match(/^---\n([\s\S]*?)\n---/);
  if (fmMatch) {
- const parsed = import_yaml.default.parse(fmMatch[1]);
+ const parsed = import_yaml2.default.parse(fmMatch[1]);
  if (typeof parsed.started === "string") started = parsed.started;
+ const extractionKeys = ["observations_count", "summary_tokens", "extraction_model", "embedding_model"];
+ const preserved = {};
+ for (const key of extractionKeys) {
+ if (parsed[key] !== void 0) preserved[key] = parsed[key];
+ }
+ if (Object.keys(preserved).length > 0) existingExtractionFields = preserved;
  }
  const turnMatches = existingContent.match(/^### Turn \d+/gm);
  existingTurnCount = turnMatches?.length ?? 0;
@@ -3766,47 +5482,22 @@ ${content}`,
  logger.debug("processor", "No new turns, skipping session rewrite", { session_id: sessionId, turns: allTurns.length });
  return;
  }
- const conversationText = allTurns.map((t, i) => {
- const parts = [`### Turn ${i + 1}`];
- if (t.prompt) parts.push(`Prompt: ${t.prompt}`);
- if (t.toolCount > 0) parts.push(`Tools: ${t.toolCount} calls`);
- if (t.aiResponse) parts.push(`Response: ${t.aiResponse}`);
- return parts.join("\n");
- }).join("\n\n");
- const conversationSection = `${CONVERSATION_HEADING}
-
- ${conversationText}`;
- const observationPromise = lastBatch.length > 0 ? processor.process(lastBatch, sessionId).catch((err) => {
- logger.warn("processor", "Observation extraction failed", { session_id: sessionId, error: err.message });
- return null;
- }) : Promise.resolve(null);
- const artifactPromise = artifactCandidates.length > 0 ? processor.classifyArtifacts(artifactCandidates, sessionId).then((classified) => captureArtifacts(artifactCandidates, classified, sessionId, { vault, ...indexDeps }, lineageGraph)).catch((err) => {
- logger.warn("processor", "Artifact capture failed", { session_id: sessionId, error: err.message });
- }) : Promise.resolve();
- const summaryPromise = processor.summarizeSession(conversationSection, sessionId, user).catch((err) => {
- logger.warn("processor", "Session summarization failed", { session_id: sessionId, error: err.message });
- return null;
- });
- const [observationResult, , summaryResult] = await Promise.all([observationPromise, artifactPromise, summaryPromise]);
- if (observationResult && !observationResult.degraded) {
- writeObservations(observationResult.observations, sessionId, { vault, ...indexDeps });
- }
  const date = started.slice(0, 10);
  const relativePath = sessionRelativePath(sessionId, date);
- const targetFullPath = path9.join(vaultDir, relativePath);
+ const targetFullPath = path11.join(vaultDir, relativePath);
  for (const dup of duplicatePaths) {
  if (dup !== targetFullPath) {
  try {
- fs6.unlinkSync(dup);
+ fs8.unlinkSync(dup);
  logger.debug("lifecycle", "Removed duplicate session file", { path: dup });
  } catch {
  }
  }
  }
- const attachmentsDir = path9.join(vaultDir, "attachments");
+ const attachmentsDir = path11.join(vaultDir, "attachments");
  const hasImages = allTurns.some((t) => t.images?.length);
  if (hasImages) {
- fs6.mkdirSync(attachmentsDir, { recursive: true });
+ fs8.mkdirSync(attachmentsDir, { recursive: true });
  }
  const turnImageNames = /* @__PURE__ */ new Map();
  for (let i = 0; i < allTurns.length; i++) {
@@ -3817,9 +5508,9 @@ ${conversationText}`;
  const img = turn.images[j];
  const ext = extensionForMimeType(img.mediaType);
  const filename = `${bareSessionId(sessionId)}-t${i + 1}-${j + 1}.${ext}`;
- const filePath = path9.join(attachmentsDir, filename);
- if (!fs6.existsSync(filePath)) {
- fs6.writeFileSync(filePath, Buffer.from(img.data, "base64"));
+ const filePath = path11.join(attachmentsDir, filename);
+ if (!fs8.existsSync(filePath)) {
+ fs8.writeFileSync(filePath, Buffer.from(img.data, "base64"));
  logger.debug("processor", "Image saved", { filename, turn: i + 1 });
  }
  names.push(filename);
@@ -3828,9 +5519,15 @@ ${conversationText}`;
  }
  let title = `Session ${sessionId}`;
  let narrative = "";
- if (summaryResult) {
- title = summaryResult.title;
- narrative = summaryResult.summary;
+ if (existingContent) {
+ const existingTitle = existingContent.match(/^# (.+)$/m)?.[1];
+ if (existingTitle && existingTitle !== `Session ${sessionId}`) {
+ title = existingTitle;
+ }
+ const calloutMatch = existingContent.match(/> \[!abstract\] Summary\n((?:> .*\n?)*)/);
+ if (calloutMatch) {
+ narrative = calloutMatch[1].replace(/^> /gm, "").trim();
+ }
  }
  const relatedMemories = index.query({ type: "spore", limit: RELATED_SPORES_LIMIT }).filter((n) => {
  const fm = n.frontmatter;
@@ -3848,13 +5545,15 @@ ${conversationText}`;
  turns: allTurns.map((t, i) => ({
  prompt: t.prompt,
  toolCount: t.toolCount,
+ toolBreakdown: t.toolBreakdown,
+ files: t.files,
  aiResponse: t.aiResponse,
  images: turnImageNames.get(i)
  }))
  });
  const parentId = lineageGraph.getParent(sessionId);
  const parentLink = parentId ? lineageGraph.getLinks().find((l) => l.child === sessionId) : void 0;
- vault.writeSession({
+ const notePath = vault.writeSession({
  id: sessionId,
  user,
  started,
@@ -3863,62 +5562,23 @@ ${conversationText}`;
  parent: parentId ? sessionWikilink(parentId) : void 0,
  parent_reason: parentLink?.signal,
  tools_used: allTurns.reduce((sum, t) => sum + t.toolCount, 0),
+ transcript_source: turnSource,
+ transcript_path: hookTranscriptPath,
  summary
  });
- indexAndEmbed(
- relativePath,
- sessionNoteId(sessionId),
- narrative,
- { type: "session", session_id: sessionId },
- indexDeps
- );
- logger.debug("processor", "Session turns", { source: turnSource, total: allTurns.length });
- await artifactPromise;
- if (!parentId && vectorIndex && narrative) {
- generateEmbedding(embeddingProvider, narrative).then(async (emb) => {
- const candidates = vectorIndex.search(emb.embedding, { limit: LINEAGE_SIMILARITY_CANDIDATES }).filter((r) => r.metadata.type === "session" && r.id !== sessionNoteId(sessionId));
- if (candidates.length === 0) return;
- const candidateNotes = index.queryByIds(candidates.map((c) => c.id));
- const noteMap = new Map(candidateNotes.map((n) => [n.id, n]));
- const scores = await Promise.all(candidates.map(async (candidate) => {
- const note = noteMap.get(candidate.id);
- if (!note) return { id: candidate.id, score: 0 };
- try {
- const prompt = buildSimilarityPrompt(narrative, note.content.slice(0, CANDIDATE_CONTENT_PREVIEW));
- const response = await llmProvider.summarize(prompt, { maxTokens: LINEAGE_SIMILARITY_MAX_TOKENS, reasoning: LLM_REASONING_MODE });
- const score = extractNumber(response.text);
- return { id: candidate.id, score: isNaN(score) ? 0 : score };
- } catch {
- return { id: candidate.id, score: 0 };
- }
- }));
- const best = scores.reduce((a, b) => b.score > a.score ? b : a);
- if (best.score >= LINEAGE_SIMILARITY_THRESHOLD) {
- const bestParentId = bareSessionId(best.id);
- const confidence = best.score >= LINEAGE_SIMILARITY_HIGH_CONFIDENCE ? "high" : "medium";
- lineageGraph.addLink({
- parent: bestParentId,
- child: sessionId,
- signal: "semantic_similarity",
- confidence
- });
- try {
- vault.updateNoteFrontmatter(relativePath, {
- parent: sessionWikilink(bestParentId),
- parent_reason: "semantic_similarity"
- });
- indexNote(index, vaultDir, relativePath);
- } catch {
- }
- logger.info("lineage", "LLM similarity parent detected", {
- child: sessionId,
- parent: bestParentId,
- score: best.score
- });
- }
- }).catch((err) => logger.debug("lineage", "Similarity detection failed", { error: err.message }));
+ logger.debug("processor", "Session note written", {
+ session_id: sessionId,
+ path: notePath,
+ content_length: summary.length
+ });
+ if (existingExtractionFields) {
+ vault.updateNoteFrontmatter(relativePath, existingExtractionFields);
  }
- logger.info("processor", "Session note written", { session_id: sessionId, path: relativePath });
+ indexNote(index, vaultDir, relativePath);
+ logger.debug("processor", "Session turns", { source: turnSource, total: allTurns.length });
+ pipeline.register(sessionId, "session", relativePath);
+ pipeline.advance(sessionId, "session", "capture", "succeeded");
+ logger.info("processor", "Session captured and registered in pipeline", { session_id: sessionId, path: relativePath });
  }
  server.registerRoute("POST", "/context", async (req) => {
  const { session_id, branch } = ContextBody.parse(req.body);
@@ -3931,16 +5591,23 @@ ${conversationText}`;
  if (branch) meta.push(`
  Branch:: \`${branch}\``);
  meta.push(`Session:: \`${session_id}\``);
- logger.debug("context", `Injecting digest extract (tier ${result.tier})`, { session_id, fallback: result.fallback });
+ logger.info("context", "Session context injected", {
+ session_id,
+ source: "digest",
+ tier: result.tier,
+ fallback: result.fallback
+ });
  return { body: { text: meta.join("\n\n"), source: "digest", tier: result.tier } };
  }
  }
  const parts = [];
+ const layerCounts = { plans: 0, parent_session: false, branch: !!branch };
  const plans = index.query({ type: "plan" });
  const activePlans = plans.filter((p) => {
  const status = p.frontmatter.status;
  return status === "active" || status === "in_progress";
  });
+ layerCounts.plans = activePlans.length;
  if (activePlans.length > 0) {
  const planLines = activePlans.slice(0, SESSION_CONTEXT_MAX_PLANS).map((p) => {
  const status = p.frontmatter.status;
@@ -3949,11 +5616,13 @@ Branch:: \`${branch}\``);
  parts.push(`### Active Plans
${planLines.join("\n")}`);
  }
+ let parentId;
  if (session_id) {
- const parentId = lineageGraph.getParent(session_id);
+ parentId = lineageGraph.getParent(session_id);
  if (parentId) {
  const parentNotes = index.queryByIds([sessionNoteId(parentId)]);
  if (parentNotes.length > 0) {
+ layerCounts.parent_session = true;
  const parent = parentNotes[0];
  parts.push(`### Previous Session
  - **${parent.title}**: ${parent.content.slice(0, CONTEXT_SESSION_PREVIEW_CHARS)} \`[${parent.id}]\``);
@@ -3965,6 +5634,17 @@ ${planLines.join("\n")}`);
  }
  parts.push(`Session:: \`${session_id}\``);
  if (parts.length > 0) {
+ logger.info("context", "Session context injected", {
+ session_id,
+ source: "layers",
+ ...layerCounts,
+ parts: parts.length
+ });
+ logger.debug("context", "Session context layer detail", {
+ session_id,
+ plan_titles: activePlans.slice(0, 5).map((p) => p.title),
+ parent_id: parentId ?? null
+ });
  return { body: { text: parts.join("\n\n") } };
  }
  return { body: { text: "" } };
@@ -3982,6 +5662,7 @@ ${planLines.join("\n")}`);
  if (!prompt || prompt.length < PROMPT_CONTEXT_MIN_LENGTH || !vectorIndex) {
  return { body: { text: "" } };
  }
+ const searchStart = Date.now();
  try {
  const emb = await generateEmbedding(embeddingProvider, prompt.slice(0, EMBEDDING_INPUT_LIMIT));
  const results = vectorIndex.search(emb.embedding, {
@@ -3989,7 +5670,14 @@ ${planLines.join("\n")}`);
  type: "spore",
  relativeThreshold: PROMPT_CONTEXT_MIN_SIMILARITY
  });
- if (results.length === 0) return { body: { text: "" } };
+ if (results.length === 0) {
+ logger.debug("context", "No matching spores for prompt", {
+ session_id,
+ prompt_preview: prompt.slice(0, LOG_PROMPT_PREVIEW_CHARS),
+ search_duration_ms: Date.now() - searchStart
+ });
+ return { body: { text: "" } };
+ }
  const noteMap = new Map(
  index.queryByIds(results.map((r) => r.id)).map((n) => [n.id, n])
  );
@@ -4005,10 +5693,18 @@ ${planLines.join("\n")}`);
  if (lines.length === 0) return { body: { text: "" } };
  const injected = `**Relevant spores for this task:**
${lines.join("\n")}`;
- logger.debug("context", "Prompt context injected", {
+ logger.info("context", "Prompt context injected", {
  session_id,
  spores: lines.length,
- prompt_preview: prompt.slice(0, 50)
+ prompt_preview: prompt.slice(0, LOG_PROMPT_PREVIEW_CHARS)
+ });
+ logger.debug("context", "Prompt context spore details", {
+ session_id,
+ spore_ids: results.map((r) => r.id),
+ similarities: results.map((r) => r.similarity.toFixed(3)),
+ prompt_length: prompt.length,
+ context_chars: injected.length,
+ search_duration_ms: Date.now() - searchStart
  });
  return { body: { text: injected } };
  } catch (err) {
@@ -4036,6 +5732,17 @@ ${lines.join("\n")}`;
  metabolism
  }));
  server.registerRoute("GET", "/api/logs", async (req) => handleGetLogs(logger.getRingBuffer(), req.query));
+ const ExternalLogBody = external_exports.object({
+ level: external_exports.enum(["debug", "info", "warn", "error"]),
+ component: external_exports.string(),
+ message: external_exports.string(),
+ data: external_exports.record(external_exports.string(), external_exports.unknown()).optional()
+ });
+ server.registerRoute("POST", "/api/log", async (req) => {
+ const { level, component, message, data } = ExternalLogBody.parse(req.body);
+ logger.log(level, component, message, data);
+ return { body: { ok: true } };
+ });
  server.registerRoute("GET", "/api/models", async (req) => handleGetModels(req));
  server.registerRoute("POST", "/api/restart", async (req) => handleRestart({ vaultDir, progressTracker }, req.body));
  server.registerRoute("GET", "/api/progress/:token", async (req) => handleGetProgress(progressTracker, req.params.token));
@@ -4045,11 +5752,13 @@ ${lines.join("\n")}`;
  index,
  vectorIndex,
  llmProvider,
+ digestLlmProvider: digestLlm,
  embeddingProvider,
  progressTracker,
+ pipeline,
+ onForceDigest: triggerForceDigest,
  log: (level, message, data) => {
- const fn = logger[level];
- if (typeof fn === "function") fn.call(logger, "operations", message, data);
+ logger.log(level, "operations", message, data);
  }
  };
  server.registerRoute("POST", "/api/rebuild", async () => handleRebuild(operationDeps));
@@ -4057,6 +5766,14 @@ ${lines.join("\n")}`;
  server.registerRoute("POST", "/api/curate", async (req) => handleCurate(operationDeps, req.body, runCuration));
  server.registerRoute("POST", "/api/reprocess", async (req) => handleReprocess(operationDeps, req.body));
  server.registerRoute("GET", "/api/sessions", async () => handleGetSessions(index));
+ server.registerRoute("GET", "/api/pipeline/health", handlePipelineHealth(pipeline));
+ server.registerRoute("GET", "/api/pipeline/items", handlePipelineItems(pipeline));
+ server.registerRoute("GET", "/api/pipeline/items/:id", handlePipelineItemDetail(pipeline));
+ server.registerRoute("GET", "/api/pipeline/circuits", handlePipelineCircuits(pipeline));
+ server.registerRoute("POST", "/api/pipeline/retry/:id", handlePipelineRetry(pipeline));
+ server.registerRoute("POST", "/api/pipeline/skip/:id", handlePipelineSkip(pipeline));
+ server.registerRoute("POST", "/api/pipeline/retry-all", handlePipelineRetryAll(pipeline));
+ server.registerRoute("POST", "/api/pipeline/circuit/:provider/reset", handlePipelineCircuitReset(pipeline));
  await server.evictExistingDaemon();
  const resolvedPort = await resolvePort(config.daemon.port, vaultDir);
  if (resolvedPort === 0) {
@@ -4076,7 +5793,7 @@ ${lines.join("\n")}`;
  try {
  const { loadTemplate } = await import("./templates-XPRBOWCE.js");
  const portalContent = loadTemplate("portal", { port: String(server.port) });
- fs6.writeFileSync(path9.join(vaultDir, "_portal.md"), portalContent, "utf-8");
+ fs8.writeFileSync(path11.join(vaultDir, "_portal.md"), portalContent, "utf-8");
  } catch {
  }
  if (needsMigrationReindex) {
@@ -4094,6 +5811,8 @@ ${lines.join("\n")}`;
  await activeStopProcessing;
  }
  metabolism?.stop();
+ clearInterval(pipelineTickTimer);
+ pipeline.close();
  planWatcher.stopFileWatcher();
  registry.destroy();
  await server.stop();
@@ -4114,4 +5833,4 @@ export {
  chokidar/index.js:
  (*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) *)
  */
- //# sourceMappingURL=main-XZ6X4BUX.js.map
+ //# sourceMappingURL=main-RB727YRP.js.map