@velvetmonkey/flywheel-memory 2.0.145 → 2.0.146

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +3 -3
  2. package/dist/index.js +852 -164
  3. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -5657,8 +5657,8 @@ function createContext(variables = {}) {
5657
5657
  steps: {}
5658
5658
  };
5659
5659
  }
5660
- function resolvePath(obj, path37) {
5661
- const parts = path37.split(".");
5660
+ function resolvePath(obj, path39) {
5661
+ const parts = path39.split(".");
5662
5662
  let current = obj;
5663
5663
  for (const part of parts) {
5664
5664
  if (current === void 0 || current === null) {
@@ -6116,7 +6116,7 @@ __export(conditions_exports, {
6116
6116
  shouldStepExecute: () => shouldStepExecute
6117
6117
  });
6118
6118
  import fs28 from "fs/promises";
6119
- import path29 from "path";
6119
+ import path30 from "path";
6120
6120
  async function evaluateCondition(condition, vaultPath2, context) {
6121
6121
  const interpolatedPath = condition.path ? interpolate(condition.path, context) : void 0;
6122
6122
  const interpolatedSection = condition.section ? interpolate(condition.section, context) : void 0;
@@ -6169,7 +6169,7 @@ async function evaluateCondition(condition, vaultPath2, context) {
6169
6169
  }
6170
6170
  }
6171
6171
  async function evaluateFileExists(vaultPath2, notePath, expectExists) {
6172
- const fullPath = path29.join(vaultPath2, notePath);
6172
+ const fullPath = path30.join(vaultPath2, notePath);
6173
6173
  try {
6174
6174
  await fs28.access(fullPath);
6175
6175
  return {
@@ -6184,7 +6184,7 @@ async function evaluateFileExists(vaultPath2, notePath, expectExists) {
6184
6184
  }
6185
6185
  }
6186
6186
  async function evaluateSectionExists(vaultPath2, notePath, sectionName, expectExists) {
6187
- const fullPath = path29.join(vaultPath2, notePath);
6187
+ const fullPath = path30.join(vaultPath2, notePath);
6188
6188
  try {
6189
6189
  await fs28.access(fullPath);
6190
6190
  } catch {
@@ -6215,7 +6215,7 @@ async function evaluateSectionExists(vaultPath2, notePath, sectionName, expectEx
6215
6215
  }
6216
6216
  }
6217
6217
  async function evaluateFrontmatterExists(vaultPath2, notePath, fieldName, expectExists) {
6218
- const fullPath = path29.join(vaultPath2, notePath);
6218
+ const fullPath = path30.join(vaultPath2, notePath);
6219
6219
  try {
6220
6220
  await fs28.access(fullPath);
6221
6221
  } catch {
@@ -6246,7 +6246,7 @@ async function evaluateFrontmatterExists(vaultPath2, notePath, fieldName, expect
6246
6246
  }
6247
6247
  }
6248
6248
  async function evaluateFrontmatterEquals(vaultPath2, notePath, fieldName, expectedValue) {
6249
- const fullPath = path29.join(vaultPath2, notePath);
6249
+ const fullPath = path30.join(vaultPath2, notePath);
6250
6250
  try {
6251
6251
  await fs28.access(fullPath);
6252
6252
  } catch {
@@ -6390,7 +6390,7 @@ var init_taskHelpers = __esm({
6390
6390
  });
6391
6391
 
6392
6392
  // src/index.ts
6393
- import * as path36 from "path";
6393
+ import * as path38 from "path";
6394
6394
  import { readFileSync as readFileSync6, realpathSync, existsSync as existsSync3 } from "fs";
6395
6395
  import { fileURLToPath as fileURLToPath2 } from "url";
6396
6396
  import { dirname as dirname7, join as join21 } from "path";
@@ -6628,8 +6628,8 @@ function updateIndexProgress(parsed, total) {
6628
6628
  function normalizeTarget(target) {
6629
6629
  return target.toLowerCase().replace(/\.md$/, "");
6630
6630
  }
6631
- function normalizeNotePath(path37) {
6632
- return path37.toLowerCase().replace(/\.md$/, "");
6631
+ function normalizeNotePath(path39) {
6632
+ return path39.toLowerCase().replace(/\.md$/, "");
6633
6633
  }
6634
6634
  async function buildVaultIndex(vaultPath2, options = {}) {
6635
6635
  const { timeoutMs = DEFAULT_TIMEOUT_MS, onProgress } = options;
@@ -6798,7 +6798,7 @@ function findSimilarEntity(index, target) {
6798
6798
  }
6799
6799
  const maxDist = normalizedLen <= 10 ? 1 : 2;
6800
6800
  let bestMatch;
6801
- for (const [entity, path37] of index.entities) {
6801
+ for (const [entity, path39] of index.entities) {
6802
6802
  const lenDiff = Math.abs(entity.length - normalizedLen);
6803
6803
  if (lenDiff > maxDist) {
6804
6804
  continue;
@@ -6806,7 +6806,7 @@ function findSimilarEntity(index, target) {
6806
6806
  const dist = levenshteinDistance(normalized, entity);
6807
6807
  if (dist > 0 && dist <= maxDist) {
6808
6808
  if (!bestMatch || dist < bestMatch.distance) {
6809
- bestMatch = { path: path37, entity, distance: dist };
6809
+ bestMatch = { path: path39, entity, distance: dist };
6810
6810
  if (dist === 1) {
6811
6811
  return bestMatch;
6812
6812
  }
@@ -7337,30 +7337,30 @@ var EventQueue = class {
7337
7337
  * Add a new event to the queue
7338
7338
  */
7339
7339
  push(type, rawPath) {
7340
- const path37 = normalizePath(rawPath);
7340
+ const path39 = normalizePath(rawPath);
7341
7341
  const now = Date.now();
7342
7342
  const event = {
7343
7343
  type,
7344
- path: path37,
7344
+ path: path39,
7345
7345
  timestamp: now
7346
7346
  };
7347
- let pending = this.pending.get(path37);
7347
+ let pending = this.pending.get(path39);
7348
7348
  if (!pending) {
7349
7349
  pending = {
7350
7350
  events: [],
7351
7351
  timer: null,
7352
7352
  lastEvent: now
7353
7353
  };
7354
- this.pending.set(path37, pending);
7354
+ this.pending.set(path39, pending);
7355
7355
  }
7356
7356
  pending.events.push(event);
7357
7357
  pending.lastEvent = now;
7358
- console.error(`[flywheel] QUEUE: pushed ${type} for ${path37}, pending=${this.pending.size}`);
7358
+ console.error(`[flywheel] QUEUE: pushed ${type} for ${path39}, pending=${this.pending.size}`);
7359
7359
  if (pending.timer) {
7360
7360
  clearTimeout(pending.timer);
7361
7361
  }
7362
7362
  pending.timer = setTimeout(() => {
7363
- this.flushPath(path37);
7363
+ this.flushPath(path39);
7364
7364
  }, this.config.debounceMs);
7365
7365
  if (this.pending.size >= this.config.batchSize) {
7366
7366
  this.flush();
@@ -7381,10 +7381,10 @@ var EventQueue = class {
7381
7381
  /**
7382
7382
  * Flush a single path's events
7383
7383
  */
7384
- flushPath(path37) {
7385
- const pending = this.pending.get(path37);
7384
+ flushPath(path39) {
7385
+ const pending = this.pending.get(path39);
7386
7386
  if (!pending || pending.events.length === 0) return;
7387
- console.error(`[flywheel] QUEUE: flushing ${path37}, events=${pending.events.length}`);
7387
+ console.error(`[flywheel] QUEUE: flushing ${path39}, events=${pending.events.length}`);
7388
7388
  if (pending.timer) {
7389
7389
  clearTimeout(pending.timer);
7390
7390
  pending.timer = null;
@@ -7393,7 +7393,7 @@ var EventQueue = class {
7393
7393
  if (coalescedType) {
7394
7394
  const coalesced = {
7395
7395
  type: coalescedType,
7396
- path: path37,
7396
+ path: path39,
7397
7397
  originalEvents: [...pending.events]
7398
7398
  };
7399
7399
  this.onBatch({
@@ -7402,7 +7402,7 @@ var EventQueue = class {
7402
7402
  timestamp: Date.now()
7403
7403
  });
7404
7404
  }
7405
- this.pending.delete(path37);
7405
+ this.pending.delete(path39);
7406
7406
  }
7407
7407
  /**
7408
7408
  * Flush all pending events
@@ -7414,7 +7414,7 @@ var EventQueue = class {
7414
7414
  }
7415
7415
  if (this.pending.size === 0) return;
7416
7416
  const events = [];
7417
- for (const [path37, pending] of this.pending) {
7417
+ for (const [path39, pending] of this.pending) {
7418
7418
  if (pending.timer) {
7419
7419
  clearTimeout(pending.timer);
7420
7420
  }
@@ -7422,7 +7422,7 @@ var EventQueue = class {
7422
7422
  if (coalescedType) {
7423
7423
  events.push({
7424
7424
  type: coalescedType,
7425
- path: path37,
7425
+ path: path39,
7426
7426
  originalEvents: [...pending.events]
7427
7427
  });
7428
7428
  }
@@ -7808,31 +7808,31 @@ function createVaultWatcher(options) {
7808
7808
  usePolling: config.usePolling,
7809
7809
  interval: config.usePolling ? config.pollInterval : void 0
7810
7810
  });
7811
- watcher.on("add", (path37) => {
7812
- console.error(`[flywheel] RAW EVENT: add ${path37}`);
7813
- if (shouldWatch(path37, vaultPath2)) {
7814
- console.error(`[flywheel] ACCEPTED: add ${path37}`);
7815
- eventQueue.push("add", path37);
7811
+ watcher.on("add", (path39) => {
7812
+ console.error(`[flywheel] RAW EVENT: add ${path39}`);
7813
+ if (shouldWatch(path39, vaultPath2)) {
7814
+ console.error(`[flywheel] ACCEPTED: add ${path39}`);
7815
+ eventQueue.push("add", path39);
7816
7816
  } else {
7817
- console.error(`[flywheel] FILTERED: add ${path37}`);
7817
+ console.error(`[flywheel] FILTERED: add ${path39}`);
7818
7818
  }
7819
7819
  });
7820
- watcher.on("change", (path37) => {
7821
- console.error(`[flywheel] RAW EVENT: change ${path37}`);
7822
- if (shouldWatch(path37, vaultPath2)) {
7823
- console.error(`[flywheel] ACCEPTED: change ${path37}`);
7824
- eventQueue.push("change", path37);
7820
+ watcher.on("change", (path39) => {
7821
+ console.error(`[flywheel] RAW EVENT: change ${path39}`);
7822
+ if (shouldWatch(path39, vaultPath2)) {
7823
+ console.error(`[flywheel] ACCEPTED: change ${path39}`);
7824
+ eventQueue.push("change", path39);
7825
7825
  } else {
7826
- console.error(`[flywheel] FILTERED: change ${path37}`);
7826
+ console.error(`[flywheel] FILTERED: change ${path39}`);
7827
7827
  }
7828
7828
  });
7829
- watcher.on("unlink", (path37) => {
7830
- console.error(`[flywheel] RAW EVENT: unlink ${path37}`);
7831
- if (shouldWatch(path37, vaultPath2)) {
7832
- console.error(`[flywheel] ACCEPTED: unlink ${path37}`);
7833
- eventQueue.push("unlink", path37);
7829
+ watcher.on("unlink", (path39) => {
7830
+ console.error(`[flywheel] RAW EVENT: unlink ${path39}`);
7831
+ if (shouldWatch(path39, vaultPath2)) {
7832
+ console.error(`[flywheel] ACCEPTED: unlink ${path39}`);
7833
+ eventQueue.push("unlink", path39);
7834
7834
  } else {
7835
- console.error(`[flywheel] FILTERED: unlink ${path37}`);
7835
+ console.error(`[flywheel] FILTERED: unlink ${path39}`);
7836
7836
  }
7837
7837
  });
7838
7838
  watcher.on("ready", () => {
@@ -10324,8 +10324,8 @@ function getNoteAccessFrequency(stateDb2, daysBack = 30) {
10324
10324
  }
10325
10325
  }
10326
10326
  }
10327
- return Array.from(noteMap.entries()).map(([path37, stats]) => ({
10328
- path: path37,
10327
+ return Array.from(noteMap.entries()).map(([path39, stats]) => ({
10328
+ path: path39,
10329
10329
  access_count: stats.access_count,
10330
10330
  last_accessed: stats.last_accessed,
10331
10331
  tools_used: Array.from(stats.tools)
@@ -10874,7 +10874,9 @@ var TOOL_CATEGORY = {
10874
10874
  flywheel_trust_report: "diagnostics",
10875
10875
  flywheel_benchmark: "diagnostics",
10876
10876
  vault_session_history: "diagnostics",
10877
- vault_entity_history: "diagnostics"
10877
+ vault_entity_history: "diagnostics",
10878
+ flywheel_learning_report: "diagnostics",
10879
+ flywheel_calibration_export: "diagnostics"
10878
10880
  };
10879
10881
  function generateInstructions(categories, registry) {
10880
10882
  const parts = [];
@@ -11001,11 +11003,11 @@ Use "note_intelligence" for per-note analysis (completeness, quality, suggestion
11001
11003
  }
11002
11004
 
11003
11005
  // src/tool-registry.ts
11004
- import * as path35 from "path";
11006
+ import * as path37 from "path";
11005
11007
  import { dirname as dirname5, join as join19 } from "path";
11006
11008
  import { statSync as statSync6, readFileSync as readFileSync5 } from "fs";
11007
11009
  import { fileURLToPath } from "url";
11008
- import { z as z38 } from "zod";
11010
+ import { z as z40 } from "zod";
11009
11011
  import { getSessionId } from "@velvetmonkey/vault-core";
11010
11012
  init_vault_scope();
11011
11013
 
@@ -12253,14 +12255,14 @@ function registerWikilinkTools(server2, getIndex, getVaultPath, getStateDb3 = ()
12253
12255
  };
12254
12256
  function findSimilarEntity2(target, entities) {
12255
12257
  const targetLower = target.toLowerCase();
12256
- for (const [name, path37] of entities) {
12258
+ for (const [name, path39] of entities) {
12257
12259
  if (name.startsWith(targetLower) || targetLower.startsWith(name)) {
12258
- return path37;
12260
+ return path39;
12259
12261
  }
12260
12262
  }
12261
- for (const [name, path37] of entities) {
12263
+ for (const [name, path39] of entities) {
12262
12264
  if (name.includes(targetLower) || targetLower.includes(name)) {
12263
- return path37;
12265
+ return path39;
12264
12266
  }
12265
12267
  }
12266
12268
  return void 0;
@@ -13098,8 +13100,8 @@ function registerHealthTools(server2, getIndex, getVaultPath, getConfig2 = () =>
13098
13100
  daily_counts: z4.record(z4.number())
13099
13101
  }).describe("Activity summary for the last 7 days")
13100
13102
  };
13101
- function isPeriodicNote3(path37) {
13102
- const filename = path37.split("/").pop() || "";
13103
+ function isPeriodicNote3(path39) {
13104
+ const filename = path39.split("/").pop() || "";
13103
13105
  const nameWithoutExt = filename.replace(/\.md$/, "");
13104
13106
  const patterns = [
13105
13107
  /^\d{4}-\d{2}-\d{2}$/,
@@ -13114,7 +13116,7 @@ function registerHealthTools(server2, getIndex, getVaultPath, getConfig2 = () =>
13114
13116
  // YYYY (yearly)
13115
13117
  ];
13116
13118
  const periodicFolders = ["daily", "weekly", "monthly", "quarterly", "yearly", "journal", "journals"];
13117
- const folder = path37.split("/")[0]?.toLowerCase() || "";
13119
+ const folder = path39.split("/")[0]?.toLowerCase() || "";
13118
13120
  return patterns.some((p) => p.test(nameWithoutExt)) || periodicFolders.includes(folder);
13119
13121
  }
13120
13122
  server2.registerTool(
@@ -13718,7 +13720,7 @@ function rankBacklinks(backlinks, notePath, index, stateDb2, maxLinks = TOP_LINK
13718
13720
  return out;
13719
13721
  }).sort((a, b) => (b.weight ?? 1) - (a.weight ?? 1)).slice(0, maxLinks);
13720
13722
  }
13721
- var COMPACT_OUTLINK_NAMES = 10;
13723
+ var COMPACT_OUTLINK_NAMES = 5;
13722
13724
  function enrichResultCompact(result, index, stateDb2, opts) {
13723
13725
  const note = index.notes.get(result.path);
13724
13726
  const normalizedPath = result.path.toLowerCase().replace(/\.md$/, "");
@@ -13963,13 +13965,13 @@ function multiHopBackfill(primaryResults, index, stateDb2, config = {}) {
13963
13965
  candidates.sort((a, b) => b.score - a.score);
13964
13966
  return candidates.slice(0, cfg.maxBackfill).map((c) => c.result);
13965
13967
  }
13966
- function scoreCandidate(path37, index, stateDb2) {
13967
- const note = index.notes.get(path37);
13968
+ function scoreCandidate(path39, index, stateDb2) {
13969
+ const note = index.notes.get(path39);
13968
13970
  const decay = recencyDecay(note?.modified);
13969
13971
  let hubScore = 1;
13970
13972
  if (stateDb2) {
13971
13973
  try {
13972
- const title = note?.title ?? path37.replace(/\.md$/, "").split("/").pop() ?? "";
13974
+ const title = note?.title ?? path39.replace(/\.md$/, "").split("/").pop() ?? "";
13973
13975
  const entity = getEntityByName3(stateDb2, title);
13974
13976
  if (entity) hubScore = entity.hubScore ?? 1;
13975
13977
  } catch {
@@ -14045,26 +14047,59 @@ function stripFrontmatter(content) {
14045
14047
  function splitIntoParagraphs(content, maxChunkChars) {
14046
14048
  const MIN_PARAGRAPH_CHARS = 50;
14047
14049
  const raw = content.split(/\n\n+/).map((p) => p.trim()).filter((p) => p.length > 0);
14050
+ let currentSection;
14051
+ const withSections = [];
14052
+ for (const paragraph of raw) {
14053
+ const headingMatch = paragraph.match(/^#{1,6}\s+(.+)/);
14054
+ if (headingMatch) {
14055
+ currentSection = headingMatch[1].trim();
14056
+ }
14057
+ withSections.push({ text: paragraph, section: currentSection });
14058
+ }
14048
14059
  const merged = [];
14049
14060
  let buffer2 = "";
14050
- for (const paragraph of raw) {
14061
+ let bufferSection;
14062
+ for (const { text: paragraph, section } of withSections) {
14051
14063
  if (buffer2) {
14052
14064
  buffer2 += "\n\n" + paragraph;
14053
14065
  if (buffer2.length >= MIN_PARAGRAPH_CHARS) {
14054
- merged.push(buffer2.slice(0, maxChunkChars));
14066
+ merged.push({ text: buffer2.slice(0, maxChunkChars), section: bufferSection });
14055
14067
  buffer2 = "";
14056
14068
  }
14057
14069
  } else if (paragraph.length < MIN_PARAGRAPH_CHARS) {
14058
14070
  buffer2 = paragraph;
14071
+ bufferSection = section;
14059
14072
  } else {
14060
- merged.push(paragraph.slice(0, maxChunkChars));
14073
+ merged.push({ text: paragraph.slice(0, maxChunkChars), section });
14061
14074
  }
14062
14075
  }
14063
14076
  if (buffer2) {
14064
- merged.push(buffer2.slice(0, maxChunkChars));
14077
+ merged.push({ text: buffer2.slice(0, maxChunkChars), section: bufferSection });
14065
14078
  }
14066
14079
  return merged;
14067
14080
  }
14081
+ function expandWindow(paragraphs, matchIdx, maxChars = 800) {
14082
+ let result = paragraphs[matchIdx].text;
14083
+ let lo = matchIdx;
14084
+ let hi = matchIdx;
14085
+ for (let step = 0; step < 2; step++) {
14086
+ if (lo > 0) {
14087
+ const candidate = paragraphs[lo - 1].text + "\n\n" + result;
14088
+ if (candidate.length <= maxChars) {
14089
+ result = candidate;
14090
+ lo--;
14091
+ }
14092
+ }
14093
+ if (hi < paragraphs.length - 1) {
14094
+ const candidate = result + "\n\n" + paragraphs[hi + 1].text;
14095
+ if (candidate.length <= maxChars) {
14096
+ result = candidate;
14097
+ hi++;
14098
+ }
14099
+ }
14100
+ }
14101
+ return result;
14102
+ }
14068
14103
  function scoreByKeywords(chunk, queryTokens, queryStems) {
14069
14104
  const chunkTokens = new Set(tokenize(chunk.toLowerCase()));
14070
14105
  const chunkStems = new Set([...chunkTokens].map((t) => stem(t)));
@@ -14080,7 +14115,7 @@ function scoreByKeywords(chunk, queryTokens, queryStems) {
14080
14115
  }
14081
14116
  async function extractBestSnippets(filePath, queryEmbedding, queryTokens, options) {
14082
14117
  const maxSnippets = options?.maxSnippets ?? 1;
14083
- const maxChunkChars = options?.maxChunkChars ?? 500;
14118
+ const maxChunkChars = options?.maxChunkChars ?? 800;
14084
14119
  let content;
14085
14120
  try {
14086
14121
  content = fs13.readFileSync(filePath, "utf-8");
@@ -14089,35 +14124,83 @@ async function extractBestSnippets(filePath, queryEmbedding, queryTokens, option
14089
14124
  }
14090
14125
  const body = stripFrontmatter(content);
14091
14126
  if (body.length < 50) {
14092
- return body.length > 0 ? [{ text: body, score: 1 }] : [];
14127
+ return body.length > 0 ? [{ text: body, score: 1, confidence: 1 }] : [];
14093
14128
  }
14094
14129
  const paragraphs = splitIntoParagraphs(body, maxChunkChars);
14095
14130
  if (paragraphs.length === 0) return [];
14096
14131
  const queryStems = queryTokens.map((t) => stem(t));
14097
- const scored = paragraphs.map((text, idx) => ({
14098
- text,
14132
+ const scored = paragraphs.map((para, idx) => ({
14133
+ ...para,
14099
14134
  idx,
14100
- keywordScore: scoreByKeywords(text, queryTokens, queryStems)
14135
+ keywordScore: scoreByKeywords(para.text, queryTokens, queryStems)
14101
14136
  }));
14102
14137
  scored.sort((a, b) => b.keywordScore - a.keywordScore);
14138
+ const maxPossibleScore = queryTokens.length * 10;
14103
14139
  const topKeyword = scored.slice(0, 5);
14140
+ const buildSnippet = (match, score) => ({
14141
+ text: expandWindow(paragraphs, match.idx, maxChunkChars),
14142
+ score,
14143
+ section: match.section,
14144
+ confidence: maxPossibleScore > 0 ? Math.min(1, score / maxPossibleScore) : 0
14145
+ });
14104
14146
  if (queryEmbedding && hasEmbeddingsIndex()) {
14105
14147
  try {
14106
14148
  const reranked = [];
14107
14149
  for (const chunk of topKeyword) {
14108
14150
  const chunkEmbedding = await embedTextCached(chunk.text);
14109
14151
  const sim = cosineSimilarity(queryEmbedding, chunkEmbedding);
14110
- reranked.push({ text: chunk.text, score: sim });
14152
+ reranked.push({ match: chunk, sim });
14111
14153
  }
14112
- reranked.sort((a, b) => b.score - a.score);
14113
- return reranked.slice(0, maxSnippets);
14154
+ reranked.sort((a, b) => b.sim - a.sim);
14155
+ return reranked.slice(0, maxSnippets).map((r) => buildSnippet(r.match, r.sim));
14114
14156
  } catch {
14115
14157
  }
14116
14158
  }
14117
- return topKeyword.slice(0, maxSnippets).map((c) => ({
14118
- text: c.text,
14119
- score: c.keywordScore
14120
- }));
14159
+ return topKeyword.slice(0, maxSnippets).map((c) => buildSnippet(c, c.keywordScore));
14160
+ }
14161
+ var MONTH_MAP = {
14162
+ jan: "01",
14163
+ feb: "02",
14164
+ mar: "03",
14165
+ apr: "04",
14166
+ may: "05",
14167
+ jun: "06",
14168
+ jul: "07",
14169
+ aug: "08",
14170
+ sep: "09",
14171
+ oct: "10",
14172
+ nov: "11",
14173
+ dec: "12",
14174
+ january: "01",
14175
+ february: "02",
14176
+ march: "03",
14177
+ april: "04",
14178
+ june: "06",
14179
+ july: "07",
14180
+ august: "08",
14181
+ september: "09",
14182
+ october: "10",
14183
+ november: "11",
14184
+ december: "12"
14185
+ };
14186
+ function extractDates(text) {
14187
+ const dates = /* @__PURE__ */ new Set();
14188
+ for (const m of text.matchAll(/\b(\d{4})-(\d{2})-(\d{2})\b/g)) {
14189
+ dates.add(m[0]);
14190
+ }
14191
+ for (const m of text.matchAll(/\b((?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\w*)\s+(\d{1,2}),?\s+(\d{4})\b/gi)) {
14192
+ const month = MONTH_MAP[m[1].toLowerCase()];
14193
+ if (month) dates.add(`${m[3]}-${month}-${m[2].padStart(2, "0")}`);
14194
+ }
14195
+ for (const m of text.matchAll(/\b(\d{1,2})\s+((?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\w*)\s+(\d{4})\b/gi)) {
14196
+ const month = MONTH_MAP[m[2].toLowerCase()];
14197
+ if (month) dates.add(`${m[3]}-${month}-${m[1].padStart(2, "0")}`);
14198
+ }
14199
+ for (const m of text.matchAll(/\b((?:January|February|March|April|May|June|July|August|September|October|November|December))\s+(\d{4})\b/gi)) {
14200
+ const month = MONTH_MAP[m[1].toLowerCase()];
14201
+ if (month) dates.add(`${m[2]}-${month}-01`);
14202
+ }
14203
+ return [...dates].sort();
14121
14204
  }
14122
14205
 
14123
14206
  // src/tools/read/query.ts
@@ -14152,6 +14235,53 @@ function applyGraphReranking(results, stateDb2) {
14152
14235
  (a, b) => (b._combined_score ?? b.rrf_score ?? 0) - (a._combined_score ?? a.rrf_score ?? 0)
14153
14236
  );
14154
14237
  }
14238
+ function applySandwichOrdering(results) {
14239
+ if (results.length < 3) return;
14240
+ const secondBest = results.splice(1, 1)[0];
14241
+ results.push(secondBest);
14242
+ }
14243
+ function applyEntityBridging(results, stateDb2, maxBridgesPerResult = 3) {
14244
+ if (!stateDb2 || results.length < 2) return;
14245
+ const linkMap = /* @__PURE__ */ new Map();
14246
+ try {
14247
+ const paths = results.map((r) => r.path).filter(Boolean);
14248
+ for (const path39 of paths) {
14249
+ const rows = stateDb2.db.prepare(
14250
+ "SELECT target FROM note_links WHERE note_path = ?"
14251
+ ).all(path39);
14252
+ linkMap.set(path39, new Set(rows.map((r) => r.target)));
14253
+ }
14254
+ } catch {
14255
+ return;
14256
+ }
14257
+ for (const r of results) {
14258
+ const myPath = r.path;
14259
+ const myLinks = linkMap.get(myPath);
14260
+ if (!myLinks || myLinks.size === 0) continue;
14261
+ const bridges = [];
14262
+ for (const other of results) {
14263
+ const otherPath = other.path;
14264
+ if (otherPath === myPath) continue;
14265
+ const otherLinks = linkMap.get(otherPath);
14266
+ if (!otherLinks) continue;
14267
+ for (const entity of myLinks) {
14268
+ if (otherLinks.has(entity) && bridges.length < maxBridgesPerResult) {
14269
+ bridges.push({ entity, in_result: otherPath });
14270
+ }
14271
+ }
14272
+ if (bridges.length >= maxBridgesPerResult) break;
14273
+ }
14274
+ if (bridges.length > 0) {
14275
+ r.bridges = bridges;
14276
+ }
14277
+ }
14278
+ }
14279
+ function stripInternalFields(results) {
14280
+ const INTERNAL = ["rrf_score", "in_fts5", "in_semantic", "in_entity", "graph_boost", "_combined_score"];
14281
+ for (const r of results) {
14282
+ for (const key of INTERNAL) delete r[key];
14283
+ }
14284
+ }
14155
14285
  async function enhanceSnippets(results, query, vaultPath2) {
14156
14286
  if (!hasEmbeddingsIndex()) return;
14157
14287
  const queryTokens = tokenize(query).map((t) => t.toLowerCase());
@@ -14166,6 +14296,10 @@ async function enhanceSnippets(results, query, vaultPath2) {
14166
14296
  const snippets = await extractBestSnippets(`${vaultPath2}/${r.path}`, queryEmb, queryTokens);
14167
14297
  if (snippets.length > 0 && snippets[0].text.length > 0) {
14168
14298
  r.snippet = snippets[0].text;
14299
+ if (snippets[0].section) r.section = snippets[0].section;
14300
+ if (snippets[0].confidence != null) r.snippet_confidence = Math.round(snippets[0].confidence * 100) / 100;
14301
+ const dates = extractDates(snippets[0].text);
14302
+ if (dates.length > 0) r.dates_mentioned = dates;
14169
14303
  }
14170
14304
  } catch {
14171
14305
  }
@@ -14437,12 +14571,17 @@ function registerQueryTools(server2, getIndex, getVaultPath, getStateDb3) {
14437
14571
  in_semantic: item.in_semantic,
14438
14572
  in_entity: item.in_entity
14439
14573
  }));
14440
- const hopResults2 = multiHopBackfill(results2, index, stateDb2, { maxBackfill: limit });
14441
- const expansionTerms2 = extractExpansionTerms(results2, query, index);
14442
- const expansionResults2 = expandQuery(expansionTerms2, [...results2, ...hopResults2], index, stateDb2);
14443
- results2.push(...hopResults2, ...expansionResults2);
14574
+ if (results2.length < 3) {
14575
+ const hopResults = multiHopBackfill(results2, index, stateDb2, { maxBackfill: limit });
14576
+ const expansionTerms = extractExpansionTerms(results2, query, index);
14577
+ const expansionResults = expandQuery(expansionTerms, [...results2, ...hopResults], index, stateDb2);
14578
+ results2.push(...hopResults, ...expansionResults);
14579
+ }
14444
14580
  applyGraphReranking(results2, stateDb2);
14581
+ applyEntityBridging(results2, stateDb2);
14582
+ applySandwichOrdering(results2);
14445
14583
  await enhanceSnippets(results2, query, vaultPath2);
14584
+ stripInternalFields(results2);
14446
14585
  return { content: [{ type: "text", text: JSON.stringify({
14447
14586
  method: "hybrid",
14448
14587
  query,
@@ -14474,12 +14613,17 @@ function registerQueryTools(server2, getIndex, getVaultPath, getStateDb3) {
14474
14613
  ...enrichResultCompact({ path: item.path, title: item.title, snippet: item.snippet }, index, stateDb2),
14475
14614
  ..."in_fts5" in item ? { in_fts5: true } : { in_entity: true }
14476
14615
  }));
14477
- const hopResults2 = multiHopBackfill(results2, index, stateDb2, { maxBackfill: limit });
14478
- const expansionTerms2 = extractExpansionTerms(results2, query, index);
14479
- const expansionResults2 = expandQuery(expansionTerms2, [...results2, ...hopResults2], index, stateDb2);
14480
- results2.push(...hopResults2, ...expansionResults2);
14616
+ if (results2.length < 3) {
14617
+ const hopResults = multiHopBackfill(results2, index, stateDb2, { maxBackfill: limit });
14618
+ const expansionTerms = extractExpansionTerms(results2, query, index);
14619
+ const expansionResults = expandQuery(expansionTerms, [...results2, ...hopResults], index, stateDb2);
14620
+ results2.push(...hopResults, ...expansionResults);
14621
+ }
14481
14622
  applyGraphReranking(results2, stateDb2);
14623
+ applyEntityBridging(results2, stateDb2);
14624
+ applySandwichOrdering(results2);
14482
14625
  await enhanceSnippets(results2, query, vaultPath2);
14626
+ stripInternalFields(results2);
14483
14627
  return { content: [{ type: "text", text: JSON.stringify({
14484
14628
  method: "fts5",
14485
14629
  query,
@@ -14490,12 +14634,17 @@ function registerQueryTools(server2, getIndex, getVaultPath, getStateDb3) {
14490
14634
  const stateDbFts = getStateDb3();
14491
14635
  const fts5Filtered = applyFolderFilter(fts5Results);
14492
14636
  const results = fts5Filtered.map((r) => ({ ...enrichResultCompact({ path: r.path, title: r.title, snippet: r.snippet }, index, stateDbFts), in_fts5: true }));
14493
- const hopResults = multiHopBackfill(results, index, stateDbFts, { maxBackfill: limit });
14494
- const expansionTerms = extractExpansionTerms(results, query, index);
14495
- const expansionResults = expandQuery(expansionTerms, [...results, ...hopResults], index, stateDbFts);
14496
- results.push(...hopResults, ...expansionResults);
14637
+ if (results.length < 3) {
14638
+ const hopResults = multiHopBackfill(results, index, stateDbFts, { maxBackfill: limit });
14639
+ const expansionTerms = extractExpansionTerms(results, query, index);
14640
+ const expansionResults = expandQuery(expansionTerms, [...results, ...hopResults], index, stateDbFts);
14641
+ results.push(...hopResults, ...expansionResults);
14642
+ }
14497
14643
  applyGraphReranking(results, stateDbFts);
14644
+ applyEntityBridging(results, stateDbFts);
14645
+ applySandwichOrdering(results);
14498
14646
  await enhanceSnippets(results, query, vaultPath2);
14647
+ stripInternalFields(results);
14499
14648
  return { content: [{ type: "text", text: JSON.stringify({
14500
14649
  method: "fts5",
14501
14650
  query,
@@ -15238,30 +15387,30 @@ function registerPrimitiveTools(server2, getIndex, getVaultPath, getConfig2 = ()
15238
15387
  include_content: z7.boolean().default(true).describe("Include the text content under each top-level section. Set false to get structure only.")
15239
15388
  }
15240
15389
  },
15241
- async ({ path: path37, include_content }) => {
15390
+ async ({ path: path39, include_content }) => {
15242
15391
  const index = getIndex();
15243
15392
  const vaultPath2 = getVaultPath();
15244
- const result = await getNoteStructure(index, path37, vaultPath2);
15393
+ const result = await getNoteStructure(index, path39, vaultPath2);
15245
15394
  if (!result) {
15246
15395
  return {
15247
- content: [{ type: "text", text: JSON.stringify({ error: "Note not found", path: path37 }, null, 2) }]
15396
+ content: [{ type: "text", text: JSON.stringify({ error: "Note not found", path: path39 }, null, 2) }]
15248
15397
  };
15249
15398
  }
15250
15399
  if (include_content) {
15251
15400
  for (const section of result.sections) {
15252
- const sectionResult = await getSectionContent(index, path37, section.heading.text, vaultPath2, true);
15401
+ const sectionResult = await getSectionContent(index, path39, section.heading.text, vaultPath2, true);
15253
15402
  if (sectionResult) {
15254
15403
  section.content = sectionResult.content;
15255
15404
  }
15256
15405
  }
15257
15406
  }
15258
- const note = index.notes.get(path37);
15407
+ const note = index.notes.get(path39);
15259
15408
  const enriched = { ...result };
15260
15409
  if (note) {
15261
15410
  enriched.frontmatter = note.frontmatter;
15262
15411
  enriched.tags = note.tags;
15263
15412
  enriched.aliases = note.aliases;
15264
- const normalizedPath = path37.toLowerCase().replace(/\.md$/, "");
15413
+ const normalizedPath = path39.toLowerCase().replace(/\.md$/, "");
15265
15414
  const backlinks = index.backlinks.get(normalizedPath) || [];
15266
15415
  enriched.backlink_count = backlinks.length;
15267
15416
  enriched.outlink_count = note.outlinks.length;
@@ -15294,15 +15443,15 @@ function registerPrimitiveTools(server2, getIndex, getVaultPath, getConfig2 = ()
15294
15443
  include_subheadings: z7.boolean().default(true).describe("Include content under subheadings")
15295
15444
  }
15296
15445
  },
15297
- async ({ path: path37, heading, include_subheadings }) => {
15446
+ async ({ path: path39, heading, include_subheadings }) => {
15298
15447
  const index = getIndex();
15299
15448
  const vaultPath2 = getVaultPath();
15300
- const result = await getSectionContent(index, path37, heading, vaultPath2, include_subheadings);
15449
+ const result = await getSectionContent(index, path39, heading, vaultPath2, include_subheadings);
15301
15450
  if (!result) {
15302
15451
  return {
15303
15452
  content: [{ type: "text", text: JSON.stringify({
15304
15453
  error: "Section not found",
15305
- path: path37,
15454
+ path: path39,
15306
15455
  heading
15307
15456
  }, null, 2) }]
15308
15457
  };
@@ -15356,16 +15505,16 @@ function registerPrimitiveTools(server2, getIndex, getVaultPath, getConfig2 = ()
15356
15505
  offset: z7.coerce.number().default(0).describe("Number of results to skip (for pagination)")
15357
15506
  }
15358
15507
  },
15359
- async ({ path: path37, status, has_due_date, folder, tag, limit: requestedLimit, offset }) => {
15508
+ async ({ path: path39, status, has_due_date, folder, tag, limit: requestedLimit, offset }) => {
15360
15509
  const limit = Math.min(requestedLimit ?? 25, MAX_LIMIT);
15361
15510
  const index = getIndex();
15362
15511
  const vaultPath2 = getVaultPath();
15363
15512
  const config = getConfig2();
15364
- if (path37) {
15365
- const result2 = await getTasksFromNote(index, path37, vaultPath2, config.exclude_task_tags || []);
15513
+ if (path39) {
15514
+ const result2 = await getTasksFromNote(index, path39, vaultPath2, config.exclude_task_tags || []);
15366
15515
  if (!result2) {
15367
15516
  return {
15368
- content: [{ type: "text", text: JSON.stringify({ error: "Note not found", path: path37 }, null, 2) }]
15517
+ content: [{ type: "text", text: JSON.stringify({ error: "Note not found", path: path39 }, null, 2) }]
15369
15518
  };
15370
15519
  }
15371
15520
  let filtered = result2;
@@ -15375,7 +15524,7 @@ function registerPrimitiveTools(server2, getIndex, getVaultPath, getConfig2 = ()
15375
15524
  const paged2 = filtered.slice(offset, offset + limit);
15376
15525
  return {
15377
15526
  content: [{ type: "text", text: JSON.stringify({
15378
- path: path37,
15527
+ path: path39,
15379
15528
  total_count: filtered.length,
15380
15529
  returned_count: paged2.length,
15381
15530
  open: result2.filter((t) => t.status === "open").length,
@@ -19110,6 +19259,7 @@ init_writer();
19110
19259
  init_wikilinks();
19111
19260
  import { z as z18 } from "zod";
19112
19261
  import fs26 from "fs/promises";
19262
+ import path28 from "path";
19113
19263
  function registerMergeTools(server2, getVaultPath) {
19114
19264
  server2.tool(
19115
19265
  "merge_entities",
@@ -19267,7 +19417,7 @@ ${trimmedSource}`;
19267
19417
  );
19268
19418
  server2.tool(
19269
19419
  "absorb_as_alias",
19270
- "Absorb an entity name as an alias of a target note: adds alias to target frontmatter and rewrites all [[source]] links to [[target|source]]. Lighter than merge_entities \u2014 no source note required, no content append, no deletion.",
19420
+ "Absorb an entity name as an alias of a target note: adds alias to target frontmatter, rewrites all [[source]] links to [[target|source]], and deletes the source note if it exists. Lighter than merge_entities \u2014 no source note required, no content append.",
19271
19421
  {
19272
19422
  source_name: z18.string().describe('The entity name to absorb (e.g. "Foo")'),
19273
19423
  target_path: z18.string().describe('Vault-relative path of the target entity note (e.g. "entities/Bar.md")'),
@@ -19310,6 +19460,7 @@ ${trimmedSource}`;
19310
19460
  const backlinks = await findBacklinks(vaultPath2, source_name, []);
19311
19461
  let totalBacklinksUpdated = 0;
19312
19462
  const modifiedFiles = [];
19463
+ const sourceNoteFile = await findSourceNote(vaultPath2, source_name, target_path);
19313
19464
  if (dry_run) {
19314
19465
  for (const backlink of backlinks) {
19315
19466
  if (backlink.path === target_path) continue;
@@ -19348,6 +19499,11 @@ ${trimmedSource}`;
19348
19499
  modifiedFiles.push(backlink.path);
19349
19500
  }
19350
19501
  }
19502
+ let sourceDeleted = false;
19503
+ if (sourceNoteFile) {
19504
+ await fs26.unlink(`${vaultPath2}/${sourceNoteFile}`);
19505
+ sourceDeleted = true;
19506
+ }
19351
19507
  initializeEntityIndex(vaultPath2).catch((err) => {
19352
19508
  console.error(`[Flywheel] Entity cache rebuild failed: ${err}`);
19353
19509
  });
@@ -19356,7 +19512,8 @@ ${trimmedSource}`;
19356
19512
  const previewLines = [
19357
19513
  `${dry_run ? "Would absorb" : "Absorbed"}: "${source_name}" \u2192 "${targetTitle}"`,
19358
19514
  `Alias ${dry_run ? "to add" : "added"}: ${aliasAdded ? source_name : "no (matches target title)"}`,
19359
- `Backlinks ${dry_run ? "to update" : "updated"}: ${totalBacklinksUpdated}`
19515
+ `Backlinks ${dry_run ? "to update" : "updated"}: ${totalBacklinksUpdated}`,
19516
+ sourceNoteFile ? `Source note ${dry_run ? "to delete" : "deleted"}: ${sourceNoteFile}` : "Source note: none found"
19360
19517
  ];
19361
19518
  if (modifiedFiles.length > 0) {
19362
19519
  previewLines.push(`Files ${dry_run ? "to modify" : "modified"}: ${modifiedFiles.join(", ")}`);
@@ -19388,6 +19545,33 @@ ${trimmedSource}`;
19388
19545
  }
19389
19546
  );
19390
19547
  }
19548
+ async function findSourceNote(vaultPath2, sourceName, excludePath) {
19549
+ const targetLower = sourceName.toLowerCase();
19550
+ async function scanDir(dir) {
19551
+ let entries;
19552
+ try {
19553
+ entries = await fs26.readdir(dir, { withFileTypes: true });
19554
+ } catch {
19555
+ return null;
19556
+ }
19557
+ for (const entry of entries) {
19558
+ if (entry.name.startsWith(".")) continue;
19559
+ const fullPath = path28.join(dir, entry.name);
19560
+ if (entry.isDirectory()) {
19561
+ const found = await scanDir(fullPath);
19562
+ if (found) return found;
19563
+ } else if (entry.isFile() && entry.name.endsWith(".md")) {
19564
+ const basename5 = path28.basename(entry.name, ".md");
19565
+ if (basename5.toLowerCase() === targetLower) {
19566
+ const relative3 = path28.relative(vaultPath2, fullPath).replace(/\\/g, "/");
19567
+ if (relative3 !== excludePath) return relative3;
19568
+ }
19569
+ }
19570
+ }
19571
+ return null;
19572
+ }
19573
+ return scanDir(vaultPath2);
19574
+ }
19391
19575
 
19392
19576
  // src/tools/write/system.ts
19393
19577
  init_git();
@@ -19500,6 +19684,7 @@ Message: ${undoResult.undoneCommit.message}` : void 0
19500
19684
  }
19501
19685
 
19502
19686
  // src/tools/write/policy.ts
19687
+ import * as path33 from "path";
19503
19688
  import { z as z21 } from "zod";
19504
19689
 
19505
19690
  // src/core/write/policy/index.ts
@@ -19509,7 +19694,7 @@ init_schema();
19509
19694
  // src/core/write/policy/parser.ts
19510
19695
  init_schema();
19511
19696
  import fs27 from "fs/promises";
19512
- import path28 from "path";
19697
+ import path29 from "path";
19513
19698
  import matter7 from "gray-matter";
19514
19699
  function parseYaml(content) {
19515
19700
  const parsed = matter7(`---
@@ -19558,13 +19743,13 @@ async function loadPolicyFile(filePath) {
19558
19743
  }
19559
19744
  }
19560
19745
  async function loadPolicy(vaultPath2, policyName) {
19561
- const policiesDir = path28.join(vaultPath2, ".claude", "policies");
19562
- const policyPath = path28.join(policiesDir, `${policyName}.yaml`);
19746
+ const policiesDir = path29.join(vaultPath2, ".claude", "policies");
19747
+ const policyPath = path29.join(policiesDir, `${policyName}.yaml`);
19563
19748
  try {
19564
19749
  await fs27.access(policyPath);
19565
19750
  return loadPolicyFile(policyPath);
19566
19751
  } catch {
19567
- const ymlPath = path28.join(policiesDir, `${policyName}.yml`);
19752
+ const ymlPath = path29.join(policiesDir, `${policyName}.yml`);
19568
19753
  try {
19569
19754
  await fs27.access(ymlPath);
19570
19755
  return loadPolicyFile(ymlPath);
@@ -19707,7 +19892,7 @@ init_writer();
19707
19892
  init_git();
19708
19893
  init_wikilinks();
19709
19894
  import fs29 from "fs/promises";
19710
- import path30 from "path";
19895
+ import path31 from "path";
19711
19896
  init_constants();
19712
19897
  async function executeStep(step, vaultPath2, context, conditionResults, searchFn) {
19713
19898
  const { execute, reason } = shouldStepExecute(step.when, conditionResults);
@@ -19915,7 +20100,7 @@ async function executeToggleTask(params, vaultPath2) {
19915
20100
  const notePath = String(params.path || "");
19916
20101
  const task = String(params.task || "");
19917
20102
  const section = params.section ? String(params.section) : void 0;
19918
- const fullPath = path30.join(vaultPath2, notePath);
20103
+ const fullPath = path31.join(vaultPath2, notePath);
19919
20104
  try {
19920
20105
  await fs29.access(fullPath);
19921
20106
  } catch {
@@ -20198,7 +20383,7 @@ async function rollbackChanges(vaultPath2, originalContents, filesModified) {
20198
20383
  const pathCheck = await validatePathSecure(vaultPath2, filePath);
20199
20384
  if (!pathCheck.valid) continue;
20200
20385
  const original = originalContents.get(filePath);
20201
- const fullPath = path30.join(vaultPath2, filePath);
20386
+ const fullPath = path31.join(vaultPath2, filePath);
20202
20387
  if (original === null) {
20203
20388
  try {
20204
20389
  await fs29.unlink(fullPath);
@@ -20253,9 +20438,9 @@ async function previewPolicy(policy, vaultPath2, variables) {
20253
20438
 
20254
20439
  // src/core/write/policy/storage.ts
20255
20440
  import fs30 from "fs/promises";
20256
- import path31 from "path";
20441
+ import path32 from "path";
20257
20442
  function getPoliciesDir(vaultPath2) {
20258
- return path31.join(vaultPath2, ".claude", "policies");
20443
+ return path32.join(vaultPath2, ".claude", "policies");
20259
20444
  }
20260
20445
  async function ensurePoliciesDir(vaultPath2) {
20261
20446
  const dir = getPoliciesDir(vaultPath2);
@@ -20270,7 +20455,7 @@ async function listPolicies(vaultPath2) {
20270
20455
  if (!file.endsWith(".yaml") && !file.endsWith(".yml")) {
20271
20456
  continue;
20272
20457
  }
20273
- const filePath = path31.join(dir, file);
20458
+ const filePath = path32.join(dir, file);
20274
20459
  const stat5 = await fs30.stat(filePath);
20275
20460
  const content = await fs30.readFile(filePath, "utf-8");
20276
20461
  const metadata = extractPolicyMetadata(content);
@@ -20295,7 +20480,7 @@ async function writePolicyRaw(vaultPath2, policyName, content, overwrite = false
20295
20480
  const dir = getPoliciesDir(vaultPath2);
20296
20481
  await ensurePoliciesDir(vaultPath2);
20297
20482
  const filename = `${policyName}.yaml`;
20298
- const filePath = path31.join(dir, filename);
20483
+ const filePath = path32.join(dir, filename);
20299
20484
  if (!overwrite) {
20300
20485
  try {
20301
20486
  await fs30.access(filePath);
@@ -20409,6 +20594,8 @@ function registerPolicyTools(server2, getVaultPath, getSearchFn) {
20409
20594
  const policies = await listPolicies(vaultPath2);
20410
20595
  const response = {
20411
20596
  success: true,
20597
+ vault: path33.basename(vaultPath2),
20598
+ vault_path: vaultPath2,
20412
20599
  count: policies.length,
20413
20600
  policies: policies.map((p) => ({
20414
20601
  name: p.name,
@@ -20841,7 +21028,7 @@ import { z as z22 } from "zod";
20841
21028
 
20842
21029
  // src/core/write/tagRename.ts
20843
21030
  import * as fs31 from "fs/promises";
20844
- import * as path32 from "path";
21031
+ import * as path34 from "path";
20845
21032
  import matter8 from "gray-matter";
20846
21033
  import { getProtectedZones as getProtectedZones2 } from "@velvetmonkey/vault-core";
20847
21034
  function getNotesInFolder3(index, folder) {
@@ -20947,7 +21134,7 @@ async function renameTag(index, vaultPath2, oldTag, newTag, options) {
20947
21134
  const previews = [];
20948
21135
  let totalChanges = 0;
20949
21136
  for (const note of affectedNotes) {
20950
- const fullPath = path32.join(vaultPath2, note.path);
21137
+ const fullPath = path34.join(vaultPath2, note.path);
20951
21138
  let fileContent;
20952
21139
  try {
20953
21140
  fileContent = await fs31.readFile(fullPath, "utf-8");
@@ -22186,7 +22373,11 @@ var VALID_CONFIG_KEYS = {
22186
22373
  adaptive_strictness: z28.boolean(),
22187
22374
  proactive_linking: z28.boolean(),
22188
22375
  proactive_min_score: z28.number(),
22189
- proactive_max_per_file: z28.number()
22376
+ proactive_max_per_file: z28.number(),
22377
+ proactive_max_per_day: z28.number(),
22378
+ custom_categories: z28.record(z28.string(), z28.object({
22379
+ type_boost: z28.number().optional()
22380
+ }))
22190
22381
  };
22191
22382
  function registerConfigTools(server2, getConfig2, setConfig, getStateDb3) {
22192
22383
  server2.registerTool(
@@ -22255,7 +22446,7 @@ init_wikilinks();
22255
22446
  init_wikilinkFeedback();
22256
22447
  import { z as z29 } from "zod";
22257
22448
  import * as fs32 from "fs/promises";
22258
- import * as path33 from "path";
22449
+ import * as path35 from "path";
22259
22450
  import { scanVaultEntities as scanVaultEntities3, SCHEMA_VERSION as SCHEMA_VERSION2 } from "@velvetmonkey/vault-core";
22260
22451
  init_embeddings();
22261
22452
  function hasSkipWikilinks(content) {
@@ -22271,13 +22462,13 @@ async function collectMarkdownFiles(dirPath, basePath, excludeFolders) {
22271
22462
  const entries = await fs32.readdir(dirPath, { withFileTypes: true });
22272
22463
  for (const entry of entries) {
22273
22464
  if (entry.name.startsWith(".")) continue;
22274
- const fullPath = path33.join(dirPath, entry.name);
22465
+ const fullPath = path35.join(dirPath, entry.name);
22275
22466
  if (entry.isDirectory()) {
22276
22467
  if (excludeFolders.some((f) => entry.name.toLowerCase() === f.toLowerCase())) continue;
22277
22468
  const sub = await collectMarkdownFiles(fullPath, basePath, excludeFolders);
22278
22469
  results.push(...sub);
22279
22470
  } else if (entry.isFile() && entry.name.endsWith(".md")) {
22280
- results.push(path33.relative(basePath, fullPath));
22471
+ results.push(path35.relative(basePath, fullPath));
22281
22472
  }
22282
22473
  }
22283
22474
  } catch {
@@ -22307,7 +22498,7 @@ var EXCLUDE_FOLDERS = [
22307
22498
  ];
22308
22499
  function buildStatusReport(stateDb2, vaultPath2) {
22309
22500
  const recommendations = [];
22310
- const dbPath = path33.join(vaultPath2, ".flywheel", "state.db");
22501
+ const dbPath = path35.join(vaultPath2, ".flywheel", "state.db");
22311
22502
  const statedbExists = stateDb2 !== null;
22312
22503
  if (!statedbExists) {
22313
22504
  recommendations.push("StateDb not initialized \u2014 server needs restart");
@@ -22433,7 +22624,7 @@ async function executeRun(stateDb2, vaultPath2) {
22433
22624
  const allFiles = await collectMarkdownFiles(vaultPath2, vaultPath2, EXCLUDE_FOLDERS);
22434
22625
  let eligible = 0;
22435
22626
  for (const relativePath of allFiles) {
22436
- const fullPath = path33.join(vaultPath2, relativePath);
22627
+ const fullPath = path35.join(vaultPath2, relativePath);
22437
22628
  let content;
22438
22629
  try {
22439
22630
  content = await fs32.readFile(fullPath, "utf-8");
@@ -22491,7 +22682,7 @@ async function executeEnrich(stateDb2, vaultPath2, dryRun, batchSize, offset) {
22491
22682
  const eligible = [];
22492
22683
  let notesSkipped = 0;
22493
22684
  for (const relativePath of allFiles) {
22494
- const fullPath = path33.join(vaultPath2, relativePath);
22685
+ const fullPath = path35.join(vaultPath2, relativePath);
22495
22686
  let content;
22496
22687
  try {
22497
22688
  content = await fs32.readFile(fullPath, "utf-8");
@@ -22521,7 +22712,7 @@ async function executeEnrich(stateDb2, vaultPath2, dryRun, batchSize, offset) {
22521
22712
  match_count: result.linksAdded
22522
22713
  });
22523
22714
  if (!dryRun) {
22524
- const fullPath = path33.join(vaultPath2, relativePath);
22715
+ const fullPath = path35.join(vaultPath2, relativePath);
22525
22716
  await fs32.writeFile(fullPath, result.content, "utf-8");
22526
22717
  notesModified++;
22527
22718
  if (stateDb2) {
@@ -22754,7 +22945,7 @@ import { z as z32 } from "zod";
22754
22945
  // src/core/read/similarity.ts
22755
22946
  init_embeddings();
22756
22947
  import * as fs33 from "fs";
22757
- import * as path34 from "path";
22948
+ import * as path36 from "path";
22758
22949
  var STOP_WORDS = /* @__PURE__ */ new Set([
22759
22950
  "the",
22760
22951
  "be",
@@ -22891,7 +23082,7 @@ function extractKeyTerms(content, maxTerms = 15) {
22891
23082
  }
22892
23083
  function findSimilarNotes(db4, vaultPath2, index, sourcePath, options = {}) {
22893
23084
  const limit = options.limit ?? 10;
22894
- const absPath = path34.join(vaultPath2, sourcePath);
23085
+ const absPath = path36.join(vaultPath2, sourcePath);
22895
23086
  let content;
22896
23087
  try {
22897
23088
  content = fs33.readFileSync(absPath, "utf-8");
@@ -23033,7 +23224,7 @@ function registerSimilarityTools(server2, getIndex, getVaultPath, getStateDb3) {
23033
23224
  diversity: z32.number().min(0).max(1).optional().describe("Relevance vs diversity tradeoff (0=max diversity, 1=pure relevance, default: 0.7)")
23034
23225
  }
23035
23226
  },
23036
- async ({ path: path37, limit, diversity }) => {
23227
+ async ({ path: path39, limit, diversity }) => {
23037
23228
  const index = getIndex();
23038
23229
  const vaultPath2 = getVaultPath();
23039
23230
  const stateDb2 = getStateDb3();
@@ -23042,10 +23233,10 @@ function registerSimilarityTools(server2, getIndex, getVaultPath, getStateDb3) {
23042
23233
  content: [{ type: "text", text: JSON.stringify({ error: "StateDb not available" }) }]
23043
23234
  };
23044
23235
  }
23045
- if (!index.notes.has(path37)) {
23236
+ if (!index.notes.has(path39)) {
23046
23237
  return {
23047
23238
  content: [{ type: "text", text: JSON.stringify({
23048
- error: `Note not found: ${path37}`,
23239
+ error: `Note not found: ${path39}`,
23049
23240
  hint: "Use the full relative path including .md extension"
23050
23241
  }, null, 2) }]
23051
23242
  };
@@ -23057,12 +23248,12 @@ function registerSimilarityTools(server2, getIndex, getVaultPath, getStateDb3) {
23057
23248
  };
23058
23249
  const useHybrid = hasEmbeddingsIndex();
23059
23250
  const method = useHybrid ? "hybrid" : "bm25";
23060
- const results = useHybrid ? await findHybridSimilarNotes(stateDb2.db, vaultPath2, index, path37, opts) : findSimilarNotes(stateDb2.db, vaultPath2, index, path37, opts);
23251
+ const results = useHybrid ? await findHybridSimilarNotes(stateDb2.db, vaultPath2, index, path39, opts) : findSimilarNotes(stateDb2.db, vaultPath2, index, path39, opts);
23061
23252
  return {
23062
23253
  content: [{
23063
23254
  type: "text",
23064
23255
  text: JSON.stringify({
23065
- source: path37,
23256
+ source: path39,
23066
23257
  method,
23067
23258
  count: results.length,
23068
23259
  similar: results
@@ -24188,6 +24379,492 @@ function registerEntityHistoryTools(server2, getStateDb3) {
24188
24379
  );
24189
24380
  }
24190
24381
 
24382
+ // src/tools/read/learningReport.ts
24383
+ import { z as z38 } from "zod";
24384
+
24385
+ // src/core/read/learningReport.ts
24386
+ function isoDate(d) {
24387
+ return d.toISOString().slice(0, 10);
24388
+ }
24389
+ function periodBounds(daysBack, now) {
24390
+ const end = new Date(now);
24391
+ end.setHours(23, 59, 59, 999);
24392
+ const start = new Date(now);
24393
+ start.setDate(start.getDate() - daysBack + 1);
24394
+ start.setHours(0, 0, 0, 0);
24395
+ return {
24396
+ start: isoDate(start),
24397
+ end: isoDate(end),
24398
+ startMs: start.getTime(),
24399
+ endMs: end.getTime() + 1
24400
+ // exclusive upper bound
24401
+ };
24402
+ }
24403
+ function queryApplicationsByDay(stateDb2, startIso, endIso) {
24404
+ const rows = stateDb2.db.prepare(`
24405
+ SELECT date(applied_at) as day,
24406
+ SUM(CASE WHEN status='applied' THEN 1 ELSE 0 END) as applied,
24407
+ SUM(CASE WHEN status='removed' THEN 1 ELSE 0 END) as removed
24408
+ FROM wikilink_applications
24409
+ WHERE applied_at >= ? AND applied_at <= ?
24410
+ GROUP BY day ORDER BY day
24411
+ `).all(startIso, endIso + " 23:59:59");
24412
+ return rows.map((r) => ({ ...r, net: r.applied - r.removed }));
24413
+ }
24414
+ function queryFeedbackByDay(stateDb2, startIso, endIso) {
24415
+ return stateDb2.db.prepare(`
24416
+ SELECT date(created_at) as day,
24417
+ SUM(CASE WHEN correct=1 THEN 1 ELSE 0 END) as positive,
24418
+ SUM(CASE WHEN correct=0 THEN 1 ELSE 0 END) as negative,
24419
+ COUNT(*) as total
24420
+ FROM wikilink_feedback
24421
+ WHERE created_at >= ? AND created_at <= ?
24422
+ GROUP BY day ORDER BY day
24423
+ `).all(startIso, endIso + " 23:59:59");
24424
+ }
24425
+ function querySurvival(stateDb2, startIso, endIso) {
24426
+ const row = stateDb2.db.prepare(`
24427
+ SELECT
24428
+ COUNT(*) as total_applied,
24429
+ SUM(CASE WHEN status='applied' THEN 1 ELSE 0 END) as still_active,
24430
+ SUM(CASE WHEN status='removed' THEN 1 ELSE 0 END) as removed
24431
+ FROM wikilink_applications
24432
+ WHERE applied_at >= ? AND applied_at <= ?
24433
+ `).get(startIso, endIso + " 23:59:59");
24434
+ return {
24435
+ ...row,
24436
+ survival_rate: row.total_applied > 0 ? Math.round(row.still_active / row.total_applied * 1e3) / 1e3 : null
24437
+ };
24438
+ }
24439
+ function queryTopRejected(stateDb2, startIso, endIso) {
24440
+ return stateDb2.db.prepare(`
24441
+ SELECT entity,
24442
+ SUM(CASE WHEN status='removed' THEN 1 ELSE 0 END) as removed_count,
24443
+ COUNT(*) as applied_count
24444
+ FROM wikilink_applications
24445
+ WHERE applied_at >= ? AND applied_at <= ?
24446
+ GROUP BY entity COLLATE NOCASE
24447
+ HAVING removed_count > 0
24448
+ ORDER BY removed_count DESC
24449
+ LIMIT 10
24450
+ `).all(startIso, endIso + " 23:59:59").map((r) => ({
24451
+ entity: r.entity,
24452
+ removed_count: r.removed_count,
24453
+ applied_count: r.applied_count,
24454
+ rejection_rate: Math.round(r.removed_count / r.applied_count * 1e3) / 1e3
24455
+ }));
24456
+ }
24457
+ function queryFunnel(stateDb2, startIso, endIso, startMs, endMs) {
24458
+ const evalRow = stateDb2.db.prepare(`
24459
+ SELECT
24460
+ COUNT(*) as evaluations,
24461
+ SUM(CASE WHEN passed=1 THEN 1 ELSE 0 END) as threshold_passes
24462
+ FROM suggestion_events
24463
+ WHERE timestamp >= ? AND timestamp < ?
24464
+ `).get(startMs, endMs);
24465
+ const appRow = stateDb2.db.prepare(`
24466
+ SELECT
24467
+ COUNT(*) as applications,
24468
+ SUM(CASE WHEN status='applied' THEN 1 ELSE 0 END) as survivals
24469
+ FROM wikilink_applications
24470
+ WHERE applied_at >= ? AND applied_at <= ?
24471
+ `).get(startIso, endIso + " 23:59:59");
24472
+ return {
24473
+ evaluations: evalRow.evaluations,
24474
+ applications: appRow.applications,
24475
+ survivals: appRow.survivals,
24476
+ application_rate: evalRow.evaluations > 0 ? Math.round(appRow.applications / evalRow.evaluations * 1e5) / 1e5 : null,
24477
+ survival_rate: appRow.applications > 0 ? Math.round(appRow.survivals / appRow.applications * 1e3) / 1e3 : null
24478
+ };
24479
+ }
24480
+ function getLearningReport(stateDb2, entityCount, linkCount, daysBack = 7, compare = false) {
24481
+ const now = /* @__PURE__ */ new Date();
24482
+ const bounds = periodBounds(daysBack, now);
24483
+ const report = {
24484
+ period: { start: bounds.start, end: bounds.end, days: daysBack },
24485
+ applications_by_day: queryApplicationsByDay(stateDb2, bounds.start, bounds.end),
24486
+ feedback_by_day: queryFeedbackByDay(stateDb2, bounds.start, bounds.end),
24487
+ survival: querySurvival(stateDb2, bounds.start, bounds.end),
24488
+ top_rejected: queryTopRejected(stateDb2, bounds.start, bounds.end),
24489
+ funnel: queryFunnel(stateDb2, bounds.start, bounds.end, bounds.startMs, bounds.endMs),
24490
+ graph: { link_count: linkCount, entity_count: entityCount }
24491
+ };
24492
+ if (compare) {
24493
+ const prevEnd = new Date(now);
24494
+ prevEnd.setDate(prevEnd.getDate() - daysBack);
24495
+ const prevBounds = periodBounds(daysBack, prevEnd);
24496
+ const prevSurvival = querySurvival(stateDb2, prevBounds.start, prevBounds.end);
24497
+ const prevFeedback = queryFeedbackByDay(stateDb2, prevBounds.start, prevBounds.end);
24498
+ const prevApps = queryApplicationsByDay(stateDb2, prevBounds.start, prevBounds.end);
24499
+ const currAppsTotal = report.applications_by_day.reduce((s, d) => s + d.applied, 0);
24500
+ const prevAppsTotal = prevApps.reduce((s, d) => s + d.applied, 0);
24501
+ const currFeedbackTotal = report.feedback_by_day.reduce((s, d) => s + d.total, 0);
24502
+ const prevFeedbackTotal = prevFeedback.reduce((s, d) => s + d.total, 0);
24503
+ report.comparison = {
24504
+ previous_period: { start: prevBounds.start, end: prevBounds.end },
24505
+ applications_delta: currAppsTotal - prevAppsTotal,
24506
+ feedback_delta: currFeedbackTotal - prevFeedbackTotal,
24507
+ survival_rate_delta: report.survival.survival_rate != null && prevSurvival.survival_rate != null ? Math.round((report.survival.survival_rate - prevSurvival.survival_rate) * 1e3) / 1e3 : null
24508
+ };
24509
+ }
24510
+ return report;
24511
+ }
24512
+
24513
+ // src/tools/read/learningReport.ts
24514
+ function registerLearningReportTools(server2, getIndex, getStateDb3) {
24515
+ server2.tool(
24516
+ "flywheel_learning_report",
24517
+ "Get a narrative report of the flywheel auto-linking system's learning progress. Shows: applications by day, feedback (positive/negative), survival rate, top rejected entities, suggestion funnel (evaluations \u2192 applications \u2192 survivals), and graph growth. Use compare=true for period-over-period deltas.",
24518
+ {
24519
+ days_back: z38.number().min(1).max(365).optional().describe("Analysis window in days (default: 7). Use 1 for today, 2 for last 48h, etc."),
24520
+ compare: z38.boolean().optional().describe("Include comparison with the preceding equal-length period (default: false)")
24521
+ },
24522
+ async (args) => {
24523
+ const stateDb2 = getStateDb3();
24524
+ if (!stateDb2) {
24525
+ return {
24526
+ content: [{ type: "text", text: JSON.stringify({ error: "StateDb not available" }) }],
24527
+ isError: true
24528
+ };
24529
+ }
24530
+ const index = getIndex();
24531
+ const metrics = computeMetrics(index, stateDb2);
24532
+ const report = getLearningReport(
24533
+ stateDb2,
24534
+ metrics.entity_count,
24535
+ metrics.link_count,
24536
+ args.days_back ?? 7,
24537
+ args.compare ?? false
24538
+ );
24539
+ return {
24540
+ content: [{ type: "text", text: JSON.stringify(report, null, 2) }]
24541
+ };
24542
+ }
24543
+ );
24544
+ }
24545
+
24546
+ // src/tools/read/calibrationExport.ts
24547
+ import { z as z39 } from "zod";
24548
+
24549
+ // src/core/read/calibrationExport.ts
24550
+ init_wikilinkFeedback();
24551
+ init_embeddings();
24552
+ var LAYER_KEYS = [
24553
+ "contentMatch",
24554
+ "cooccurrenceBoost",
24555
+ "typeBoost",
24556
+ "contextBoost",
24557
+ "recencyBoost",
24558
+ "crossFolderBoost",
24559
+ "hubBoost",
24560
+ "feedbackAdjustment",
24561
+ "suppressionPenalty",
24562
+ "semanticBoost",
24563
+ "edgeWeightBoost"
24564
+ ];
24565
+ var THRESHOLD_SWEEP = [5, 8, 10, 12, 15, 18, 20, 25, 30];
24566
+ function sizeBucket(count) {
24567
+ if (count < 50) return "tiny";
24568
+ if (count < 200) return "small";
24569
+ if (count < 1e3) return "medium";
24570
+ if (count < 5e3) return "large";
24571
+ return "huge";
24572
+ }
24573
+ function round(n, decimals = 3) {
24574
+ const f = Math.pow(10, decimals);
24575
+ return Math.round(n * f) / f;
24576
+ }
24577
+ function queryEntityDistribution(stateDb2) {
24578
+ const rows = stateDb2.db.prepare(
24579
+ "SELECT category, count(*) as cnt FROM entities GROUP BY category ORDER BY cnt DESC"
24580
+ ).all();
24581
+ const result = {};
24582
+ for (const r of rows) result[r.category] = r.cnt;
24583
+ return result;
24584
+ }
24585
+ function queryFunnel2(stateDb2, startMs, startIso, endIso) {
24586
+ const evalRow = stateDb2.db.prepare(
24587
+ "SELECT COUNT(*) as total FROM suggestion_events WHERE timestamp >= ?"
24588
+ ).get(startMs);
24589
+ const appRow = stateDb2.db.prepare(`
24590
+ SELECT COUNT(*) as total,
24591
+ SUM(CASE WHEN status='applied' THEN 1 ELSE 0 END) as survivals,
24592
+ SUM(CASE WHEN status='removed' THEN 1 ELSE 0 END) as removals
24593
+ FROM wikilink_applications WHERE applied_at >= ? AND applied_at <= ?
24594
+ `).get(startIso, endIso + " 23:59:59");
24595
+ return {
24596
+ total_evaluations: evalRow.total,
24597
+ total_applications: appRow.total,
24598
+ total_survivals: appRow.survivals,
24599
+ total_removals: appRow.removals,
24600
+ survival_rate: appRow.total > 0 ? round(appRow.survivals / appRow.total) : null
24601
+ };
24602
+ }
24603
+ function queryLayerContributions(stateDb2, startMs) {
24604
+ const rows = stateDb2.db.prepare(
24605
+ "SELECT breakdown_json FROM suggestion_events WHERE timestamp >= ?"
24606
+ ).all(startMs);
24607
+ const sums = {};
24608
+ const topCounts = {};
24609
+ for (const k of LAYER_KEYS) {
24610
+ sums[k] = 0;
24611
+ topCounts[k] = 0;
24612
+ }
24613
+ let count = 0;
24614
+ for (const row of rows) {
24615
+ let bd;
24616
+ try {
24617
+ bd = JSON.parse(row.breakdown_json);
24618
+ } catch {
24619
+ continue;
24620
+ }
24621
+ count++;
24622
+ let topLayer = "";
24623
+ let topVal = -Infinity;
24624
+ for (const k of LAYER_KEYS) {
24625
+ const v = Math.abs(bd[k] ?? 0);
24626
+ sums[k] += v;
24627
+ if (v > topVal) {
24628
+ topVal = v;
24629
+ topLayer = k;
24630
+ }
24631
+ }
24632
+ if (topLayer) topCounts[topLayer] = (topCounts[topLayer] || 0) + 1;
24633
+ }
24634
+ const averages = {};
24635
+ for (const k of LAYER_KEYS) averages[k] = count > 0 ? round(sums[k] / count) : 0;
24636
+ return { averages, top_contributor_counts: topCounts, event_count: count };
24637
+ }
24638
+ function queryScoreDistribution(stateDb2, startMs) {
24639
+ const rows = stateDb2.db.prepare(
24640
+ "SELECT total_score FROM suggestion_events WHERE timestamp >= ? ORDER BY total_score"
24641
+ ).all(startMs);
24642
+ const bins = [];
24643
+ for (let i = 0; i < 50; i += 5) {
24644
+ bins.push({ min: i, max: i + 5, count: 0 });
24645
+ }
24646
+ bins.push({ min: 50, max: Infinity, count: 0 });
24647
+ let sum = 0;
24648
+ for (const r of rows) {
24649
+ sum += r.total_score;
24650
+ const binIdx = r.total_score >= 50 ? bins.length - 1 : Math.floor(r.total_score / 5);
24651
+ if (binIdx >= 0 && binIdx < bins.length) bins[binIdx].count++;
24652
+ }
24653
+ const n = rows.length;
24654
+ return {
24655
+ bins: bins.map((b) => ({ min: b.min, max: b.max === Infinity ? 999 : b.max, count: b.count })),
24656
+ mean_score: n > 0 ? round(sum / n) : 0,
24657
+ median_score: n > 0 ? rows[Math.floor(n / 2)].total_score : 0
24658
+ };
24659
+ }
24660
+ function querySurvivalByCategory(stateDb2, startIso, endIso) {
24661
+ const rows = stateDb2.db.prepare(`
24662
+ SELECT e.category,
24663
+ COUNT(*) as applied,
24664
+ SUM(CASE WHEN wa.status='applied' THEN 1 ELSE 0 END) as survived,
24665
+ SUM(CASE WHEN wa.status='removed' THEN 1 ELSE 0 END) as removed
24666
+ FROM wikilink_applications wa
24667
+ JOIN entities e ON e.name_lower = LOWER(wa.entity)
24668
+ WHERE wa.applied_at >= ? AND wa.applied_at <= ?
24669
+ GROUP BY e.category
24670
+ `).all(startIso, endIso + " 23:59:59");
24671
+ const result = {};
24672
+ for (const r of rows) {
24673
+ result[r.category] = {
24674
+ applied: r.applied,
24675
+ survived: r.survived,
24676
+ removed: r.removed,
24677
+ survival_rate: r.applied > 0 ? round(r.survived / r.applied) : null
24678
+ };
24679
+ }
24680
+ return result;
24681
+ }
24682
// Explicit (confidence >= 0.9) vs implicit feedback counts and accuracy over
// the inclusive [startIso, endIso] window. Accuracy is null when there are no
// samples of that kind.
function queryFeedback(stateDb2, startIso, endIso) {
  const row = stateDb2.db.prepare(`
    SELECT
      COUNT(*) as total,
      SUM(CASE WHEN confidence >= 0.9 THEN 1 ELSE 0 END) as explicit_count,
      SUM(CASE WHEN confidence < 0.9 THEN 1 ELSE 0 END) as implicit_count,
      SUM(CASE WHEN confidence >= 0.9 AND correct=1 THEN 1 ELSE 0 END) as explicit_correct,
      SUM(CASE WHEN confidence < 0.9 AND correct=1 THEN 1 ELSE 0 END) as implicit_correct
    FROM wikilink_feedback WHERE created_at >= ? AND created_at <= ?
  `).get(startIso, endIso + " 23:59:59");
  const accuracy = (correct, count) => count > 0 ? round(correct / count) : null;
  const { total, explicit_count, implicit_count, explicit_correct, implicit_correct } = row;
  return {
    total,
    explicit_count,
    implicit_count,
    explicit_accuracy: accuracy(explicit_correct, explicit_count),
    implicit_accuracy: accuracy(implicit_correct, implicit_count)
  };
}
24700
// Share of feedback-bearing entities that are currently suppressed.
// Rate is null when no entity has feedback yet (avoids a 0/0).
function querySuppression(stateDb2) {
  const withFeedback = getWeightedEntityStats(stateDb2).length;
  const suppressedCount = getSuppressedCount(stateDb2);
  return {
    entities_suppressed: suppressedCount,
    entities_with_feedback: withFeedback,
    suppression_rate: withFeedback > 0 ? round(suppressedCount / withFeedback) : null
  };
}
24709
// Average recencyBoost contribution split by outcome: suggestions whose
// applied wikilink survived vs ones that were removed. Rows with unparseable
// breakdown_json are skipped; any non-"applied" status counts as removed.
function queryRecencyAnalysis(stateDb2, startMs, startIso, endIso) {
  const rows = stateDb2.db.prepare(`
    SELECT se.breakdown_json, wa.status
    FROM suggestion_events se
    JOIN wikilink_applications wa ON LOWER(se.entity) = LOWER(wa.entity)
      AND se.note_path = wa.note_path
    WHERE se.timestamp >= ? AND wa.applied_at >= ? AND wa.applied_at <= ?
  `).all(startMs, startIso, endIso + " 23:59:59");
  const acc = {
    survived: { sum: 0, count: 0 },
    removed: { sum: 0, count: 0 }
  };
  for (const row of rows) {
    let breakdown;
    try {
      breakdown = JSON.parse(row.breakdown_json);
    } catch {
      continue;
    }
    const bucket = row.status === "applied" ? acc.survived : acc.removed;
    bucket.sum += breakdown.recencyBoost ?? 0;
    bucket.count++;
  }
  const avg = (b) => b.count > 0 ? round(b.sum / b.count) : 0;
  return {
    avg_recency_when_survived: avg(acc.survived),
    avg_recency_when_removed: avg(acc.removed)
  };
}
24740
// Measures how often co-occurrence alone drives a suggestion (no content
// match but a positive cooccurrenceBoost), and how well those
// co-occurrence-only links survive. LEFT JOIN means suggestions that were
// never applied have status == null and only count toward the rates'
// denominators, not the survival figure.
function queryCooccurrenceAnalysis(stateDb2, startMs, startIso, endIso) {
  const rows = stateDb2.db.prepare(`
    SELECT se.breakdown_json, wa.status
    FROM suggestion_events se
    LEFT JOIN wikilink_applications wa ON LOWER(se.entity) = LOWER(wa.entity)
      AND se.note_path = wa.note_path
      AND wa.applied_at >= ? AND wa.applied_at <= ?
    WHERE se.timestamp >= ?
  `).all(startIso, endIso + " 23:59:59", startMs);
  let parsed = 0;
  let boostTotal = 0;
  let coocOnly = 0;
  let coocOnlyWithOutcome = 0;
  let coocOnlySurvived = 0;
  for (const row of rows) {
    let breakdown;
    try {
      breakdown = JSON.parse(row.breakdown_json);
    } catch {
      continue; // unparseable breakdowns are excluded entirely
    }
    parsed++;
    const boost = breakdown.cooccurrenceBoost ?? 0;
    boostTotal += boost;
    const isCoocOnly = (breakdown.contentMatch ?? 0) === 0 && boost > 0;
    if (!isCoocOnly) continue;
    coocOnly++;
    if (row.status == null) continue;
    coocOnlyWithOutcome++;
    if (row.status === "applied") coocOnlySurvived++;
  }
  return {
    cooc_only_rate: parsed > 0 ? round(coocOnly / parsed) : 0,
    cooc_only_survival_rate: coocOnlyWithOutcome > 0 ? round(coocOnlySurvived / coocOnlyWithOutcome) : null,
    avg_cooc_boost: parsed > 0 ? round(boostTotal / parsed) : 0
  };
}
24775
// Threshold sweep: for each candidate threshold in THRESHOLD_SWEEP, what
// fraction of suggestion scores since startMs would pass it (rounded to 4 dp).
function queryThresholdAnalysis(stateDb2, startMs) {
  const scores = stateDb2.db.prepare(
    "SELECT total_score FROM suggestion_events WHERE timestamp >= ?"
  ).all(startMs).map((r) => r.total_score);
  const total = scores.length;
  const passRateAt = (threshold) => {
    if (total === 0) return 0;
    let passing = 0;
    for (const score of scores) {
      if (score >= threshold) passing++;
    }
    return round(passing / total, 4);
  };
  return {
    pass_rates_at_thresholds: THRESHOLD_SWEEP.map((t) => ({
      threshold: t,
      pass_rate: passRateAt(t)
    }))
  };
}
24787
// Whole days elapsed since the earliest recorded suggestion event;
// 0 when the table is empty (MIN returns NULL).
function queryFlywheelAgeDays(stateDb2) {
  const DAY_MS = 24 * 60 * 60 * 1e3;
  const row = stateDb2.db.prepare(
    "SELECT MIN(timestamp) as first_ts FROM suggestion_events"
  ).get();
  const firstTs = row?.first_ts;
  if (!firstTs) return 0;
  return Math.floor((Date.now() - firstTs) / DAY_MS);
}
24794
// Assemble the anonymized calibration export payload. Aggregates only — no
// entity names, note paths, or content are included. vault_id, when
// requested, is the first 16 hex chars of SHA-256(vaultPath), giving a
// stable anonymous identifier for longitudinal tracking.
function getCalibrationExport(stateDb2, metrics, config, daysBack = 30, includeVaultId = true) {
  const now = /* @__PURE__ */ new Date();
  // Window start: local midnight, daysBack days ago (inclusive of today).
  const windowStart = new Date(now);
  windowStart.setDate(windowStart.getDate() - daysBack + 1);
  windowStart.setHours(0, 0, 0, 0);
  const startMs = windowStart.getTime();
  const startIso = windowStart.toISOString().slice(0, 10);
  // NOTE(review): startIso/endIso are UTC dates while the window start is
  // local midnight — near the UTC day boundary these can disagree by a day;
  // confirm this is intended.
  const endIso = now.toISOString().slice(0, 10);
  let vaultId;
  if (includeVaultId) {
    const crypto2 = __require("crypto");
    vaultId = crypto2.createHash("sha256").update(stateDb2.vaultPath).digest("hex").slice(0, 16);
  }
  const vaultProfile = {
    size_bucket: sizeBucket(metrics.note_count),
    entity_bucket: sizeBucket(metrics.entity_count),
    avg_links_per_note: round(metrics.avg_links_per_note, 1),
    connected_ratio: round(metrics.connected_ratio),
    semantic_enabled: hasEmbeddingsIndex(),
    flywheel_age_days: queryFlywheelAgeDays(stateDb2),
    strictness_mode: config.wikilink_strictness ?? "balanced",
    adaptive_strictness: config.adaptive_strictness ?? true
  };
  return {
    schema_version: 1,
    exported_at: now.toISOString(),
    vault_id: vaultId,
    vault_profile: vaultProfile,
    entity_distribution: queryEntityDistribution(stateDb2),
    funnel: queryFunnel2(stateDb2, startMs, startIso, endIso),
    layer_contributions: queryLayerContributions(stateDb2, startMs),
    score_distribution: queryScoreDistribution(stateDb2, startMs),
    survival_by_category: querySurvivalByCategory(stateDb2, startIso, endIso),
    feedback: queryFeedback(stateDb2, startIso, endIso),
    suppression: querySuppression(stateDb2),
    recency_analysis: queryRecencyAnalysis(stateDb2, startMs, startIso, endIso),
    cooccurrence_analysis: queryCooccurrenceAnalysis(stateDb2, startMs, startIso, endIso),
    threshold_analysis: queryThresholdAnalysis(stateDb2, startMs)
  };
}
24833
+
24834
+ // src/tools/read/calibrationExport.ts
24835
// Registers the flywheel_calibration_export MCP tool. The handler builds an
// anonymized aggregate report via getCalibrationExport, or errors out early
// when no StateDb is available.
function registerCalibrationExportTools(server2, getIndex, getStateDb3, getConfig2) {
  const schema = {
    days_back: z39.number().min(1).max(365).optional().describe("Analysis window in days (default: 30)"),
    include_vault_id: z39.boolean().optional().describe("Include anonymous vault ID for longitudinal tracking (default: true)")
  };
  const handler = async (args) => {
    const stateDb2 = getStateDb3();
    if (!stateDb2) {
      return {
        content: [{ type: "text", text: JSON.stringify({ error: "StateDb not available" }) }],
        isError: true
      };
    }
    const report = getCalibrationExport(
      stateDb2,
      computeMetrics(getIndex(), stateDb2),
      getConfig2(),
      args.days_back ?? 30,
      args.include_vault_id ?? true
    );
    return {
      content: [{ type: "text", text: JSON.stringify(report, null, 2) }]
    };
  };
  server2.tool(
    "flywheel_calibration_export",
    "Export anonymized aggregate scoring data for cross-vault algorithm calibration. No entity names, note paths, or content \u2014 safe to share. Includes: suggestion funnel, per-layer contribution averages, survival rates by entity category, score distribution, suppression stats, recency/co-occurrence effectiveness, and threshold sweep.",
    schema,
    handler
  );
}
24867
+
24191
24868
  // src/resources/vault.ts
24192
24869
  function registerVaultResources(server2, getIndex) {
24193
24870
  server2.registerResource(
@@ -24359,7 +25036,7 @@ function applyToolGating(targetServer, categories, getDb4, registry, getVaultPat
24359
25036
  let totalBytes = 0;
24360
25037
  for (const p of notePaths) {
24361
25038
  try {
24362
- totalBytes += statSync6(path35.join(vp, p)).size;
25039
+ totalBytes += statSync6(path37.join(vp, p)).size;
24363
25040
  } catch {
24364
25041
  }
24365
25042
  }
@@ -24448,7 +25125,7 @@ function applyToolGating(targetServer, categories, getDb4, registry, getVaultPat
24448
25125
  const schemaIdx = handlerIdx - 1;
24449
25126
  const schema = args[schemaIdx];
24450
25127
  if (schema && typeof schema === "object" && !Array.isArray(schema)) {
24451
- schema.vault = z38.string().optional().describe(
25128
+ schema.vault = z40.string().optional().describe(
24452
25129
  `Vault name for multi-vault mode. Available: ${registry.getVaultNames().join(", ")}. Default: ${registry.primaryName}`
24453
25130
  );
24454
25131
  }
@@ -24571,6 +25248,8 @@ function registerAllTools(targetServer, ctx) {
24571
25248
  registerTemporalAnalysisTools(targetServer, gvi, gvp, gsd);
24572
25249
  registerSessionHistoryTools(targetServer, gsd);
24573
25250
  registerEntityHistoryTools(targetServer, gsd);
25251
+ registerLearningReportTools(targetServer, gvi, gsd);
25252
+ registerCalibrationExportTools(targetServer, gvi, gsd, gcf);
24574
25253
  registerMemoryTools(targetServer, gsd);
24575
25254
  registerRecallTools(targetServer, gsd, gvp, () => gvi() ?? null);
24576
25255
  registerBriefTools(targetServer, gsd);
@@ -24645,6 +25324,15 @@ registerAllTools(server, _registryCtx);
24645
25324
  var categoryList = Array.from(enabledCategories).sort().join(", ");
24646
25325
  serverLog("server", `Tool categories: ${categoryList}`);
24647
25326
  serverLog("server", `Registered ${_gatingResult.registered} tools, skipped ${_gatingResult.skipped}`);
25327
// Restore a vault's co-occurrence index from its StateDb cache, if present.
// No-op when the vault has no StateDb or the cache is empty.
function loadVaultCooccurrence(ctx) {
  if (!ctx.stateDb) return;
  const cached = loadCooccurrenceFromStateDb(ctx.stateDb);
  if (!cached) return;
  ctx.cooccurrenceIndex = cached.index;
  ctx.lastCooccurrenceRebuildAt = cached.builtAt;
  const entityCount = Object.keys(cached.index.associations).length;
  serverLog("index", `[${ctx.name}] Co-occurrence: loaded from cache (${entityCount} entities, ${cached.index._metadata.total_associations} associations)`);
}
24648
25336
  async function initializeVault(name, vaultPathArg) {
24649
25337
  const ctx = {
24650
25338
  name,
@@ -24848,13 +25536,10 @@ async function main() {
24848
25536
  if (vaultConfigs) {
24849
25537
  vaultRegistry = new VaultRegistry(vaultConfigs[0].name);
24850
25538
  serverLog("server", `Multi-vault mode: ${vaultConfigs.map((v) => v.name).join(", ")}`);
24851
- for (const vc of vaultConfigs) {
24852
- const ctx = await initializeVault(vc.name, vc.path);
24853
- vaultRegistry.addContext(ctx);
24854
- }
24855
- const primary = vaultRegistry.getContext();
24856
- stateDb = primary.stateDb;
24857
- activateVault(primary);
25539
+ const primaryCtx2 = await initializeVault(vaultConfigs[0].name, vaultConfigs[0].path);
25540
+ vaultRegistry.addContext(primaryCtx2);
25541
+ stateDb = primaryCtx2.stateDb;
25542
+ activateVault(primaryCtx2);
24858
25543
  } else {
24859
25544
  vaultRegistry = new VaultRegistry("default");
24860
25545
  const ctx = await initializeVault("default", vaultPath);
@@ -24862,25 +25547,11 @@ async function main() {
24862
25547
  stateDb = ctx.stateDb;
24863
25548
  activateVault(ctx);
24864
25549
  }
24865
- for (const ctx of vaultRegistry.getAllContexts()) {
24866
- if (ctx.stateDb) {
24867
- const cachedCooc = loadCooccurrenceFromStateDb(ctx.stateDb);
24868
- if (cachedCooc) {
24869
- ctx.cooccurrenceIndex = cachedCooc.index;
24870
- ctx.lastCooccurrenceRebuildAt = cachedCooc.builtAt;
24871
- serverLog("index", `[${ctx.name}] Co-occurrence: loaded from cache (${Object.keys(cachedCooc.index.associations).length} entities, ${cachedCooc.index._metadata.total_associations} associations)`);
24872
- }
24873
- }
24874
- }
24875
- {
24876
- const primary = vaultRegistry.getContext();
24877
- activateVault(primary);
24878
- }
24879
25550
  const transportMode = (process.env.FLYWHEEL_TRANSPORT ?? "stdio").toLowerCase();
24880
25551
  if (transportMode === "stdio" || transportMode === "both") {
24881
25552
  const transport = new StdioServerTransport();
24882
25553
  await server.connect(transport);
24883
- serverLog("server", "MCP server connected (stdio)");
25554
+ serverLog("server", `MCP server connected (stdio) in ${Date.now() - startTime}ms`);
24884
25555
  }
24885
25556
  if (transportMode === "http" || transportMode === "both") {
24886
25557
  const { createMcpExpressApp } = await import("@modelcontextprotocol/sdk/server/express.js");
@@ -24913,10 +25584,27 @@ async function main() {
24913
25584
  serverLog("server", `HTTP transport on ${httpHost}:${httpPort}`);
24914
25585
  });
24915
25586
  }
24916
- for (const vaultCtx of vaultRegistry.getAllContexts()) {
24917
- activateVault(vaultCtx);
24918
- stateDb = vaultCtx.stateDb;
24919
- await bootVault(vaultCtx, startTime);
25587
+ const primaryCtx = vaultRegistry.getContext();
25588
+ loadVaultCooccurrence(primaryCtx);
25589
+ activateVault(primaryCtx);
25590
+ await bootVault(primaryCtx, startTime);
25591
+ if (vaultConfigs && vaultConfigs.length > 1) {
25592
+ const secondaryConfigs = vaultConfigs.slice(1);
25593
+ (async () => {
25594
+ for (const vc of secondaryConfigs) {
25595
+ try {
25596
+ const ctx = await initializeVault(vc.name, vc.path);
25597
+ vaultRegistry.addContext(ctx);
25598
+ loadVaultCooccurrence(ctx);
25599
+ activateVault(ctx);
25600
+ await bootVault(ctx, startTime);
25601
+ serverLog("server", `[${vc.name}] Secondary vault ready`);
25602
+ } catch (err) {
25603
+ serverLog("server", `[${vc.name}] Secondary vault boot failed: ${err}`, "error");
25604
+ }
25605
+ }
25606
+ activateVault(vaultRegistry.getContext());
25607
+ })();
24920
25608
  }
24921
25609
  }
24922
25610
  var DEFAULT_ENTITY_EXCLUDE_FOLDERS = ["node_modules", "templates", "attachments", "tmp"];
@@ -24946,7 +25634,7 @@ async function buildStartupCatchupBatch(vaultPath2, sinceMs) {
24946
25634
  return;
24947
25635
  }
24948
25636
  for (const entry of entries) {
24949
- const fullPath = path36.join(dir, entry.name);
25637
+ const fullPath = path38.join(dir, entry.name);
24950
25638
  if (entry.isDirectory()) {
24951
25639
  if (entry.name.startsWith(".") || entry.name === "node_modules") continue;
24952
25640
  await scanDir(fullPath);
@@ -24956,7 +25644,7 @@ async function buildStartupCatchupBatch(vaultPath2, sinceMs) {
24956
25644
  if (stat5.mtimeMs > sinceMs) {
24957
25645
  events.push({
24958
25646
  type: "upsert",
24959
- path: path36.relative(vaultPath2, fullPath),
25647
+ path: path38.relative(vaultPath2, fullPath),
24960
25648
  originalEvents: []
24961
25649
  });
24962
25650
  }
@@ -25153,8 +25841,8 @@ async function runPostIndexWork(ctx) {
25153
25841
  }
25154
25842
  } catch {
25155
25843
  try {
25156
- const dir = path36.dirname(rawPath);
25157
- const base = path36.basename(rawPath);
25844
+ const dir = path38.dirname(rawPath);
25845
+ const base = path38.basename(rawPath);
25158
25846
  const resolvedDir = realpathSync(dir).replace(/\\/g, "/");
25159
25847
  for (const prefix of vaultPrefixes) {
25160
25848
  if (resolvedDir.startsWith(prefix + "/") || resolvedDir === prefix) {
@@ -25186,7 +25874,7 @@ async function runPostIndexWork(ctx) {
25186
25874
  continue;
25187
25875
  }
25188
25876
  try {
25189
- const content = await fs34.readFile(path36.join(vp, event.path), "utf-8");
25877
+ const content = await fs34.readFile(path38.join(vp, event.path), "utf-8");
25190
25878
  const hash = createHash3("sha256").update(content).digest("hex").slice(0, 16);
25191
25879
  if (lastContentHashes.get(event.path) === hash) {
25192
25880
  serverLog("watcher", `Hash unchanged, skipping: ${event.path}`);