@velvetmonkey/flywheel-memory 2.0.29 → 2.0.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +255 -112
  2. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -2485,7 +2485,8 @@ import {
2485
2485
  } from "@velvetmonkey/vault-core";
2486
2486
  var DEFAULT_CONFIG = {
2487
2487
  exclude_task_tags: [],
2488
- exclude_analysis_tags: []
2488
+ exclude_analysis_tags: [],
2489
+ exclude_entities: []
2489
2490
  };
2490
2491
  function loadConfig(stateDb2) {
2491
2492
  if (stateDb2) {
@@ -5898,36 +5899,38 @@ async function buildFTS5Index(vaultPath2) {
5898
5899
  if (!db2) {
5899
5900
  throw new Error("FTS5 database not initialized. Call setFTS5Database() first.");
5900
5901
  }
5901
- db2.exec("DELETE FROM notes_fts");
5902
5902
  const files = await scanVault(vaultPath2);
5903
5903
  const indexableFiles = files.filter((f) => shouldIndexFile2(f.path));
5904
+ const rows = [];
5905
+ for (const file of indexableFiles) {
5906
+ try {
5907
+ const stats = fs7.statSync(file.absolutePath);
5908
+ if (stats.size > MAX_INDEX_FILE_SIZE) {
5909
+ continue;
5910
+ }
5911
+ const raw = fs7.readFileSync(file.absolutePath, "utf-8");
5912
+ const { frontmatter, body } = splitFrontmatter(raw);
5913
+ const title = file.path.replace(/\.md$/, "").split("/").pop() || file.path;
5914
+ rows.push([file.path, title, frontmatter, body]);
5915
+ } catch (err) {
5916
+ console.error(`[FTS5] Skipping ${file.path}:`, err);
5917
+ }
5918
+ }
5904
5919
  const insert = db2.prepare(
5905
5920
  "INSERT INTO notes_fts (path, title, frontmatter, content) VALUES (?, ?, ?, ?)"
5906
5921
  );
5907
- const insertMany = db2.transaction((filesToIndex) => {
5908
- let indexed2 = 0;
5909
- for (const file of filesToIndex) {
5910
- try {
5911
- const stats = fs7.statSync(file.absolutePath);
5912
- if (stats.size > MAX_INDEX_FILE_SIZE) {
5913
- continue;
5914
- }
5915
- const raw = fs7.readFileSync(file.absolutePath, "utf-8");
5916
- const { frontmatter, body } = splitFrontmatter(raw);
5917
- const title = file.path.replace(/\.md$/, "").split("/").pop() || file.path;
5918
- insert.run(file.path, title, frontmatter, body);
5919
- indexed2++;
5920
- } catch (err) {
5921
- console.error(`[FTS5] Skipping ${file.path}:`, err);
5922
- }
5923
- }
5924
- return indexed2;
5925
- });
5926
- const indexed = insertMany(indexableFiles);
5927
5922
  const now = /* @__PURE__ */ new Date();
5928
- db2.prepare(
5929
- "INSERT OR REPLACE INTO fts_metadata (key, value) VALUES (?, ?)"
5930
- ).run("last_built", now.toISOString());
5923
+ const swapAll = db2.transaction(() => {
5924
+ db2.exec("DELETE FROM notes_fts");
5925
+ for (const row of rows) {
5926
+ insert.run(...row);
5927
+ }
5928
+ db2.prepare(
5929
+ "INSERT OR REPLACE INTO fts_metadata (key, value) VALUES (?, ?)"
5930
+ ).run("last_built", now.toISOString());
5931
+ });
5932
+ swapAll();
5933
+ const indexed = rows.length;
5931
5934
  state = {
5932
5935
  ready: true,
5933
5936
  building: false,
@@ -6187,6 +6190,9 @@ function setTaskCacheDatabase(database) {
6187
6190
  function isTaskCacheReady() {
6188
6191
  return cacheReady && db3 !== null;
6189
6192
  }
6193
+ function isTaskCacheBuilding() {
6194
+ return rebuildInProgress;
6195
+ }
6190
6196
  async function buildTaskCache(vaultPath2, index, excludeTags) {
6191
6197
  if (!db3) {
6192
6198
  throw new Error("Task cache database not initialized. Call setTaskCacheDatabase() first.");
@@ -6195,53 +6201,47 @@ async function buildTaskCache(vaultPath2, index, excludeTags) {
6195
6201
  rebuildInProgress = true;
6196
6202
  const start = Date.now();
6197
6203
  try {
6204
+ const notePaths = [];
6205
+ for (const note of index.notes.values()) {
6206
+ notePaths.push(note.path);
6207
+ }
6208
+ const allRows = [];
6209
+ for (const notePath of notePaths) {
6210
+ const absolutePath = path10.join(vaultPath2, notePath);
6211
+ const tasks = await extractTasksFromNote(notePath, absolutePath);
6212
+ for (const task of tasks) {
6213
+ if (excludeTags?.length && excludeTags.some((t) => task.tags.includes(t))) {
6214
+ continue;
6215
+ }
6216
+ allRows.push([
6217
+ task.path,
6218
+ task.line,
6219
+ task.text,
6220
+ task.status,
6221
+ task.raw,
6222
+ task.context ?? null,
6223
+ task.tags.length > 0 ? JSON.stringify(task.tags) : null,
6224
+ task.due_date ?? null
6225
+ ]);
6226
+ }
6227
+ }
6198
6228
  const insertStmt = db3.prepare(`
6199
6229
  INSERT OR REPLACE INTO tasks (path, line, text, status, raw, context, tags_json, due_date)
6200
6230
  VALUES (?, ?, ?, ?, ?, ?, ?, ?)
6201
6231
  `);
6202
- const insertAll = db3.transaction(() => {
6232
+ const swapAll = db3.transaction(() => {
6203
6233
  db3.prepare("DELETE FROM tasks").run();
6204
- let count = 0;
6205
- const promises7 = [];
6206
- const notePaths2 = [];
6207
- for (const note of index.notes.values()) {
6208
- notePaths2.push(note.path);
6234
+ for (const row of allRows) {
6235
+ insertStmt.run(...row);
6209
6236
  }
6210
- return { notePaths: notePaths2, insertStmt };
6237
+ db3.prepare(
6238
+ "INSERT OR REPLACE INTO fts_metadata (key, value) VALUES (?, ?)"
6239
+ ).run("task_cache_built", (/* @__PURE__ */ new Date()).toISOString());
6211
6240
  });
6212
- const { notePaths, insertStmt: stmt } = insertAll();
6213
- let totalTasks = 0;
6214
- for (const notePath of notePaths) {
6215
- const absolutePath = path10.join(vaultPath2, notePath);
6216
- const tasks = await extractTasksFromNote(notePath, absolutePath);
6217
- if (tasks.length > 0) {
6218
- const insertBatch = db3.transaction(() => {
6219
- for (const task of tasks) {
6220
- if (excludeTags?.length && excludeTags.some((t) => task.tags.includes(t))) {
6221
- continue;
6222
- }
6223
- stmt.run(
6224
- task.path,
6225
- task.line,
6226
- task.text,
6227
- task.status,
6228
- task.raw,
6229
- task.context ?? null,
6230
- task.tags.length > 0 ? JSON.stringify(task.tags) : null,
6231
- task.due_date ?? null
6232
- );
6233
- totalTasks++;
6234
- }
6235
- });
6236
- insertBatch();
6237
- }
6238
- }
6239
- db3.prepare(
6240
- "INSERT OR REPLACE INTO fts_metadata (key, value) VALUES (?, ?)"
6241
- ).run("task_cache_built", (/* @__PURE__ */ new Date()).toISOString());
6241
+ swapAll();
6242
6242
  cacheReady = true;
6243
6243
  const duration = Date.now() - start;
6244
- serverLog("tasks", `Task cache built: ${totalTasks} tasks from ${notePaths.length} notes in ${duration}ms`);
6244
+ serverLog("tasks", `Task cache built: ${allRows.length} tasks from ${notePaths.length} notes in ${duration}ms`);
6245
6245
  } finally {
6246
6246
  rebuildInProgress = false;
6247
6247
  }
@@ -6675,10 +6675,20 @@ async function getContext(vaultPath2, sourcePath, line, contextLines = 1) {
6675
6675
  try {
6676
6676
  const fullPath = path11.join(vaultPath2, sourcePath);
6677
6677
  const content = await fs9.promises.readFile(fullPath, "utf-8");
6678
- const lines = content.split("\n");
6679
- const startLine = Math.max(0, line - 1 - contextLines);
6680
- const endLine = Math.min(lines.length, line + contextLines);
6681
- return lines.slice(startLine, endLine).join("\n").trim();
6678
+ const allLines = content.split("\n");
6679
+ let fmLines = 0;
6680
+ if (allLines[0]?.trimEnd() === "---") {
6681
+ for (let i = 1; i < allLines.length; i++) {
6682
+ if (allLines[i].trimEnd() === "---") {
6683
+ fmLines = i + 1;
6684
+ break;
6685
+ }
6686
+ }
6687
+ }
6688
+ const absLine = line + fmLines;
6689
+ const startLine = Math.max(0, absLine - 1 - contextLines);
6690
+ const endLine = Math.min(allLines.length, absLine + contextLines);
6691
+ return allLines.slice(startLine, endLine).join("\n").trim();
6682
6692
  } catch {
6683
6693
  return "";
6684
6694
  }
@@ -7303,10 +7313,28 @@ function getActivitySummary(index, days) {
7303
7313
  import { SCHEMA_VERSION } from "@velvetmonkey/vault-core";
7304
7314
 
7305
7315
  // src/core/shared/indexActivity.ts
7316
+ function createStepTracker() {
7317
+ const steps = [];
7318
+ let current = null;
7319
+ return {
7320
+ steps,
7321
+ start(name, input) {
7322
+ current = { name, input, startTime: Date.now() };
7323
+ },
7324
+ end(output) {
7325
+ if (!current) return;
7326
+ steps.push({ name: current.name, duration_ms: Date.now() - current.startTime, input: current.input, output });
7327
+ current = null;
7328
+ },
7329
+ skip(name, reason) {
7330
+ steps.push({ name, duration_ms: 0, input: {}, output: {}, skipped: true, skip_reason: reason });
7331
+ }
7332
+ };
7333
+ }
7306
7334
  function recordIndexEvent(stateDb2, event) {
7307
7335
  stateDb2.db.prepare(
7308
- `INSERT INTO index_events (timestamp, trigger, duration_ms, success, note_count, files_changed, changed_paths, error)
7309
- VALUES (?, ?, ?, ?, ?, ?, ?, ?)`
7336
+ `INSERT INTO index_events (timestamp, trigger, duration_ms, success, note_count, files_changed, changed_paths, error, steps)
7337
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
7310
7338
  ).run(
7311
7339
  Date.now(),
7312
7340
  event.trigger,
@@ -7315,7 +7343,8 @@ function recordIndexEvent(stateDb2, event) {
7315
7343
  event.note_count ?? null,
7316
7344
  event.files_changed ?? null,
7317
7345
  event.changed_paths ? JSON.stringify(event.changed_paths) : null,
7318
- event.error ?? null
7346
+ event.error ?? null,
7347
+ event.steps ? JSON.stringify(event.steps) : null
7319
7348
  );
7320
7349
  }
7321
7350
  function rowToEvent(row) {
@@ -7328,7 +7357,8 @@ function rowToEvent(row) {
7328
7357
  note_count: row.note_count,
7329
7358
  files_changed: row.files_changed,
7330
7359
  changed_paths: row.changed_paths ? JSON.parse(row.changed_paths) : null,
7331
- error: row.error
7360
+ error: row.error,
7361
+ steps: row.steps ? JSON.parse(row.steps) : null
7332
7362
  };
7333
7363
  }
7334
7364
  function getRecentIndexEvents(stateDb2, limit = 20) {
@@ -7415,11 +7445,27 @@ function registerHealthTools(server2, getIndex, getVaultPath, getConfig = () =>
7415
7445
  duration_ms: z3.number(),
7416
7446
  ago_seconds: z3.number()
7417
7447
  }).optional().describe("Most recent index rebuild event"),
7448
+ last_pipeline: z3.object({
7449
+ timestamp: z3.number(),
7450
+ trigger: z3.string(),
7451
+ duration_ms: z3.number(),
7452
+ files_changed: z3.number().nullable(),
7453
+ steps: z3.array(z3.object({
7454
+ name: z3.string(),
7455
+ duration_ms: z3.number(),
7456
+ input: z3.record(z3.unknown()),
7457
+ output: z3.record(z3.unknown()),
7458
+ skipped: z3.boolean().optional(),
7459
+ skip_reason: z3.string().optional()
7460
+ }))
7461
+ }).optional().describe("Most recent watcher pipeline run with per-step timing"),
7418
7462
  fts5_ready: z3.boolean().describe("Whether the FTS5 keyword search index is ready"),
7419
7463
  fts5_building: z3.boolean().describe("Whether the FTS5 keyword search index is currently building"),
7420
7464
  embeddings_building: z3.boolean().describe("Whether semantic embeddings are currently building"),
7421
7465
  embeddings_ready: z3.boolean().describe("Whether semantic embeddings have been built (enables hybrid keyword+semantic search)"),
7422
7466
  embeddings_count: z3.coerce.number().describe("Number of notes with semantic embeddings"),
7467
+ tasks_ready: z3.boolean().describe("Whether the task cache is ready to serve queries"),
7468
+ tasks_building: z3.boolean().describe("Whether the task cache is currently rebuilding"),
7423
7469
  recommendations: z3.array(z3.string()).describe("Suggested actions if any issues detected")
7424
7470
  };
7425
7471
  server2.registerTool(
@@ -7509,6 +7555,23 @@ function registerHealthTools(server2, getIndex, getVaultPath, getConfig = () =>
7509
7555
  } catch {
7510
7556
  }
7511
7557
  }
7558
+ let lastPipeline;
7559
+ if (stateDb2) {
7560
+ try {
7561
+ const events = getRecentIndexEvents(stateDb2, 1);
7562
+ if (events.length > 0 && events[0].steps && events[0].steps.length > 0) {
7563
+ const evt = events[0];
7564
+ lastPipeline = {
7565
+ timestamp: evt.timestamp,
7566
+ trigger: evt.trigger,
7567
+ duration_ms: evt.duration_ms,
7568
+ files_changed: evt.files_changed,
7569
+ steps: evt.steps
7570
+ };
7571
+ }
7572
+ } catch {
7573
+ }
7574
+ }
7512
7575
  const ftsState = getFTS5State();
7513
7576
  const output = {
7514
7577
  status,
@@ -7528,11 +7591,14 @@ function registerHealthTools(server2, getIndex, getVaultPath, getConfig = () =>
7528
7591
  periodic_notes: periodicNotes && periodicNotes.length > 0 ? periodicNotes : void 0,
7529
7592
  config: configInfo,
7530
7593
  last_rebuild: lastRebuild,
7594
+ last_pipeline: lastPipeline,
7531
7595
  fts5_ready: ftsState.ready,
7532
7596
  fts5_building: ftsState.building,
7533
7597
  embeddings_building: isEmbeddingsBuilding(),
7534
7598
  embeddings_ready: hasEmbeddingsIndex(),
7535
7599
  embeddings_count: getEmbeddingsCount(),
7600
+ tasks_ready: isTaskCacheReady(),
7601
+ tasks_building: isTaskCacheBuilding(),
7536
7602
  recommendations
7537
7603
  };
7538
7604
  return {
@@ -9787,6 +9853,35 @@ function isPeriodicNote(notePath) {
9787
9853
  const folder = notePath.split("/")[0]?.toLowerCase() || "";
9788
9854
  return patterns.some((p) => p.test(nameWithoutExt)) || periodicFolders.includes(folder);
9789
9855
  }
9856
+ function getExcludedPaths(index, config) {
9857
+ const excluded = /* @__PURE__ */ new Set();
9858
+ const excludeTags = new Set((config.exclude_analysis_tags ?? []).map((t) => t.toLowerCase()));
9859
+ const excludeEntities = new Set((config.exclude_entities ?? []).map((e) => e.toLowerCase()));
9860
+ if (excludeTags.size === 0 && excludeEntities.size === 0) return excluded;
9861
+ for (const note of index.notes.values()) {
9862
+ if (excludeTags.size > 0) {
9863
+ const tags = note.frontmatter?.tags;
9864
+ const tagList = Array.isArray(tags) ? tags : typeof tags === "string" ? [tags] : [];
9865
+ if (tagList.some((t) => excludeTags.has(String(t).toLowerCase()))) {
9866
+ excluded.add(note.path);
9867
+ continue;
9868
+ }
9869
+ }
9870
+ if (excludeEntities.size > 0) {
9871
+ if (excludeEntities.has(note.title.toLowerCase())) {
9872
+ excluded.add(note.path);
9873
+ continue;
9874
+ }
9875
+ for (const alias of note.aliases) {
9876
+ if (excludeEntities.has(alias.toLowerCase())) {
9877
+ excluded.add(note.path);
9878
+ break;
9879
+ }
9880
+ }
9881
+ }
9882
+ }
9883
+ return excluded;
9884
+ }
9790
9885
  function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb, getConfig) {
9791
9886
  server2.registerTool(
9792
9887
  "graph_analysis",
@@ -9808,9 +9903,11 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9808
9903
  requireIndex();
9809
9904
  const limit = Math.min(requestedLimit ?? 50, MAX_LIMIT);
9810
9905
  const index = getIndex();
9906
+ const config = getConfig?.() ?? {};
9907
+ const excludedPaths = getExcludedPaths(index, config);
9811
9908
  switch (analysis) {
9812
9909
  case "orphans": {
9813
- const allOrphans = findOrphanNotes(index, folder).filter((o) => !isPeriodicNote(o.path));
9910
+ const allOrphans = findOrphanNotes(index, folder).filter((o) => !isPeriodicNote(o.path) && !excludedPaths.has(o.path));
9814
9911
  const orphans = allOrphans.slice(offset, offset + limit);
9815
9912
  return {
9816
9913
  content: [{ type: "text", text: JSON.stringify({
@@ -9827,7 +9924,7 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9827
9924
  };
9828
9925
  }
9829
9926
  case "dead_ends": {
9830
- const allResults = findDeadEnds(index, folder, min_backlinks);
9927
+ const allResults = findDeadEnds(index, folder, min_backlinks).filter((n) => !excludedPaths.has(n.path));
9831
9928
  const result = allResults.slice(offset, offset + limit);
9832
9929
  return {
9833
9930
  content: [{ type: "text", text: JSON.stringify({
@@ -9840,7 +9937,7 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9840
9937
  };
9841
9938
  }
9842
9939
  case "sources": {
9843
- const allResults = findSources(index, folder, min_outlinks);
9940
+ const allResults = findSources(index, folder, min_outlinks).filter((n) => !excludedPaths.has(n.path));
9844
9941
  const result = allResults.slice(offset, offset + limit);
9845
9942
  return {
9846
9943
  content: [{ type: "text", text: JSON.stringify({
@@ -9853,17 +9950,7 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9853
9950
  };
9854
9951
  }
9855
9952
  case "hubs": {
9856
- const excludeTags = new Set(
9857
- (getConfig?.()?.exclude_analysis_tags ?? []).map((t) => t.toLowerCase())
9858
- );
9859
- const allHubs = findHubNotes(index, min_links).filter((h) => {
9860
- if (excludeTags.size === 0) return true;
9861
- const note = index.notes.get(h.path);
9862
- if (!note) return true;
9863
- const tags = note.frontmatter?.tags;
9864
- const tagList = Array.isArray(tags) ? tags : typeof tags === "string" ? [tags] : [];
9865
- return !tagList.some((t) => excludeTags.has(String(t).toLowerCase()));
9866
- });
9953
+ const allHubs = findHubNotes(index, min_links).filter((h) => !excludedPaths.has(h.path));
9867
9954
  const hubs = allHubs.slice(offset, offset + limit);
9868
9955
  return {
9869
9956
  content: [{ type: "text", text: JSON.stringify({
@@ -9889,7 +9976,7 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9889
9976
  }, null, 2) }]
9890
9977
  };
9891
9978
  }
9892
- const result = getStaleNotes(index, days, min_backlinks).slice(0, limit);
9979
+ const result = getStaleNotes(index, days, min_backlinks).filter((n) => !excludedPaths.has(n.path)).slice(0, limit);
9893
9980
  return {
9894
9981
  content: [{ type: "text", text: JSON.stringify({
9895
9982
  analysis: "stale",
@@ -9905,7 +9992,7 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9905
9992
  case "immature": {
9906
9993
  const vaultPath2 = getVaultPath();
9907
9994
  const allNotes = Array.from(index.notes.values()).filter(
9908
- (note) => (!folder || note.path.startsWith(folder + "/") || note.path.substring(0, note.path.lastIndexOf("/")) === folder) && !isPeriodicNote(note.path)
9995
+ (note) => (!folder || note.path.startsWith(folder + "/") || note.path.substring(0, note.path.lastIndexOf("/")) === folder) && !isPeriodicNote(note.path) && !excludedPaths.has(note.path)
9909
9996
  );
9910
9997
  const conventions = inferFolderConventions(index, folder, 0.5);
9911
9998
  const expectedFields = conventions.inferred_fields.map((f) => f.name);
@@ -9990,22 +10077,20 @@ function registerGraphAnalysisTools(server2, getIndex, getVaultPath, getStateDb,
9990
10077
  }
9991
10078
  const daysBack = days ?? 30;
9992
10079
  let hubs = getEmergingHubs(db4, daysBack);
9993
- const excludeTags = new Set(
9994
- (getConfig?.()?.exclude_analysis_tags ?? []).map((t) => t.toLowerCase())
9995
- );
9996
- if (excludeTags.size > 0) {
10080
+ if (excludedPaths.size > 0) {
9997
10081
  const notesByTitle = /* @__PURE__ */ new Map();
9998
10082
  for (const note of index.notes.values()) {
9999
10083
  notesByTitle.set(note.title.toLowerCase(), note);
10000
10084
  }
10001
10085
  hubs = hubs.filter((hub) => {
10002
10086
  const note = notesByTitle.get(hub.entity.toLowerCase());
10003
- if (!note) return true;
10004
- const tags = note.frontmatter?.tags;
10005
- const tagList = Array.isArray(tags) ? tags : typeof tags === "string" ? [tags] : [];
10006
- return !tagList.some((t) => excludeTags.has(String(t).toLowerCase()));
10087
+ return !note || !excludedPaths.has(note.path);
10007
10088
  });
10008
10089
  }
10090
+ const excludeEntities = new Set((config.exclude_entities ?? []).map((e) => e.toLowerCase()));
10091
+ if (excludeEntities.size > 0) {
10092
+ hubs = hubs.filter((hub) => !excludeEntities.has(hub.entity.toLowerCase()));
10093
+ }
10009
10094
  return {
10010
10095
  content: [{ type: "text", text: JSON.stringify({
10011
10096
  analysis: "emerging_hubs",
@@ -15782,6 +15867,7 @@ async function main() {
15782
15867
  setFTS5Database(stateDb.db);
15783
15868
  setEmbeddingsDatabase(stateDb.db);
15784
15869
  setTaskCacheDatabase(stateDb.db);
15870
+ serverLog("statedb", "Injected FTS5, embeddings, task cache handles");
15785
15871
  loadEntityEmbeddingsToMemory();
15786
15872
  setWriteStateDb(stateDb);
15787
15873
  } catch (err) {
@@ -15830,7 +15916,8 @@ async function main() {
15830
15916
  vaultIndex = cachedIndex;
15831
15917
  setIndexState("ready");
15832
15918
  const duration = Date.now() - startTime;
15833
- serverLog("index", `Loaded from cache in ${duration}ms \u2014 ${cachedIndex.notes.size} notes`);
15919
+ const cacheAge = cachedIndex.builtAt ? Math.round((Date.now() - cachedIndex.builtAt.getTime()) / 1e3) : 0;
15920
+ serverLog("index", `Cache hit: ${cachedIndex.notes.size} notes, ${cacheAge}s old \u2014 loaded in ${duration}ms`);
15834
15921
  if (stateDb) {
15835
15922
  recordIndexEvent(stateDb, {
15836
15923
  trigger: "startup_cache",
@@ -15840,7 +15927,7 @@ async function main() {
15840
15927
  }
15841
15928
  runPostIndexWork(vaultIndex);
15842
15929
  } else {
15843
- serverLog("index", "Building vault index...");
15930
+ serverLog("index", "Cache miss: building from scratch");
15844
15931
  try {
15845
15932
  vaultIndex = await buildVaultIndex(vaultPath);
15846
15933
  setIndexState("ready");
@@ -15911,9 +15998,13 @@ async function updateEntitiesInStateDb() {
15911
15998
  }
15912
15999
  }
15913
16000
  async function runPostIndexWork(index) {
16001
+ const postStart = Date.now();
16002
+ serverLog("index", "Scanning entities...");
15914
16003
  await updateEntitiesInStateDb();
15915
16004
  await initializeEntityIndex(vaultPath);
16005
+ serverLog("index", "Entity index initialized");
15916
16006
  await exportHubScores(index, stateDb);
16007
+ serverLog("index", "Hub scores exported");
15917
16008
  if (stateDb) {
15918
16009
  try {
15919
16010
  const metrics = computeMetrics(index, stateDb);
@@ -15938,6 +16029,7 @@ async function runPostIndexWork(index) {
15938
16029
  if (stateDb) {
15939
16030
  try {
15940
16031
  updateSuppressionList(stateDb);
16032
+ serverLog("index", "Suppression list updated");
15941
16033
  } catch (err) {
15942
16034
  serverLog("server", `Failed to update suppression list: ${err instanceof Error ? err.message : err}`, "error");
15943
16035
  }
@@ -15948,9 +16040,15 @@ async function runPostIndexWork(index) {
15948
16040
  saveConfig(stateDb, inferred, existing);
15949
16041
  }
15950
16042
  flywheelConfig = loadConfig(stateDb);
16043
+ const configKeys = Object.keys(flywheelConfig).filter((k) => flywheelConfig[k] != null);
16044
+ serverLog("config", `Config inferred: ${configKeys.join(", ")}`);
15951
16045
  if (stateDb) {
15952
- refreshIfStale(vaultPath, index, flywheelConfig.exclude_task_tags);
15953
- serverLog("tasks", "Task cache ready");
16046
+ if (isTaskCacheStale()) {
16047
+ serverLog("tasks", "Task cache stale, rebuilding...");
16048
+ refreshIfStale(vaultPath, index, flywheelConfig.exclude_task_tags);
16049
+ } else {
16050
+ serverLog("tasks", "Task cache fresh, skipping rebuild");
16051
+ }
15954
16052
  }
15955
16053
  if (flywheelConfig.vault_name) {
15956
16054
  serverLog("config", `Vault: ${flywheelConfig.vault_name}`);
@@ -15996,36 +16094,47 @@ async function runPostIndexWork(index) {
15996
16094
  serverLog("watcher", `Processing ${batch.events.length} file changes`);
15997
16095
  const batchStart = Date.now();
15998
16096
  const changedPaths = batch.events.map((e) => e.path);
16097
+ const tracker = createStepTracker();
15999
16098
  try {
16099
+ tracker.start("index_rebuild", { files_changed: batch.events.length, changed_paths: changedPaths });
16000
16100
  vaultIndex = await buildVaultIndex(vaultPath);
16001
16101
  setIndexState("ready");
16002
- const duration = Date.now() - batchStart;
16003
- serverLog("watcher", `Index rebuilt in ${duration}ms`);
16004
- if (stateDb) {
16005
- recordIndexEvent(stateDb, {
16006
- trigger: "watcher",
16007
- duration_ms: duration,
16008
- note_count: vaultIndex.notes.size,
16009
- files_changed: batch.events.length,
16010
- changed_paths: changedPaths
16011
- });
16012
- }
16102
+ tracker.end({ note_count: vaultIndex.notes.size, entity_count: vaultIndex.entities.size, tag_count: vaultIndex.tags.size });
16103
+ serverLog("watcher", `Index rebuilt: ${vaultIndex.notes.size} notes, ${vaultIndex.entities.size} entities`);
16104
+ tracker.start("entity_scan", { note_count: vaultIndex.notes.size });
16013
16105
  await updateEntitiesInStateDb();
16014
- await exportHubScores(vaultIndex, stateDb);
16106
+ const entityCount = stateDb ? getAllEntitiesFromDb3(stateDb).length : 0;
16107
+ tracker.end({ entity_count: entityCount });
16108
+ serverLog("watcher", `Entity scan: ${entityCount} entities`);
16109
+ tracker.start("hub_scores", { entity_count: entityCount });
16110
+ const hubUpdated = await exportHubScores(vaultIndex, stateDb);
16111
+ tracker.end({ updated: hubUpdated ?? 0 });
16112
+ serverLog("watcher", `Hub scores: ${hubUpdated ?? 0} updated`);
16015
16113
  if (hasEmbeddingsIndex()) {
16114
+ tracker.start("note_embeddings", { files: batch.events.length });
16115
+ let embUpdated = 0;
16116
+ let embRemoved = 0;
16016
16117
  for (const event of batch.events) {
16017
16118
  try {
16018
16119
  if (event.type === "delete") {
16019
16120
  removeEmbedding(event.path);
16121
+ embRemoved++;
16020
16122
  } else if (event.path.endsWith(".md")) {
16021
16123
  const absPath = path29.join(vaultPath, event.path);
16022
16124
  await updateEmbedding(event.path, absPath);
16125
+ embUpdated++;
16023
16126
  }
16024
16127
  } catch {
16025
16128
  }
16026
16129
  }
16130
+ tracker.end({ updated: embUpdated, removed: embRemoved });
16131
+ serverLog("watcher", `Note embeddings: ${embUpdated} updated, ${embRemoved} removed`);
16132
+ } else {
16133
+ tracker.skip("note_embeddings", "not built");
16027
16134
  }
16028
16135
  if (hasEntityEmbeddingsIndex() && stateDb) {
16136
+ tracker.start("entity_embeddings", { files: batch.events.length });
16137
+ let entEmbUpdated = 0;
16029
16138
  try {
16030
16139
  const allEntities = getAllEntitiesFromDb3(stateDb);
16031
16140
  for (const event of batch.events) {
@@ -16038,28 +16147,58 @@ async function runPostIndexWork(index) {
16038
16147
  category: entity.category,
16039
16148
  aliases: entity.aliases
16040
16149
  }, vaultPath);
16150
+ entEmbUpdated++;
16041
16151
  }
16042
16152
  }
16043
16153
  } catch {
16044
16154
  }
16155
+ tracker.end({ updated: entEmbUpdated });
16156
+ serverLog("watcher", `Entity embeddings: ${entEmbUpdated} updated`);
16157
+ } else {
16158
+ tracker.skip("entity_embeddings", !stateDb ? "no stateDb" : "not built");
16045
16159
  }
16046
16160
  if (stateDb) {
16161
+ tracker.start("index_cache", { note_count: vaultIndex.notes.size });
16047
16162
  try {
16048
16163
  saveVaultIndexToCache(stateDb, vaultIndex);
16164
+ tracker.end({ saved: true });
16165
+ serverLog("watcher", "Index cache saved");
16049
16166
  } catch (err) {
16167
+ tracker.end({ saved: false, error: err instanceof Error ? err.message : String(err) });
16050
16168
  serverLog("index", `Failed to update index cache: ${err instanceof Error ? err.message : err}`, "error");
16051
16169
  }
16170
+ } else {
16171
+ tracker.skip("index_cache", "no stateDb");
16052
16172
  }
16173
+ tracker.start("task_cache", { files: batch.events.length });
16174
+ let taskUpdated = 0;
16175
+ let taskRemoved = 0;
16053
16176
  for (const event of batch.events) {
16054
16177
  try {
16055
16178
  if (event.type === "delete") {
16056
16179
  removeTaskCacheForFile(event.path);
16180
+ taskRemoved++;
16057
16181
  } else if (event.path.endsWith(".md")) {
16058
16182
  await updateTaskCacheForFile(vaultPath, event.path);
16183
+ taskUpdated++;
16059
16184
  }
16060
16185
  } catch {
16061
16186
  }
16062
16187
  }
16188
+ tracker.end({ updated: taskUpdated, removed: taskRemoved });
16189
+ serverLog("watcher", `Task cache: ${taskUpdated} updated, ${taskRemoved} removed`);
16190
+ const duration = Date.now() - batchStart;
16191
+ if (stateDb) {
16192
+ recordIndexEvent(stateDb, {
16193
+ trigger: "watcher",
16194
+ duration_ms: duration,
16195
+ note_count: vaultIndex.notes.size,
16196
+ files_changed: batch.events.length,
16197
+ changed_paths: changedPaths,
16198
+ steps: tracker.steps
16199
+ });
16200
+ }
16201
+ serverLog("watcher", `Batch complete: ${batch.events.length} files, ${duration}ms, ${tracker.steps.length} steps`);
16063
16202
  } catch (err) {
16064
16203
  setIndexState("error");
16065
16204
  setIndexError(err instanceof Error ? err : new Error(String(err)));
@@ -16071,7 +16210,8 @@ async function runPostIndexWork(index) {
16071
16210
  success: false,
16072
16211
  files_changed: batch.events.length,
16073
16212
  changed_paths: changedPaths,
16074
- error: err instanceof Error ? err.message : String(err)
16213
+ error: err instanceof Error ? err.message : String(err),
16214
+ steps: tracker.steps
16075
16215
  });
16076
16216
  }
16077
16217
  serverLog("watcher", `Failed to rebuild index: ${err instanceof Error ? err.message : err}`, "error");
@@ -16087,7 +16227,10 @@ async function runPostIndexWork(index) {
16087
16227
  }
16088
16228
  });
16089
16229
  watcher.start();
16230
+ serverLog("watcher", "File watcher started");
16090
16231
  }
16232
+ const postDuration = Date.now() - postStart;
16233
+ serverLog("server", `Post-index work complete in ${postDuration}ms`);
16091
16234
  }
16092
16235
  if (process.argv.includes("--init-semantic")) {
16093
16236
  (async () => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@velvetmonkey/flywheel-memory",
3
- "version": "2.0.29",
3
+ "version": "2.0.30",
4
4
  "description": "MCP server that gives Claude full read/write access to your Obsidian vault. 42 tools for search, backlinks, graph queries, mutations, and hybrid semantic search.",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -50,7 +50,7 @@
50
50
  },
51
51
  "dependencies": {
52
52
  "@modelcontextprotocol/sdk": "^1.25.1",
53
- "@velvetmonkey/vault-core": "^2.0.29",
53
+ "@velvetmonkey/vault-core": "^2.0.30",
54
54
  "better-sqlite3": "^11.0.0",
55
55
  "chokidar": "^4.0.0",
56
56
  "gray-matter": "^4.0.3",