@velvetmonkey/flywheel-memory 2.0.129 → 2.0.131

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +300 -262
  2. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -1239,15 +1239,22 @@ function getDashboardData(stateDb2) {
1239
1239
  }))
1240
1240
  };
1241
1241
  }
1242
- function getEntityScoreTimeline(stateDb2, entityName, daysBack = 30, limit = 100) {
1242
+ function getEntityScoreTimeline(stateDb2, entityName, daysBack = 90, limit = 90) {
1243
1243
  const cutoff = Date.now() - daysBack * 24 * 60 * 60 * 1e3;
1244
1244
  const rows = stateDb2.db.prepare(`
1245
- SELECT timestamp, total_score, breakdown_json, note_path, passed, threshold
1246
- FROM suggestion_events
1247
- WHERE entity = ? AND timestamp >= ?
1248
- ORDER BY timestamp ASC
1245
+ SELECT s.timestamp, s.total_score, s.breakdown_json, s.note_path, s.passed, s.threshold
1246
+ FROM suggestion_events s
1247
+ INNER JOIN (
1248
+ SELECT date(timestamp/1000, 'unixepoch') as day, MAX(total_score) as max_score
1249
+ FROM suggestion_events
1250
+ WHERE entity = ? AND timestamp >= ?
1251
+ GROUP BY day
1252
+ ) agg ON date(s.timestamp/1000, 'unixepoch') = agg.day AND s.total_score = agg.max_score
1253
+ WHERE s.entity = ? AND s.timestamp >= ?
1254
+ GROUP BY agg.day
1255
+ ORDER BY s.timestamp ASC
1249
1256
  LIMIT ?
1250
- `).all(entityName, cutoff, limit);
1257
+ `).all(entityName, cutoff, entityName, cutoff, limit);
1251
1258
  return rows.map((r) => ({
1252
1259
  timestamp: r.timestamp,
1253
1260
  score: r.total_score,
@@ -15303,7 +15310,7 @@ Example: vault_add_to_section({ path: "daily/2026-02-15.md", section: "Log", con
15303
15310
  format: z12.enum(["plain", "bullet", "task", "numbered", "timestamp-bullet"]).default("plain").describe("How to format the content"),
15304
15311
  commit: z12.boolean().default(false).describe("If true, commit this change to git (creates undo point)"),
15305
15312
  skipWikilinks: z12.boolean().default(false).describe("If true, skip auto-wikilink application (wikilinks are applied by default)"),
15306
- suggestOutgoingLinks: z12.boolean().default(true).describe("Suggest related outgoing wikilinks based on content. Set false to disable."),
15313
+ suggestOutgoingLinks: z12.boolean().default(false).describe('Suggest related outgoing wikilinks based on content (e.g., "\u2192 [[AI]], [[Philosophy]]"). Off by default \u2014 set true for daily notes, journals, or capture-heavy contexts.'),
15307
15314
  maxSuggestions: z12.number().min(1).max(10).default(5).describe("Maximum number of suggested wikilinks (1-10, default: 5)"),
15308
15315
  linkedEntities: z12.array(z12.string()).optional().describe("Entity names already linked in the content. When skipWikilinks=true, these are tracked for feedback without re-processing the content."),
15309
15316
  dry_run: z12.boolean().optional().default(false).describe("Preview changes without writing to disk"),
@@ -15464,7 +15471,7 @@ Example: vault_add_to_section({ path: "daily/2026-02-15.md", section: "Log", con
15464
15471
  useRegex: z12.boolean().default(false).describe("Treat search as regex"),
15465
15472
  commit: z12.boolean().default(false).describe("If true, commit this change to git (creates undo point)"),
15466
15473
  skipWikilinks: z12.boolean().default(false).describe("If true, skip auto-wikilink application on replacement text"),
15467
- suggestOutgoingLinks: z12.boolean().default(true).describe("Suggest related outgoing wikilinks based on content. Set false to disable."),
15474
+ suggestOutgoingLinks: z12.boolean().default(false).describe('Suggest related outgoing wikilinks based on content (e.g., "\u2192 [[AI]], [[Philosophy]]"). Off by default \u2014 set true for daily notes, journals, or capture-heavy contexts.'),
15468
15475
  maxSuggestions: z12.number().min(1).max(10).default(5).describe("Maximum number of suggested wikilinks (1-10, default: 5)"),
15469
15476
  dry_run: z12.boolean().optional().default(false).describe("Preview changes without writing to disk"),
15470
15477
  agent_id: z12.string().optional().describe("Agent identifier for multi-agent scoping"),
@@ -15684,7 +15691,7 @@ function registerTaskTools(server2, getVaultPath) {
15684
15691
  completed: z13.boolean().default(false).describe("Whether the task should start as completed"),
15685
15692
  commit: z13.boolean().default(false).describe("If true, commit this change to git (creates undo point)"),
15686
15693
  skipWikilinks: z13.boolean().default(false).describe("If true, skip auto-wikilink application (wikilinks are applied by default)"),
15687
- suggestOutgoingLinks: z13.boolean().default(true).describe('Append suggested outgoing wikilinks based on content (e.g., "\u2192 [[AI]], [[Philosophy]]"). Set false to disable.'),
15694
+ suggestOutgoingLinks: z13.boolean().default(false).describe('Append suggested outgoing wikilinks based on content (e.g., "\u2192 [[AI]], [[Philosophy]]"). Off by default \u2014 set true for daily notes, journals, or capture-heavy contexts.'),
15688
15695
  maxSuggestions: z13.number().min(1).max(10).default(5).describe("Maximum number of suggested wikilinks to append (1-10, default: 5)"),
15689
15696
  preserveListNesting: z13.boolean().default(true).describe("Preserve indentation when inserting into nested lists. Default: true"),
15690
15697
  validate: z13.boolean().default(true).describe("Check input for common issues"),
@@ -15832,7 +15839,7 @@ function registerNoteTools(server2, getVaultPath, getIndex) {
15832
15839
  overwrite: z15.boolean().default(false).describe("If true, overwrite existing file"),
15833
15840
  commit: z15.boolean().default(false).describe("If true, commit this change to git (creates undo point)"),
15834
15841
  skipWikilinks: z15.boolean().default(false).describe("If true, skip auto-wikilink application (wikilinks are applied by default)"),
15835
- suggestOutgoingLinks: z15.boolean().default(true).describe('Append suggested outgoing wikilinks based on content (e.g., "\u2192 [[AI]], [[Philosophy]]").'),
15842
+ suggestOutgoingLinks: z15.boolean().default(false).describe('Append suggested outgoing wikilinks based on content (e.g., "\u2192 [[AI]], [[Philosophy]]"). Off by default \u2014 set true for daily notes, journals, or capture-heavy contexts.'),
15836
15843
  maxSuggestions: z15.number().min(1).max(10).default(5).describe("Maximum number of suggested wikilinks to append (1-10, default: 5)"),
15837
15844
  dry_run: z15.boolean().optional().default(false).describe("Preview changes without writing to disk"),
15838
15845
  agent_id: z15.string().optional().describe("Agent identifier for multi-agent scoping"),
@@ -19983,7 +19990,10 @@ var VALID_CONFIG_KEYS = {
19983
19990
  wikilink_strictness: z27.enum(["conservative", "balanced", "aggressive"]),
19984
19991
  implicit_detection: z27.boolean(),
19985
19992
  implicit_patterns: z27.array(z27.string()),
19986
- adaptive_strictness: z27.boolean()
19993
+ adaptive_strictness: z27.boolean(),
19994
+ proactive_linking: z27.boolean(),
19995
+ proactive_min_score: z27.number(),
19996
+ proactive_max_per_file: z27.number()
19987
19997
  };
19988
19998
  function registerConfigTools(server2, getConfig2, setConfig, getStateDb2) {
19989
19999
  server2.registerTool(
@@ -22083,6 +22093,15 @@ var stateDb = null;
22083
22093
  var watcherInstance = null;
22084
22094
  var vaultRegistry = null;
22085
22095
  function getWatcherStatus() {
22096
+ if (vaultRegistry) {
22097
+ const name = globalThis.__flywheel_active_vault;
22098
+ if (name) {
22099
+ try {
22100
+ return vaultRegistry.getContext(name).watcher?.status ?? null;
22101
+ } catch {
22102
+ }
22103
+ }
22104
+ }
22086
22105
  return watcherInstance?.status ?? null;
22087
22106
  }
22088
22107
  var ALL_CATEGORIES = [
@@ -22573,12 +22592,11 @@ function registerAllTools(targetServer) {
22573
22592
  targetServer,
22574
22593
  gvi,
22575
22594
  (newIndex) => {
22576
- vaultIndex = newIndex;
22595
+ updateVaultIndex(newIndex);
22577
22596
  },
22578
22597
  gvp,
22579
22598
  (newConfig) => {
22580
- flywheelConfig = newConfig;
22581
- setWikilinkConfig(newConfig);
22599
+ updateFlywheelConfig(newConfig);
22582
22600
  },
22583
22601
  gsd
22584
22602
  );
@@ -22636,8 +22654,7 @@ function registerAllTools(targetServer) {
22636
22654
  targetServer,
22637
22655
  gcf,
22638
22656
  (newConfig) => {
22639
- flywheelConfig = newConfig;
22640
- setWikilinkConfig(newConfig);
22657
+ updateFlywheelConfig(newConfig);
22641
22658
  },
22642
22659
  gsd
22643
22660
  );
@@ -22663,7 +22680,7 @@ function createConfiguredServer() {
22663
22680
  { name: "flywheel-memory", version: pkg.version },
22664
22681
  { instructions: generateInstructions(enabledCategories, vaultRegistry) }
22665
22682
  );
22666
- applyToolGating(s, enabledCategories, () => stateDb, vaultRegistry, () => vaultPath);
22683
+ applyToolGating(s, enabledCategories, () => getActiveScopeOrNull()?.stateDb ?? stateDb, vaultRegistry, () => getActiveScopeOrNull()?.vaultPath ?? vaultPath);
22667
22684
  registerAllTools(s);
22668
22685
  return s;
22669
22686
  }
@@ -22671,7 +22688,7 @@ var server = new McpServer(
22671
22688
  { name: "flywheel-memory", version: pkg.version },
22672
22689
  { instructions: generateInstructions(enabledCategories, vaultRegistry) }
22673
22690
  );
22674
- var _gatingResult = applyToolGating(server, enabledCategories, () => stateDb, vaultRegistry, () => vaultPath);
22691
+ var _gatingResult = applyToolGating(server, enabledCategories, () => getActiveScopeOrNull()?.stateDb ?? stateDb, vaultRegistry, () => getActiveScopeOrNull()?.vaultPath ?? vaultPath);
22675
22692
  registerAllTools(server);
22676
22693
  var categoryList = Array.from(enabledCategories).sort().join(", ");
22677
22694
  serverLog("server", `Tool categories: ${categoryList}`);
@@ -22756,6 +22773,108 @@ function updateIndexState(state2, error) {
22756
22773
  if (error !== void 0) ctx.indexError = error;
22757
22774
  }
22758
22775
  }
22776
+ function updateVaultIndex(index) {
22777
+ vaultIndex = index;
22778
+ const ctx = getActiveVaultContext();
22779
+ if (ctx) ctx.vaultIndex = index;
22780
+ }
22781
+ function updateFlywheelConfig(config) {
22782
+ flywheelConfig = config;
22783
+ setWikilinkConfig(config);
22784
+ const ctx = getActiveVaultContext();
22785
+ if (ctx) ctx.flywheelConfig = config;
22786
+ }
22787
+ async function bootVault(ctx, startTime) {
22788
+ const vp = ctx.vaultPath;
22789
+ const sd = ctx.stateDb;
22790
+ initializeLogger(vp).then(() => {
22791
+ const logger3 = getLogger();
22792
+ if (logger3?.enabled) {
22793
+ serverLog("server", `[${ctx.name}] Unified logging enabled`);
22794
+ }
22795
+ }).catch(() => {
22796
+ });
22797
+ initializeLogger2(vp).catch((err) => {
22798
+ serverLog("server", `[${ctx.name}] Write logger initialization failed: ${err}`, "error");
22799
+ });
22800
+ if (process.env.FLYWHEEL_SKIP_FTS5 !== "true") {
22801
+ if (isIndexStale(vp)) {
22802
+ buildFTS5Index(vp).then(() => {
22803
+ serverLog("fts5", `[${ctx.name}] Search index ready`);
22804
+ }).catch((err) => {
22805
+ serverLog("fts5", `[${ctx.name}] Build failed: ${err instanceof Error ? err.message : err}`, "error");
22806
+ });
22807
+ } else {
22808
+ serverLog("fts5", `[${ctx.name}] Search index already fresh, skipping rebuild`);
22809
+ }
22810
+ } else {
22811
+ serverLog("fts5", "Skipping \u2014 FLYWHEEL_SKIP_FTS5");
22812
+ }
22813
+ let cachedIndex = null;
22814
+ if (sd) {
22815
+ try {
22816
+ const files = await scanVault(vp);
22817
+ const noteCount = files.length;
22818
+ serverLog("index", `[${ctx.name}] Found ${noteCount} markdown files`);
22819
+ const newestMtime = files.reduce((max, f) => f.modified > max ? f.modified : max, /* @__PURE__ */ new Date(0));
22820
+ cachedIndex = loadVaultIndexFromCache(sd, noteCount, void 0, void 0, newestMtime);
22821
+ } catch (err) {
22822
+ serverLog("index", `[${ctx.name}] Cache check failed: ${err instanceof Error ? err.message : err}`, "warn");
22823
+ }
22824
+ }
22825
+ if (cachedIndex) {
22826
+ updateVaultIndex(cachedIndex);
22827
+ updateIndexState("ready");
22828
+ const duration = Date.now() - startTime;
22829
+ const cacheAge = cachedIndex.builtAt ? Math.round((Date.now() - cachedIndex.builtAt.getTime()) / 1e3) : 0;
22830
+ serverLog("index", `[${ctx.name}] Cache hit: ${cachedIndex.notes.size} notes, ${cacheAge}s old \u2014 loaded in ${duration}ms`);
22831
+ if (sd) {
22832
+ recordIndexEvent(sd, {
22833
+ trigger: "startup_cache",
22834
+ duration_ms: duration,
22835
+ note_count: cachedIndex.notes.size
22836
+ });
22837
+ }
22838
+ await runPostIndexWork(ctx);
22839
+ } else {
22840
+ serverLog("index", `[${ctx.name}] Cache miss: building from scratch`);
22841
+ try {
22842
+ const built = await buildVaultIndex(vp);
22843
+ updateVaultIndex(built);
22844
+ updateIndexState("ready");
22845
+ const duration = Date.now() - startTime;
22846
+ serverLog("index", `[${ctx.name}] Vault index ready in ${duration}ms \u2014 ${vaultIndex.notes.size} notes`);
22847
+ if (sd) {
22848
+ recordIndexEvent(sd, {
22849
+ trigger: "startup_build",
22850
+ duration_ms: duration,
22851
+ note_count: vaultIndex.notes.size
22852
+ });
22853
+ }
22854
+ if (sd) {
22855
+ try {
22856
+ saveVaultIndexToCache(sd, vaultIndex);
22857
+ serverLog("index", `[${ctx.name}] Index cache saved`);
22858
+ } catch (err) {
22859
+ serverLog("index", `[${ctx.name}] Failed to save index cache: ${err instanceof Error ? err.message : err}`, "error");
22860
+ }
22861
+ }
22862
+ await runPostIndexWork(ctx);
22863
+ } catch (err) {
22864
+ updateIndexState("error", err instanceof Error ? err : new Error(String(err)));
22865
+ const duration = Date.now() - startTime;
22866
+ if (sd) {
22867
+ recordIndexEvent(sd, {
22868
+ trigger: "startup_build",
22869
+ duration_ms: duration,
22870
+ success: false,
22871
+ error: err instanceof Error ? err.message : String(err)
22872
+ });
22873
+ }
22874
+ serverLog("index", `[${ctx.name}] Failed to build vault index: ${err instanceof Error ? err.message : err}`, "error");
22875
+ }
22876
+ }
22877
+ }
22759
22878
  async function main() {
22760
22879
  serverLog("server", `Starting Flywheel Memory v${pkg.version}...`);
22761
22880
  serverLog("server", `Vault: ${vaultPath}`);
@@ -22829,105 +22948,24 @@ async function main() {
22829
22948
  serverLog("server", `HTTP transport on ${httpHost}:${httpPort}`);
22830
22949
  });
22831
22950
  }
22832
- initializeLogger(vaultPath).then(() => {
22833
- const logger3 = getLogger();
22834
- if (logger3?.enabled) {
22835
- serverLog("server", "Unified logging enabled");
22836
- }
22837
- }).catch(() => {
22838
- });
22839
- initializeLogger2(vaultPath).catch((err) => {
22840
- serverLog("server", `Write logger initialization failed: ${err}`, "error");
22841
- });
22842
- if (process.env.FLYWHEEL_SKIP_FTS5 !== "true") {
22843
- if (isIndexStale(vaultPath)) {
22844
- buildFTS5Index(vaultPath).then(() => {
22845
- serverLog("fts5", "Search index ready");
22846
- }).catch((err) => {
22847
- serverLog("fts5", `Build failed: ${err instanceof Error ? err.message : err}`, "error");
22848
- });
22849
- } else {
22850
- serverLog("fts5", "Search index already fresh, skipping rebuild");
22851
- }
22852
- } else {
22853
- serverLog("fts5", "Skipping \u2014 FLYWHEEL_SKIP_FTS5");
22854
- }
22855
- let cachedIndex = null;
22856
- if (stateDb) {
22857
- try {
22858
- const files = await scanVault(vaultPath);
22859
- const noteCount = files.length;
22860
- serverLog("index", `Found ${noteCount} markdown files`);
22861
- const newestMtime = files.reduce((max, f) => f.modified > max ? f.modified : max, /* @__PURE__ */ new Date(0));
22862
- cachedIndex = loadVaultIndexFromCache(stateDb, noteCount, void 0, void 0, newestMtime);
22863
- } catch (err) {
22864
- serverLog("index", `Cache check failed: ${err instanceof Error ? err.message : err}`, "warn");
22865
- }
22866
- }
22867
- if (cachedIndex) {
22868
- vaultIndex = cachedIndex;
22869
- updateIndexState("ready");
22870
- const duration = Date.now() - startTime;
22871
- const cacheAge = cachedIndex.builtAt ? Math.round((Date.now() - cachedIndex.builtAt.getTime()) / 1e3) : 0;
22872
- serverLog("index", `Cache hit: ${cachedIndex.notes.size} notes, ${cacheAge}s old \u2014 loaded in ${duration}ms`);
22873
- if (stateDb) {
22874
- recordIndexEvent(stateDb, {
22875
- trigger: "startup_cache",
22876
- duration_ms: duration,
22877
- note_count: cachedIndex.notes.size
22878
- });
22879
- }
22880
- runPostIndexWork(vaultIndex);
22881
- } else {
22882
- serverLog("index", "Cache miss: building from scratch");
22883
- try {
22884
- vaultIndex = await buildVaultIndex(vaultPath);
22885
- updateIndexState("ready");
22886
- const duration = Date.now() - startTime;
22887
- serverLog("index", `Vault index ready in ${duration}ms \u2014 ${vaultIndex.notes.size} notes`);
22888
- if (stateDb) {
22889
- recordIndexEvent(stateDb, {
22890
- trigger: "startup_build",
22891
- duration_ms: duration,
22892
- note_count: vaultIndex.notes.size
22893
- });
22894
- }
22895
- if (stateDb) {
22896
- try {
22897
- saveVaultIndexToCache(stateDb, vaultIndex);
22898
- serverLog("index", "Index cache saved");
22899
- } catch (err) {
22900
- serverLog("index", `Failed to save index cache: ${err instanceof Error ? err.message : err}`, "error");
22901
- }
22902
- }
22903
- await runPostIndexWork(vaultIndex);
22904
- } catch (err) {
22905
- updateIndexState("error", err instanceof Error ? err : new Error(String(err)));
22906
- const duration = Date.now() - startTime;
22907
- if (stateDb) {
22908
- recordIndexEvent(stateDb, {
22909
- trigger: "startup_build",
22910
- duration_ms: duration,
22911
- success: false,
22912
- error: err instanceof Error ? err.message : String(err)
22913
- });
22914
- }
22915
- serverLog("index", `Failed to build vault index: ${err instanceof Error ? err.message : err}`, "error");
22916
- }
22951
+ for (const vaultCtx of vaultRegistry.getAllContexts()) {
22952
+ activateVault(vaultCtx);
22953
+ stateDb = vaultCtx.stateDb;
22954
+ await bootVault(vaultCtx, startTime);
22917
22955
  }
22918
22956
  }
22919
22957
  var DEFAULT_ENTITY_EXCLUDE_FOLDERS = ["node_modules", "templates", "attachments", "tmp"];
22920
- var lastCooccurrenceRebuildAt = 0;
22921
- var lastEdgeWeightRebuildAt = 0;
22922
- async function updateEntitiesInStateDb() {
22923
- if (!stateDb) return;
22958
+ async function updateEntitiesInStateDb(vp, sd) {
22959
+ const db4 = sd ?? stateDb;
22960
+ const vault = vp ?? vaultPath;
22961
+ if (!db4) return;
22924
22962
  try {
22925
- const config = loadConfig(stateDb);
22963
+ const config = loadConfig(db4);
22926
22964
  const excludeFolders = config.exclude_entity_folders?.length ? config.exclude_entity_folders : DEFAULT_ENTITY_EXCLUDE_FOLDERS;
22927
- const entityIndex2 = await scanVaultEntities4(vaultPath, {
22965
+ const entityIndex2 = await scanVaultEntities4(vault, {
22928
22966
  excludeFolders
22929
22967
  });
22930
- stateDb.replaceAllEntities(entityIndex2);
22968
+ db4.replaceAllEntities(entityIndex2);
22931
22969
  serverLog("index", `Updated ${entityIndex2._metadata.total_entities} entities in StateDb`);
22932
22970
  } catch (e) {
22933
22971
  serverLog("index", `Failed to update entities in StateDb: ${e instanceof Error ? e.message : e}`, "error");
@@ -22983,61 +23021,69 @@ function runPeriodicMaintenance(db4) {
22983
23021
  serverLog("server", "Daily purge complete");
22984
23022
  }
22985
23023
  }
22986
- async function runPostIndexWork(index) {
23024
+ async function runPostIndexWork(ctx) {
23025
+ const index = ctx.vaultIndex;
23026
+ const vp = ctx.vaultPath;
23027
+ const sd = ctx.stateDb;
23028
+ let rvp;
23029
+ try {
23030
+ rvp = realpathSync(vp).replace(/\\/g, "/");
23031
+ } catch {
23032
+ rvp = vp.replace(/\\/g, "/");
23033
+ }
22987
23034
  const postStart = Date.now();
22988
23035
  serverLog("index", "Scanning entities...");
22989
- await updateEntitiesInStateDb();
22990
- await initializeEntityIndex(vaultPath);
23036
+ await updateEntitiesInStateDb(vp, sd);
23037
+ await initializeEntityIndex(vp);
22991
23038
  serverLog("index", "Entity index initialized");
22992
- await exportHubScores(index, stateDb);
23039
+ await exportHubScores(index, sd);
22993
23040
  serverLog("index", "Hub scores exported");
22994
- if (stateDb) {
23041
+ if (sd) {
22995
23042
  try {
22996
- const metrics = computeMetrics(index, stateDb);
22997
- recordMetrics(stateDb, metrics);
22998
- purgeOldMetrics(stateDb, 90);
22999
- purgeOldIndexEvents(stateDb, 90);
23000
- purgeOldInvocations(stateDb, 90);
23001
- purgeOldSuggestionEvents(stateDb, 30);
23002
- purgeOldNoteLinkHistory(stateDb, 90);
23003
- sweepExpiredMemories(stateDb);
23004
- decayMemoryConfidence(stateDb);
23005
- pruneSupersededMemories(stateDb, 90);
23043
+ const metrics = computeMetrics(index, sd);
23044
+ recordMetrics(sd, metrics);
23045
+ purgeOldMetrics(sd, 90);
23046
+ purgeOldIndexEvents(sd, 90);
23047
+ purgeOldInvocations(sd, 90);
23048
+ purgeOldSuggestionEvents(sd, 30);
23049
+ purgeOldNoteLinkHistory(sd, 90);
23050
+ sweepExpiredMemories(sd);
23051
+ decayMemoryConfidence(sd);
23052
+ pruneSupersededMemories(sd, 90);
23006
23053
  serverLog("server", "Growth metrics recorded");
23007
23054
  } catch (err) {
23008
23055
  serverLog("server", `Failed to record metrics: ${err instanceof Error ? err.message : err}`, "error");
23009
23056
  }
23010
23057
  }
23011
- if (stateDb) {
23058
+ if (sd) {
23012
23059
  try {
23013
23060
  const graphMetrics = computeGraphMetrics(index);
23014
- recordGraphSnapshot(stateDb, graphMetrics);
23015
- purgeOldSnapshots(stateDb, 90);
23061
+ recordGraphSnapshot(sd, graphMetrics);
23062
+ purgeOldSnapshots(sd, 90);
23016
23063
  } catch (err) {
23017
23064
  serverLog("server", `Failed to record graph snapshot: ${err instanceof Error ? err.message : err}`, "error");
23018
23065
  }
23019
23066
  }
23020
- if (stateDb) {
23067
+ if (sd) {
23021
23068
  try {
23022
- updateSuppressionList(stateDb);
23069
+ updateSuppressionList(sd);
23023
23070
  serverLog("index", "Suppression list updated");
23024
23071
  } catch (err) {
23025
23072
  serverLog("server", `Failed to update suppression list: ${err instanceof Error ? err.message : err}`, "error");
23026
23073
  }
23027
23074
  }
23028
- const existing = loadConfig(stateDb);
23029
- const inferred = inferConfig(index, vaultPath);
23030
- if (stateDb) {
23031
- saveConfig(stateDb, inferred, existing);
23075
+ const existing = loadConfig(sd);
23076
+ const inferred = inferConfig(index, vp);
23077
+ if (sd) {
23078
+ saveConfig(sd, inferred, existing);
23032
23079
  }
23033
- flywheelConfig = loadConfig(stateDb);
23034
- setWikilinkConfig(flywheelConfig);
23080
+ updateFlywheelConfig(loadConfig(sd));
23035
23081
  const configKeys = Object.keys(flywheelConfig).filter((k) => flywheelConfig[k] != null);
23036
23082
  serverLog("config", `Config inferred: ${configKeys.join(", ")}`);
23037
- if (stateDb) {
23083
+ if (sd) {
23038
23084
  if (isTaskCacheStale()) {
23039
23085
  serverLog("tasks", "Task cache stale, rebuilding...");
23040
- refreshIfStale(vaultPath, index, flywheelConfig.exclude_task_tags);
23086
+ refreshIfStale(vp, index, flywheelConfig.exclude_task_tags);
23041
23087
  } else {
23042
23088
  serverLog("tasks", "Task cache fresh, skipping rebuild");
23043
23089
  }
@@ -23050,9 +23096,9 @@ async function runPostIndexWork(index) {
23050
23096
  if (hasEmbeddingsIndex() && needsEmbeddingRebuild()) {
23051
23097
  const oldModel = getStoredEmbeddingModel();
23052
23098
  serverLog("semantic", `Embedding model changed from ${oldModel} to ${getActiveModelId()}, rebuilding`);
23053
- if (stateDb) {
23054
- stateDb.db.exec("DELETE FROM note_embeddings");
23055
- stateDb.db.exec("DELETE FROM entity_embeddings");
23099
+ if (sd) {
23100
+ sd.db.exec("DELETE FROM note_embeddings");
23101
+ sd.db.exec("DELETE FROM entity_embeddings");
23056
23102
  }
23057
23103
  setEmbeddingsBuildState("none");
23058
23104
  modelChanged = true;
@@ -23062,19 +23108,17 @@ async function runPostIndexWork(index) {
23062
23108
  } else {
23063
23109
  const MAX_BUILD_RETRIES = 2;
23064
23110
  const attemptBuild = async (attempt) => {
23111
+ activateVault(ctx);
23065
23112
  setEmbeddingsBuilding(true);
23066
- {
23067
- const ctx = getActiveVaultContext();
23068
- if (ctx) ctx.embeddingsBuilding = true;
23069
- }
23113
+ ctx.embeddingsBuilding = true;
23070
23114
  try {
23071
- await buildEmbeddingsIndex(vaultPath, (p) => {
23115
+ await buildEmbeddingsIndex(vp, (p) => {
23072
23116
  if (p.current % 100 === 0 || p.current === p.total) {
23073
23117
  serverLog("semantic", `Embedding ${p.current}/${p.total} notes...`);
23074
23118
  }
23075
23119
  });
23076
- if (stateDb) {
23077
- const entities = getAllEntitiesFromDb3(stateDb);
23120
+ if (sd) {
23121
+ const entities = getAllEntitiesFromDb3(sd);
23078
23122
  if (entities.length > 0) {
23079
23123
  const entityMap = new Map(entities.map((e) => [e.name, {
23080
23124
  name: e.name,
@@ -23082,9 +23126,11 @@ async function runPostIndexWork(index) {
23082
23126
  category: e.category,
23083
23127
  aliases: e.aliases
23084
23128
  }]));
23085
- await buildEntityEmbeddingsIndex(vaultPath, entityMap);
23129
+ activateVault(ctx);
23130
+ await buildEntityEmbeddingsIndex(vp, entityMap);
23086
23131
  }
23087
23132
  }
23133
+ activateVault(ctx);
23088
23134
  loadEntityEmbeddingsToMemory();
23089
23135
  setEmbeddingsBuildState("complete");
23090
23136
  serverLog("semantic", "Embeddings ready \u2014 searches now use hybrid ranking");
@@ -23099,11 +23145,9 @@ async function runPostIndexWork(index) {
23099
23145
  serverLog("semantic", `Embeddings build failed after ${MAX_BUILD_RETRIES} attempts: ${msg}`, "error");
23100
23146
  serverLog("semantic", "Keyword search (BM25) remains fully available", "error");
23101
23147
  } finally {
23148
+ activateVault(ctx);
23102
23149
  setEmbeddingsBuilding(false);
23103
- {
23104
- const ctx = getActiveVaultContext();
23105
- if (ctx) ctx.embeddingsBuilding = false;
23106
- }
23150
+ ctx.embeddingsBuilding = false;
23107
23151
  }
23108
23152
  };
23109
23153
  attemptBuild(1);
@@ -23114,8 +23158,8 @@ async function runPostIndexWork(index) {
23114
23158
  if (process.env.FLYWHEEL_WATCH !== "false") {
23115
23159
  const config = parseWatcherConfig();
23116
23160
  const lastContentHashes = /* @__PURE__ */ new Map();
23117
- if (stateDb) {
23118
- const persisted = loadContentHashes(stateDb);
23161
+ if (sd) {
23162
+ const persisted = loadContentHashes(sd);
23119
23163
  for (const [p, h] of persisted) lastContentHashes.set(p, h);
23120
23164
  if (persisted.size > 0) {
23121
23165
  serverLog("watcher", `Loaded ${persisted.size} persisted content hashes`);
@@ -23124,8 +23168,8 @@ async function runPostIndexWork(index) {
23124
23168
  serverLog("watcher", `File watcher enabled (debounce: ${config.debounceMs}ms)`);
23125
23169
  const handleBatch = async (batch) => {
23126
23170
  const vaultPrefixes = /* @__PURE__ */ new Set([
23127
- vaultPath.replace(/\\/g, "/"),
23128
- resolvedVaultPath
23171
+ vp.replace(/\\/g, "/"),
23172
+ rvp
23129
23173
  ]);
23130
23174
  const normalizeEventPath = (rawPath) => {
23131
23175
  const normalized = rawPath.replace(/\\/g, "/");
@@ -23176,7 +23220,7 @@ async function runPostIndexWork(index) {
23176
23220
  continue;
23177
23221
  }
23178
23222
  try {
23179
- const content = await fs33.readFile(path33.join(vaultPath, event.path), "utf-8");
23223
+ const content = await fs33.readFile(path33.join(vp, event.path), "utf-8");
23180
23224
  const hash = createHash3("sha256").update(content).digest("hex").slice(0, 16);
23181
23225
  if (lastContentHashes.get(event.path) === hash) {
23182
23226
  serverLog("watcher", `Hash unchanged, skipping: ${event.path}`);
@@ -23189,25 +23233,25 @@ async function runPostIndexWork(index) {
23189
23233
  filteredEvents.push(event);
23190
23234
  }
23191
23235
  }
23192
- if (stateDb && (hashUpserts.length || hashDeletes.length)) {
23193
- saveContentHashBatch(stateDb, hashUpserts, hashDeletes);
23236
+ if (sd && (hashUpserts.length || hashDeletes.length)) {
23237
+ saveContentHashBatch(sd, hashUpserts, hashDeletes);
23194
23238
  }
23195
- if (batchRenames.length > 0 && stateDb) {
23239
+ if (batchRenames.length > 0 && sd) {
23196
23240
  try {
23197
- const insertMove = stateDb.db.prepare(`
23241
+ const insertMove = sd.db.prepare(`
23198
23242
  INSERT INTO note_moves (old_path, new_path, old_folder, new_folder)
23199
23243
  VALUES (?, ?, ?, ?)
23200
23244
  `);
23201
- const renameNoteLinks = stateDb.db.prepare(
23245
+ const renameNoteLinks = sd.db.prepare(
23202
23246
  "UPDATE note_links SET note_path = ? WHERE note_path = ?"
23203
23247
  );
23204
- const renameNoteTags = stateDb.db.prepare(
23248
+ const renameNoteTags = sd.db.prepare(
23205
23249
  "UPDATE note_tags SET note_path = ? WHERE note_path = ?"
23206
23250
  );
23207
- const renameNoteLinkHistory = stateDb.db.prepare(
23251
+ const renameNoteLinkHistory = sd.db.prepare(
23208
23252
  "UPDATE note_link_history SET note_path = ? WHERE note_path = ?"
23209
23253
  );
23210
- const renameWikilinkApplications = stateDb.db.prepare(
23254
+ const renameWikilinkApplications = sd.db.prepare(
23211
23255
  "UPDATE wikilink_applications SET note_path = ? WHERE note_path = ?"
23212
23256
  );
23213
23257
  for (const rename of batchRenames) {
@@ -23222,7 +23266,7 @@ async function runPostIndexWork(index) {
23222
23266
  if (oldHash !== void 0) {
23223
23267
  lastContentHashes.set(rename.newPath, oldHash);
23224
23268
  lastContentHashes.delete(rename.oldPath);
23225
- renameContentHash(stateDb, rename.oldPath, rename.newPath);
23269
+ renameContentHash(sd, rename.oldPath, rename.newPath);
23226
23270
  }
23227
23271
  }
23228
23272
  serverLog("watcher", `Renames: recorded ${batchRenames.length} move(s) in note_moves`);
@@ -23250,17 +23294,18 @@ async function runPostIndexWork(index) {
23250
23294
  try {
23251
23295
  tracker.start("index_rebuild", { files_changed: filteredEvents.length, changed_paths: changedPaths });
23252
23296
  if (!vaultIndex) {
23253
- vaultIndex = await buildVaultIndex(vaultPath);
23254
- serverLog("watcher", `Index rebuilt (full): ${vaultIndex.notes.size} notes, ${vaultIndex.entities.size} entities`);
23297
+ const rebuilt = await buildVaultIndex(vp);
23298
+ updateVaultIndex(rebuilt);
23299
+ serverLog("watcher", `Index rebuilt (full): ${rebuilt.notes.size} notes, ${rebuilt.entities.size} entities`);
23255
23300
  } else {
23256
23301
  const absoluteBatch = {
23257
23302
  ...batch,
23258
23303
  events: filteredEvents.map((e) => ({
23259
23304
  ...e,
23260
- path: path33.join(vaultPath, e.path)
23305
+ path: path33.join(vp, e.path)
23261
23306
  }))
23262
23307
  };
23263
- const batchResult = await processBatch(vaultIndex, vaultPath, absoluteBatch);
23308
+ const batchResult = await processBatch(vaultIndex, vp, absoluteBatch);
23264
23309
  serverLog("watcher", `Incremental: ${batchResult.successful}/${batchResult.total} files in ${batchResult.durationMs}ms`);
23265
23310
  }
23266
23311
  updateIndexState("ready");
@@ -23273,20 +23318,20 @@ async function runPostIndexWork(index) {
23273
23318
  serverLog("watcher", `Note moves: ${batchRenames.length} rename(s) recorded`);
23274
23319
  }
23275
23320
  const hubBefore = /* @__PURE__ */ new Map();
23276
- if (stateDb) {
23277
- const rows = stateDb.db.prepare("SELECT name, hub_score FROM entities").all();
23321
+ if (sd) {
23322
+ const rows = sd.db.prepare("SELECT name, hub_score FROM entities").all();
23278
23323
  for (const r of rows) hubBefore.set(r.name, r.hub_score);
23279
23324
  }
23280
- const entitiesBefore = stateDb ? getAllEntitiesFromDb3(stateDb) : [];
23325
+ const entitiesBefore = sd ? getAllEntitiesFromDb3(sd) : [];
23281
23326
  tracker.start("entity_scan", { note_count: vaultIndex.notes.size });
23282
- await updateEntitiesInStateDb();
23283
- const entitiesAfter = stateDb ? getAllEntitiesFromDb3(stateDb) : [];
23327
+ await updateEntitiesInStateDb(vp, sd);
23328
+ const entitiesAfter = sd ? getAllEntitiesFromDb3(sd) : [];
23284
23329
  const entityDiff = computeEntityDiff(entitiesBefore, entitiesAfter);
23285
23330
  const categoryChanges = [];
23286
23331
  const descriptionChanges = [];
23287
- if (stateDb) {
23332
+ if (sd) {
23288
23333
  const beforeMap = new Map(entitiesBefore.map((e) => [e.name, e]));
23289
- const insertChange = stateDb.db.prepare(
23334
+ const insertChange = sd.db.prepare(
23290
23335
  "INSERT INTO entity_changes (entity, field, old_value, new_value) VALUES (?, ?, ?, ?)"
23291
23336
  );
23292
23337
  for (const after of entitiesAfter) {
@@ -23309,10 +23354,10 @@ async function runPostIndexWork(index) {
23309
23354
  serverLog("watcher", `Entity scan: ${entitiesAfter.length} entities`);
23310
23355
  tracker.start("hub_scores", { entity_count: entitiesAfter.length });
23311
23356
  try {
23312
- const hubUpdated = await exportHubScores(vaultIndex, stateDb);
23357
+ const hubUpdated = await exportHubScores(vaultIndex, sd);
23313
23358
  const hubDiffs = [];
23314
- if (stateDb) {
23315
- const rows = stateDb.db.prepare("SELECT name, hub_score FROM entities").all();
23359
+ if (sd) {
23360
+ const rows = sd.db.prepare("SELECT name, hub_score FROM entities").all();
23316
23361
  for (const r of rows) {
23317
23362
  const prev = hubBefore.get(r.name) ?? 0;
23318
23363
  if (prev !== r.hub_score) hubDiffs.push({ entity: r.name, before: prev, after: r.hub_score });
@@ -23330,7 +23375,7 @@ async function runPostIndexWork(index) {
23330
23375
  const cacheAgeMs = cachedRecency ? Date.now() - (cachedRecency.lastUpdated ?? 0) : Infinity;
23331
23376
  if (cacheAgeMs >= 60 * 60 * 1e3) {
23332
23377
  const entities = entitiesAfter.map((e) => ({ name: e.name, path: e.path, aliases: e.aliases }));
23333
- const recencyIndex2 = await buildRecencyIndex(vaultPath, entities);
23378
+ const recencyIndex2 = await buildRecencyIndex(vp, entities);
23334
23379
  saveRecencyToStateDb(recencyIndex2);
23335
23380
  tracker.end({ rebuilt: true, entities: recencyIndex2.lastMentioned.size });
23336
23381
  serverLog("watcher", `Recency: rebuilt ${recencyIndex2.lastMentioned.size} entities`);
@@ -23344,19 +23389,15 @@ async function runPostIndexWork(index) {
23344
23389
  }
23345
23390
  tracker.start("cooccurrence", { entity_count: entitiesAfter.length });
23346
23391
  try {
23347
- const cooccurrenceAgeMs = lastCooccurrenceRebuildAt > 0 ? Date.now() - lastCooccurrenceRebuildAt : Infinity;
23392
+ const cooccurrenceAgeMs = ctx.lastCooccurrenceRebuildAt > 0 ? Date.now() - ctx.lastCooccurrenceRebuildAt : Infinity;
23348
23393
  if (cooccurrenceAgeMs >= 60 * 60 * 1e3) {
23349
23394
  const entityNames = entitiesAfter.map((e) => e.name);
23350
- const cooccurrenceIdx = await mineCooccurrences(vaultPath, entityNames);
23395
+ const cooccurrenceIdx = await mineCooccurrences(vp, entityNames);
23351
23396
  setCooccurrenceIndex(cooccurrenceIdx);
23352
- lastCooccurrenceRebuildAt = Date.now();
23353
- const activeCtx = getActiveVaultContext();
23354
- if (activeCtx) {
23355
- activeCtx.cooccurrenceIndex = cooccurrenceIdx;
23356
- activeCtx.lastCooccurrenceRebuildAt = lastCooccurrenceRebuildAt;
23357
- }
23358
- if (stateDb) {
23359
- saveCooccurrenceToStateDb(stateDb, cooccurrenceIdx);
23397
+ ctx.lastCooccurrenceRebuildAt = Date.now();
23398
+ ctx.cooccurrenceIndex = cooccurrenceIdx;
23399
+ if (sd) {
23400
+ saveCooccurrenceToStateDb(sd, cooccurrenceIdx);
23360
23401
  }
23361
23402
  tracker.end({ rebuilt: true, associations: cooccurrenceIdx._metadata.total_associations });
23362
23403
  serverLog("watcher", `Co-occurrence: rebuilt ${cooccurrenceIdx._metadata.total_associations} associations`);
@@ -23368,17 +23409,13 @@ async function runPostIndexWork(index) {
23368
23409
  tracker.end({ error: String(e) });
23369
23410
  serverLog("watcher", `Co-occurrence: failed: ${e}`);
23370
23411
  }
23371
- if (stateDb) {
23412
+ if (sd) {
23372
23413
  tracker.start("edge_weights", {});
23373
23414
  try {
23374
- const edgeWeightAgeMs = lastEdgeWeightRebuildAt > 0 ? Date.now() - lastEdgeWeightRebuildAt : Infinity;
23415
+ const edgeWeightAgeMs = ctx.lastEdgeWeightRebuildAt > 0 ? Date.now() - ctx.lastEdgeWeightRebuildAt : Infinity;
23375
23416
  if (edgeWeightAgeMs >= 60 * 60 * 1e3) {
23376
- const result = recomputeEdgeWeights(stateDb);
23377
- lastEdgeWeightRebuildAt = Date.now();
23378
- {
23379
- const activeCtx = getActiveVaultContext();
23380
- if (activeCtx) activeCtx.lastEdgeWeightRebuildAt = lastEdgeWeightRebuildAt;
23381
- }
23417
+ const result = recomputeEdgeWeights(sd);
23418
+ ctx.lastEdgeWeightRebuildAt = Date.now();
23382
23419
  tracker.end({
23383
23420
  rebuilt: true,
23384
23421
  edges: result.edges_updated,
@@ -23408,7 +23445,7 @@ async function runPostIndexWork(index) {
23408
23445
  removeEmbedding(event.path);
23409
23446
  embRemoved++;
23410
23447
  } else if (event.path.endsWith(".md")) {
23411
- const absPath = path33.join(vaultPath, event.path);
23448
+ const absPath = path33.join(vp, event.path);
23412
23449
  await updateEmbedding(event.path, absPath);
23413
23450
  embUpdated++;
23414
23451
  }
@@ -23420,12 +23457,12 @@ async function runPostIndexWork(index) {
23420
23457
  } else {
23421
23458
  tracker.skip("note_embeddings", "not built");
23422
23459
  }
23423
- if (hasEntityEmbeddingsIndex() && stateDb) {
23460
+ if (hasEntityEmbeddingsIndex() && sd) {
23424
23461
  tracker.start("entity_embeddings", { files: filteredEvents.length });
23425
23462
  let entEmbUpdated = 0;
23426
23463
  const entEmbNames = [];
23427
23464
  try {
23428
- const allEntities = getAllEntitiesFromDb3(stateDb);
23465
+ const allEntities = getAllEntitiesFromDb3(sd);
23429
23466
  for (const event of filteredEvents) {
23430
23467
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23431
23468
  const matching = allEntities.filter((e) => e.path === event.path);
@@ -23435,7 +23472,7 @@ async function runPostIndexWork(index) {
23435
23472
  path: entity.path,
23436
23473
  category: entity.category,
23437
23474
  aliases: entity.aliases
23438
- }, vaultPath);
23475
+ }, vp);
23439
23476
  entEmbUpdated++;
23440
23477
  entEmbNames.push(entity.name);
23441
23478
  }
@@ -23445,12 +23482,12 @@ async function runPostIndexWork(index) {
23445
23482
  tracker.end({ updated: entEmbUpdated, updated_entities: entEmbNames.slice(0, 10) });
23446
23483
  serverLog("watcher", `Entity embeddings: ${entEmbUpdated} updated`);
23447
23484
  } else {
23448
- tracker.skip("entity_embeddings", !stateDb ? "no stateDb" : "not built");
23485
+ tracker.skip("entity_embeddings", !sd ? "no sd" : "not built");
23449
23486
  }
23450
- if (stateDb) {
23487
+ if (sd) {
23451
23488
  tracker.start("index_cache", { note_count: vaultIndex.notes.size });
23452
23489
  try {
23453
- saveVaultIndexToCache(stateDb, vaultIndex);
23490
+ saveVaultIndexToCache(sd, vaultIndex);
23454
23491
  tracker.end({ saved: true });
23455
23492
  serverLog("watcher", "Index cache saved");
23456
23493
  } catch (err) {
@@ -23458,7 +23495,7 @@ async function runPostIndexWork(index) {
23458
23495
  serverLog("index", `Failed to update index cache: ${err instanceof Error ? err.message : err}`, "error");
23459
23496
  }
23460
23497
  } else {
23461
- tracker.skip("index_cache", "no stateDb");
23498
+ tracker.skip("index_cache", "no sd");
23462
23499
  }
23463
23500
  tracker.start("task_cache", { files: filteredEvents.length });
23464
23501
  let taskUpdated = 0;
@@ -23469,7 +23506,7 @@ async function runPostIndexWork(index) {
23469
23506
  removeTaskCacheForFile(event.path);
23470
23507
  taskRemoved++;
23471
23508
  } else if (event.path.endsWith(".md")) {
23472
- await updateTaskCacheForFile(vaultPath, event.path);
23509
+ await updateTaskCacheForFile(vp, event.path);
23473
23510
  taskUpdated++;
23474
23511
  }
23475
23512
  } catch {
@@ -23507,19 +23544,19 @@ async function runPostIndexWork(index) {
23507
23544
  } catch {
23508
23545
  }
23509
23546
  }
23510
- if (stateDb) {
23511
- const upsertHistory = stateDb.db.prepare(`
23547
+ if (sd) {
23548
+ const upsertHistory = sd.db.prepare(`
23512
23549
  INSERT INTO note_link_history (note_path, target) VALUES (?, ?)
23513
23550
  ON CONFLICT(note_path, target) DO UPDATE SET edits_survived = edits_survived + 1
23514
23551
  `);
23515
- const checkThreshold = stateDb.db.prepare(`
23552
+ const checkThreshold = sd.db.prepare(`
23516
23553
  SELECT target FROM note_link_history
23517
23554
  WHERE note_path = ? AND target = ? AND edits_survived >= 3 AND last_positive_at IS NULL
23518
23555
  `);
23519
- const markPositive = stateDb.db.prepare(`
23556
+ const markPositive = sd.db.prepare(`
23520
23557
  UPDATE note_link_history SET last_positive_at = datetime('now') WHERE note_path = ? AND target = ?
23521
23558
  `);
23522
- const getEdgeCount = stateDb.db.prepare(
23559
+ const getEdgeCount = sd.db.prepare(
23523
23560
  "SELECT edits_survived FROM note_link_history WHERE note_path=? AND target=?"
23524
23561
  );
23525
23562
  for (const entry of forwardLinkResults) {
@@ -23527,16 +23564,16 @@ async function runPostIndexWork(index) {
23527
23564
  ...entry.resolved.map((n) => n.toLowerCase()),
23528
23565
  ...entry.dead.map((n) => n.toLowerCase())
23529
23566
  ]);
23530
- const previousSet = getStoredNoteLinks(stateDb, entry.file);
23567
+ const previousSet = getStoredNoteLinks(sd, entry.file);
23531
23568
  if (previousSet.size === 0) {
23532
- updateStoredNoteLinks(stateDb, entry.file, currentSet);
23569
+ updateStoredNoteLinks(sd, entry.file, currentSet);
23533
23570
  continue;
23534
23571
  }
23535
23572
  const diff = diffNoteLinks(previousSet, currentSet);
23536
23573
  if (diff.added.length > 0 || diff.removed.length > 0) {
23537
23574
  linkDiffs.push({ file: entry.file, ...diff });
23538
23575
  }
23539
- updateStoredNoteLinks(stateDb, entry.file, currentSet);
23576
+ updateStoredNoteLinks(sd, entry.file, currentSet);
23540
23577
  if (diff.removed.length === 0) continue;
23541
23578
  for (const link of currentSet) {
23542
23579
  if (!previousSet.has(link)) continue;
@@ -23551,7 +23588,7 @@ async function runPostIndexWork(index) {
23551
23588
  (e) => e.nameLower === link || (e.aliases ?? []).some((a) => a.toLowerCase() === link)
23552
23589
  );
23553
23590
  if (entity) {
23554
- recordFeedback(stateDb, entity.name, "implicit:kept", entry.file, true, 0.8);
23591
+ recordFeedback(sd, entity.name, "implicit:kept", entry.file, true, 0.8);
23555
23592
  markPositive.run(entry.file, link);
23556
23593
  }
23557
23594
  }
@@ -23559,10 +23596,10 @@ async function runPostIndexWork(index) {
23559
23596
  }
23560
23597
  for (const event of filteredEvents) {
23561
23598
  if (event.type === "delete") {
23562
- const previousSet = getStoredNoteLinks(stateDb, event.path);
23599
+ const previousSet = getStoredNoteLinks(sd, event.path);
23563
23600
  if (previousSet.size > 0) {
23564
23601
  linkDiffs.push({ file: event.path, added: [], removed: [...previousSet] });
23565
- updateStoredNoteLinks(stateDb, event.path, /* @__PURE__ */ new Set());
23602
+ updateStoredNoteLinks(sd, event.path, /* @__PURE__ */ new Set());
23566
23603
  }
23567
23604
  }
23568
23605
  }
@@ -23570,10 +23607,10 @@ async function runPostIndexWork(index) {
23570
23607
  for (const event of filteredEvents) {
23571
23608
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23572
23609
  if (processedFiles.has(event.path)) continue;
23573
- const previousSet = getStoredNoteLinks(stateDb, event.path);
23610
+ const previousSet = getStoredNoteLinks(sd, event.path);
23574
23611
  if (previousSet.size > 0) {
23575
23612
  linkDiffs.push({ file: event.path, added: [], removed: [...previousSet] });
23576
- updateStoredNoteLinks(stateDb, event.path, /* @__PURE__ */ new Set());
23613
+ updateStoredNoteLinks(sd, event.path, /* @__PURE__ */ new Set());
23577
23614
  }
23578
23615
  }
23579
23616
  }
@@ -23599,11 +23636,11 @@ async function runPostIndexWork(index) {
23599
23636
  }
23600
23637
  tracker.start("wikilink_check", { files: filteredEvents.length });
23601
23638
  const trackedLinks = [];
23602
- if (stateDb) {
23639
+ if (sd) {
23603
23640
  for (const event of filteredEvents) {
23604
23641
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23605
23642
  try {
23606
- const apps = getTrackedApplications(stateDb, event.path);
23643
+ const apps = getTrackedApplications(sd, event.path);
23607
23644
  if (apps.length > 0) trackedLinks.push({ file: event.path, entities: apps });
23608
23645
  } catch {
23609
23646
  }
@@ -23628,7 +23665,7 @@ async function runPostIndexWork(index) {
23628
23665
  for (const event of filteredEvents) {
23629
23666
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23630
23667
  try {
23631
- const content = await fs33.readFile(path33.join(vaultPath, event.path), "utf-8");
23668
+ const content = await fs33.readFile(path33.join(vp, event.path), "utf-8");
23632
23669
  const zones = getProtectedZones2(content);
23633
23670
  const linked = new Set(
23634
23671
  (forwardLinkResults.find((r) => r.file === event.path)?.resolved ?? []).map((n) => n.toLowerCase())
@@ -23636,7 +23673,7 @@ async function runPostIndexWork(index) {
23636
23673
  const mentions = [];
23637
23674
  for (const entity of entitiesAfter) {
23638
23675
  if (linked.has(entity.nameLower)) continue;
23639
- if (stateDb && isSuppressed(stateDb, entity.name)) continue;
23676
+ if (sd && isSuppressed(sd, entity.name)) continue;
23640
23677
  const matches = findEntityMatches2(content, entity.name, true);
23641
23678
  const valid = matches.some((m) => !rangeOverlapsProtectedZone(m.start, m.end, zones));
23642
23679
  if (valid) {
@@ -23663,20 +23700,20 @@ async function runPostIndexWork(index) {
23663
23700
  const deletedFiles = new Set(
23664
23701
  filteredEvents.filter((e) => e.type === "delete").map((e) => e.path)
23665
23702
  );
23666
- const preSuppressed = stateDb ? new Set(getAllSuppressionPenalties(stateDb).keys()) : /* @__PURE__ */ new Set();
23703
+ const preSuppressed = sd ? new Set(getAllSuppressionPenalties(sd).keys()) : /* @__PURE__ */ new Set();
23667
23704
  const feedbackResults = [];
23668
- if (stateDb) {
23705
+ if (sd) {
23669
23706
  for (const event of filteredEvents) {
23670
23707
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23671
23708
  try {
23672
- const content = await fs33.readFile(path33.join(vaultPath, event.path), "utf-8");
23673
- const removed = processImplicitFeedback(stateDb, event.path, content);
23709
+ const content = await fs33.readFile(path33.join(vp, event.path), "utf-8");
23710
+ const removed = processImplicitFeedback(sd, event.path, content);
23674
23711
  for (const entity of removed) feedbackResults.push({ entity, file: event.path });
23675
23712
  } catch {
23676
23713
  }
23677
23714
  }
23678
23715
  }
23679
- if (stateDb && linkDiffs.length > 0) {
23716
+ if (sd && linkDiffs.length > 0) {
23680
23717
  for (const diff of linkDiffs) {
23681
23718
  if (deletedFiles.has(diff.file)) continue;
23682
23719
  for (const target of diff.removed) {
@@ -23685,15 +23722,15 @@ async function runPostIndexWork(index) {
23685
23722
  (e) => e.nameLower === target || (e.aliases ?? []).some((a) => a.toLowerCase() === target)
23686
23723
  );
23687
23724
  if (entity) {
23688
- recordFeedback(stateDb, entity.name, "implicit:removed", diff.file, false);
23725
+ recordFeedback(sd, entity.name, "implicit:removed", diff.file, false);
23689
23726
  feedbackResults.push({ entity: entity.name, file: diff.file });
23690
23727
  }
23691
23728
  }
23692
23729
  }
23693
23730
  }
23694
23731
  const additionResults = [];
23695
- if (stateDb && linkDiffs.length > 0) {
23696
- const checkApplication = stateDb.db.prepare(
23732
+ if (sd && linkDiffs.length > 0) {
23733
+ const checkApplication = sd.db.prepare(
23697
23734
  `SELECT 1 FROM wikilink_applications WHERE LOWER(entity) = LOWER(?) AND note_path = ? AND status = 'applied'`
23698
23735
  );
23699
23736
  for (const diff of linkDiffs) {
@@ -23704,15 +23741,15 @@ async function runPostIndexWork(index) {
23704
23741
  (e) => e.nameLower === target || (e.aliases ?? []).some((a) => a.toLowerCase() === target)
23705
23742
  );
23706
23743
  if (entity) {
23707
- recordFeedback(stateDb, entity.name, "implicit:manual_added", diff.file, true);
23744
+ recordFeedback(sd, entity.name, "implicit:manual_added", diff.file, true);
23708
23745
  additionResults.push({ entity: entity.name, file: diff.file });
23709
23746
  }
23710
23747
  }
23711
23748
  }
23712
23749
  }
23713
23750
  const newlySuppressed = [];
23714
- if (stateDb) {
23715
- const postSuppressed = getAllSuppressionPenalties(stateDb);
23751
+ if (sd) {
23752
+ const postSuppressed = getAllSuppressionPenalties(sd);
23716
23753
  for (const entity of postSuppressed.keys()) {
23717
23754
  if (!preSuppressed.has(entity)) {
23718
23755
  newlySuppressed.push(entity);
@@ -23728,10 +23765,10 @@ async function runPostIndexWork(index) {
23728
23765
  }
23729
23766
  tracker.start("corrections", {});
23730
23767
  try {
23731
- if (stateDb) {
23732
- const corrProcessed = processPendingCorrections(stateDb);
23768
+ if (sd) {
23769
+ const corrProcessed = processPendingCorrections(sd);
23733
23770
  if (corrProcessed > 0) {
23734
- updateSuppressionList(stateDb);
23771
+ updateSuppressionList(sd);
23735
23772
  }
23736
23773
  tracker.end({ processed: corrProcessed });
23737
23774
  if (corrProcessed > 0) {
@@ -23749,7 +23786,7 @@ async function runPostIndexWork(index) {
23749
23786
  for (const event of filteredEvents) {
23750
23787
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23751
23788
  try {
23752
- const content = await fs33.readFile(path33.join(vaultPath, event.path), "utf-8");
23789
+ const content = await fs33.readFile(path33.join(vp, event.path), "utf-8");
23753
23790
  const zones = getProtectedZones2(content);
23754
23791
  const linkedSet = new Set(
23755
23792
  (forwardLinkResults.find((r) => r.file === event.path)?.resolved ?? []).concat(forwardLinkResults.find((r) => r.file === event.path)?.dead ?? []).map((n) => n.toLowerCase())
@@ -23782,7 +23819,7 @@ async function runPostIndexWork(index) {
23782
23819
  for (const event of filteredEvents) {
23783
23820
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23784
23821
  try {
23785
- const rawContent = await fs33.readFile(path33.join(vaultPath, event.path), "utf-8");
23822
+ const rawContent = await fs33.readFile(path33.join(vp, event.path), "utf-8");
23786
23823
  const content = rawContent.replace(/ → \[\[.*$/gm, "");
23787
23824
  const result = await suggestRelatedLinks(content, {
23788
23825
  maxSuggestions: 5,
@@ -23812,7 +23849,7 @@ async function runPostIndexWork(index) {
23812
23849
  const proactiveResults = [];
23813
23850
  for (const { file, top } of suggestionResults) {
23814
23851
  try {
23815
- const result = await applyProactiveSuggestions(file, vaultPath, top, {
23852
+ const result = await applyProactiveSuggestions(file, vp, top, {
23816
23853
  minScore: flywheelConfig?.proactive_min_score ?? 20,
23817
23854
  maxPerFile: flywheelConfig?.proactive_max_per_file ?? 3
23818
23855
  });
@@ -23835,7 +23872,7 @@ async function runPostIndexWork(index) {
23835
23872
  tracker.start("tag_scan", { files: filteredEvents.length });
23836
23873
  try {
23837
23874
  const tagDiffs = [];
23838
- if (stateDb) {
23875
+ if (sd) {
23839
23876
  const noteTagsForward = /* @__PURE__ */ new Map();
23840
23877
  for (const [tag, paths] of vaultIndex.tags) {
23841
23878
  for (const notePath of paths) {
@@ -23846,9 +23883,9 @@ async function runPostIndexWork(index) {
23846
23883
  for (const event of filteredEvents) {
23847
23884
  if (event.type === "delete" || !event.path.endsWith(".md")) continue;
23848
23885
  const currentSet = noteTagsForward.get(event.path) ?? /* @__PURE__ */ new Set();
23849
- const previousSet = getStoredNoteTags(stateDb, event.path);
23886
+ const previousSet = getStoredNoteTags(sd, event.path);
23850
23887
  if (previousSet.size === 0 && currentSet.size > 0) {
23851
- updateStoredNoteTags(stateDb, event.path, currentSet);
23888
+ updateStoredNoteTags(sd, event.path, currentSet);
23852
23889
  continue;
23853
23890
  }
23854
23891
  const added = [...currentSet].filter((t) => !previousSet.has(t));
@@ -23856,14 +23893,14 @@ async function runPostIndexWork(index) {
23856
23893
  if (added.length > 0 || removed.length > 0) {
23857
23894
  tagDiffs.push({ file: event.path, added, removed });
23858
23895
  }
23859
- updateStoredNoteTags(stateDb, event.path, currentSet);
23896
+ updateStoredNoteTags(sd, event.path, currentSet);
23860
23897
  }
23861
23898
  for (const event of filteredEvents) {
23862
23899
  if (event.type === "delete") {
23863
- const previousSet = getStoredNoteTags(stateDb, event.path);
23900
+ const previousSet = getStoredNoteTags(sd, event.path);
23864
23901
  if (previousSet.size > 0) {
23865
23902
  tagDiffs.push({ file: event.path, added: [], removed: [...previousSet] });
23866
- updateStoredNoteTags(stateDb, event.path, /* @__PURE__ */ new Set());
23903
+ updateStoredNoteTags(sd, event.path, /* @__PURE__ */ new Set());
23867
23904
  }
23868
23905
  }
23869
23906
  }
@@ -23880,22 +23917,22 @@ async function runPostIndexWork(index) {
23880
23917
  }
23881
23918
  tracker.start("retrieval_cooccurrence", {});
23882
23919
  try {
23883
- if (stateDb) {
23884
- const inserted = mineRetrievalCooccurrence(stateDb);
23920
+ if (sd) {
23921
+ const inserted = mineRetrievalCooccurrence(sd);
23885
23922
  tracker.end({ pairs_inserted: inserted });
23886
23923
  if (inserted > 0) {
23887
23924
  serverLog("watcher", `Retrieval co-occurrence: ${inserted} new pairs`);
23888
23925
  }
23889
23926
  } else {
23890
- tracker.end({ skipped: "no stateDb" });
23927
+ tracker.end({ skipped: "no sd" });
23891
23928
  }
23892
23929
  } catch (e) {
23893
23930
  tracker.end({ error: String(e) });
23894
23931
  serverLog("watcher", `Retrieval co-occurrence: failed: ${e}`, "error");
23895
23932
  }
23896
23933
  const duration = Date.now() - batchStart;
23897
- if (stateDb) {
23898
- recordIndexEvent(stateDb, {
23934
+ if (sd) {
23935
+ recordIndexEvent(sd, {
23899
23936
  trigger: "watcher",
23900
23937
  duration_ms: duration,
23901
23938
  note_count: vaultIndex.notes.size,
@@ -23908,8 +23945,8 @@ async function runPostIndexWork(index) {
23908
23945
  } catch (err) {
23909
23946
  updateIndexState("error", err instanceof Error ? err : new Error(String(err)));
23910
23947
  const duration = Date.now() - batchStart;
23911
- if (stateDb) {
23912
- recordIndexEvent(stateDb, {
23948
+ if (sd) {
23949
+ recordIndexEvent(sd, {
23913
23950
  trigger: "watcher",
23914
23951
  duration_ms: duration,
23915
23952
  success: false,
@@ -23923,7 +23960,7 @@ async function runPostIndexWork(index) {
23923
23960
  }
23924
23961
  };
23925
23962
  const watcher = createVaultWatcher({
23926
- vaultPath,
23963
+ vaultPath: vp,
23927
23964
  config,
23928
23965
  onBatch: handleBatch,
23929
23966
  onStateChange: (status) => {
@@ -23935,11 +23972,12 @@ async function runPostIndexWork(index) {
23935
23972
  serverLog("watcher", `Watcher error: ${err.message}`, "error");
23936
23973
  }
23937
23974
  });
23975
+ ctx.watcher = watcher;
23938
23976
  watcherInstance = watcher;
23939
- if (stateDb) {
23940
- const lastPipelineEvent = getRecentPipelineEvent(stateDb);
23977
+ if (sd) {
23978
+ const lastPipelineEvent = getRecentPipelineEvent(sd);
23941
23979
  if (lastPipelineEvent) {
23942
- const catchupEvents = await buildStartupCatchupBatch(vaultPath, lastPipelineEvent.timestamp);
23980
+ const catchupEvents = await buildStartupCatchupBatch(vp, lastPipelineEvent.timestamp);
23943
23981
  if (catchupEvents.length > 0) {
23944
23982
  console.error(`[Flywheel] Startup catch-up: ${catchupEvents.length} file(s) modified while offline`);
23945
23983
  await handleBatch({ events: catchupEvents, renames: [], timestamp: Date.now() });
@@ -23950,8 +23988,8 @@ async function runPostIndexWork(index) {
23950
23988
  serverLog("watcher", "File watcher started");
23951
23989
  }
23952
23990
  if (process.env.FLYWHEEL_WATCH !== "false") {
23953
- startSweepTimer(() => vaultIndex, void 0, () => {
23954
- if (stateDb) runPeriodicMaintenance(stateDb);
23991
+ startSweepTimer(() => ctx.vaultIndex, void 0, () => {
23992
+ if (sd) runPeriodicMaintenance(sd);
23955
23993
  });
23956
23994
  serverLog("server", "Sweep timer started (5 min interval)");
23957
23995
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@velvetmonkey/flywheel-memory",
3
- "version": "2.0.129",
3
+ "version": "2.0.131",
4
4
  "description": "MCP server that gives Claude full read/write access to your Obsidian vault. Select from 69 tools for search, backlinks, graph queries, mutations, agent memory, and hybrid semantic search.",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -53,7 +53,7 @@
53
53
  },
54
54
  "dependencies": {
55
55
  "@modelcontextprotocol/sdk": "^1.25.1",
56
- "@velvetmonkey/vault-core": "2.0.129",
56
+ "@velvetmonkey/vault-core": "2.0.131",
57
57
  "better-sqlite3": "^11.0.0",
58
58
  "chokidar": "^4.0.0",
59
59
  "gray-matter": "^4.0.3",