@velvetmonkey/flywheel-memory 2.0.64 → 2.0.65
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +98 -47
- package/package.json +2 -2
package/dist/index.js
CHANGED
|
@@ -61,6 +61,7 @@ var init_constants = __esm({
|
|
|
61
61
|
import fs18 from "fs/promises";
|
|
62
62
|
import path20 from "path";
|
|
63
63
|
import matter5 from "gray-matter";
|
|
64
|
+
import { createHash as createHash2 } from "node:crypto";
|
|
64
65
|
function isSensitivePath(filePath) {
|
|
65
66
|
const normalizedPath = filePath.replace(/\\/g, "/");
|
|
66
67
|
return SENSITIVE_PATH_PATTERNS.some((pattern) => pattern.test(normalizedPath));
|
|
@@ -471,6 +472,9 @@ async function validatePathSecure(vaultPath2, notePath) {
|
|
|
471
472
|
}
|
|
472
473
|
return { valid: true };
|
|
473
474
|
}
|
|
475
|
+
function computeContentHash(rawContent) {
|
|
476
|
+
return createHash2("sha256").update(rawContent).digest("hex").slice(0, 16);
|
|
477
|
+
}
|
|
474
478
|
async function readVaultFile(vaultPath2, notePath) {
|
|
475
479
|
if (!validatePath(vaultPath2, notePath)) {
|
|
476
480
|
throw new Error("Invalid path: path traversal not allowed");
|
|
@@ -480,6 +484,7 @@ async function readVaultFile(vaultPath2, notePath) {
|
|
|
480
484
|
fs18.readFile(fullPath, "utf-8"),
|
|
481
485
|
fs18.stat(fullPath)
|
|
482
486
|
]);
|
|
487
|
+
const contentHash2 = computeContentHash(rawContent);
|
|
483
488
|
const lineEnding = detectLineEnding(rawContent);
|
|
484
489
|
const normalizedContent = normalizeLineEndings(rawContent);
|
|
485
490
|
const parsed = matter5(normalizedContent);
|
|
@@ -489,7 +494,8 @@ async function readVaultFile(vaultPath2, notePath) {
|
|
|
489
494
|
frontmatter,
|
|
490
495
|
rawContent,
|
|
491
496
|
lineEnding,
|
|
492
|
-
mtimeMs: stat4.mtimeMs
|
|
497
|
+
mtimeMs: stat4.mtimeMs,
|
|
498
|
+
contentHash: contentHash2
|
|
493
499
|
};
|
|
494
500
|
}
|
|
495
501
|
function deepCloneFrontmatter(obj) {
|
|
@@ -523,12 +529,19 @@ function deepCloneFrontmatter(obj) {
|
|
|
523
529
|
}
|
|
524
530
|
return cloned;
|
|
525
531
|
}
|
|
526
|
-
async function writeVaultFile(vaultPath2, notePath, content, frontmatter, lineEnding = "LF") {
|
|
532
|
+
async function writeVaultFile(vaultPath2, notePath, content, frontmatter, lineEnding = "LF", expectedHash) {
|
|
527
533
|
const validation = await validatePathSecure(vaultPath2, notePath);
|
|
528
534
|
if (!validation.valid) {
|
|
529
535
|
throw new Error(`Invalid path: ${validation.reason}`);
|
|
530
536
|
}
|
|
531
537
|
const fullPath = path20.join(vaultPath2, notePath);
|
|
538
|
+
if (expectedHash) {
|
|
539
|
+
const currentRaw = await fs18.readFile(fullPath, "utf-8");
|
|
540
|
+
const currentHash = computeContentHash(currentRaw);
|
|
541
|
+
if (currentHash !== expectedHash) {
|
|
542
|
+
throw new WriteConflictError(notePath);
|
|
543
|
+
}
|
|
544
|
+
}
|
|
532
545
|
let output = matter5.stringify(content, frontmatter);
|
|
533
546
|
output = normalizeTrailingNewline(output);
|
|
534
547
|
output = convertLineEndings(output, lineEnding);
|
|
@@ -693,7 +706,7 @@ function injectMutationMetadata(frontmatter, scoping) {
|
|
|
693
706
|
}
|
|
694
707
|
return frontmatter;
|
|
695
708
|
}
|
|
696
|
-
var SENSITIVE_PATH_PATTERNS, REDOS_PATTERNS, MAX_REGEX_LENGTH, EMPTY_PLACEHOLDER_PATTERNS, DiagnosticError;
|
|
709
|
+
var SENSITIVE_PATH_PATTERNS, REDOS_PATTERNS, MAX_REGEX_LENGTH, EMPTY_PLACEHOLDER_PATTERNS, WriteConflictError, DiagnosticError;
|
|
697
710
|
var init_writer = __esm({
|
|
698
711
|
"src/core/write/writer.ts"() {
|
|
699
712
|
"use strict";
|
|
@@ -799,6 +812,13 @@ var init_writer = __esm({
|
|
|
799
812
|
/^\*\s*$/
|
|
800
813
|
// "* " (asterisk bullet placeholder)
|
|
801
814
|
];
|
|
815
|
+
WriteConflictError = class extends Error {
|
|
816
|
+
constructor(notePath) {
|
|
817
|
+
super(`Write conflict on ${notePath}: file was modified externally since it was read. Re-read and retry.`);
|
|
818
|
+
this.notePath = notePath;
|
|
819
|
+
this.name = "WriteConflictError";
|
|
820
|
+
}
|
|
821
|
+
};
|
|
802
822
|
DiagnosticError = class extends Error {
|
|
803
823
|
diagnostic;
|
|
804
824
|
constructor(message, diagnostic) {
|
|
@@ -13228,7 +13248,7 @@ async function withVaultFile(options, operation) {
|
|
|
13228
13248
|
return formatMcpResult(existsError);
|
|
13229
13249
|
}
|
|
13230
13250
|
const runMutation = async () => {
|
|
13231
|
-
const { content, frontmatter: frontmatter2, lineEnding: lineEnding2, mtimeMs } = await readVaultFile(vaultPath2, notePath);
|
|
13251
|
+
const { content, frontmatter: frontmatter2, lineEnding: lineEnding2, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
13232
13252
|
const writeStateDb = getWriteStateDb();
|
|
13233
13253
|
if (writeStateDb) {
|
|
13234
13254
|
processImplicitFeedback(writeStateDb, notePath, content);
|
|
@@ -13250,7 +13270,7 @@ async function withVaultFile(options, operation) {
|
|
|
13250
13270
|
notePath
|
|
13251
13271
|
};
|
|
13252
13272
|
const opResult2 = await operation(ctx);
|
|
13253
|
-
return { opResult: opResult2, frontmatter: frontmatter2, lineEnding: lineEnding2, mtimeMs };
|
|
13273
|
+
return { opResult: opResult2, frontmatter: frontmatter2, lineEnding: lineEnding2, contentHash: contentHash2 };
|
|
13254
13274
|
};
|
|
13255
13275
|
let result = await runMutation();
|
|
13256
13276
|
if ("error" in result) {
|
|
@@ -13267,20 +13287,11 @@ async function withVaultFile(options, operation) {
|
|
|
13267
13287
|
});
|
|
13268
13288
|
return formatMcpResult(dryResult);
|
|
13269
13289
|
}
|
|
13270
|
-
const fullPath = path21.join(vaultPath2, notePath);
|
|
13271
|
-
const statBefore = await fs19.stat(fullPath);
|
|
13272
|
-
if (statBefore.mtimeMs !== result.mtimeMs) {
|
|
13273
|
-
console.warn(`[withVaultFile] External modification detected on ${notePath}, re-reading and retrying`);
|
|
13274
|
-
result = await runMutation();
|
|
13275
|
-
if ("error" in result) {
|
|
13276
|
-
return formatMcpResult(result.error);
|
|
13277
|
-
}
|
|
13278
|
-
}
|
|
13279
13290
|
let finalFrontmatter = opResult.updatedFrontmatter ?? frontmatter;
|
|
13280
13291
|
if (scoping && (scoping.agent_id || scoping.session_id)) {
|
|
13281
13292
|
finalFrontmatter = injectMutationMetadata(finalFrontmatter, scoping);
|
|
13282
13293
|
}
|
|
13283
|
-
await writeVaultFile(vaultPath2, notePath, opResult.updatedContent, finalFrontmatter, lineEnding);
|
|
13294
|
+
await writeVaultFile(vaultPath2, notePath, opResult.updatedContent, finalFrontmatter, lineEnding, result.contentHash);
|
|
13284
13295
|
const gitInfo = await handleGitCommit(vaultPath2, notePath, commit, commitPrefix);
|
|
13285
13296
|
const successRes = successResult(notePath, opResult.message, gitInfo, {
|
|
13286
13297
|
preview: opResult.preview,
|
|
@@ -13291,6 +13302,13 @@ async function withVaultFile(options, operation) {
|
|
|
13291
13302
|
return formatMcpResult(successRes);
|
|
13292
13303
|
} catch (error) {
|
|
13293
13304
|
const extras = {};
|
|
13305
|
+
if (error instanceof WriteConflictError) {
|
|
13306
|
+
extras.warnings = [{
|
|
13307
|
+
type: "write_conflict",
|
|
13308
|
+
message: error.message,
|
|
13309
|
+
suggestion: "The file was modified while processing. Re-read and retry."
|
|
13310
|
+
}];
|
|
13311
|
+
}
|
|
13294
13312
|
if (error instanceof DiagnosticError) {
|
|
13295
13313
|
extras.diagnostic = error.diagnostic;
|
|
13296
13314
|
}
|
|
@@ -13309,7 +13327,7 @@ async function withVaultFrontmatter(options, operation) {
|
|
|
13309
13327
|
if (existsError) {
|
|
13310
13328
|
return formatMcpResult(existsError);
|
|
13311
13329
|
}
|
|
13312
|
-
const { content, frontmatter, lineEnding } = await readVaultFile(vaultPath2, notePath);
|
|
13330
|
+
const { content, frontmatter, lineEnding, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
13313
13331
|
const ctx = { content, frontmatter, lineEnding, vaultPath: vaultPath2, notePath };
|
|
13314
13332
|
const opResult = await operation(ctx);
|
|
13315
13333
|
if (dryRun) {
|
|
@@ -13319,16 +13337,25 @@ async function withVaultFrontmatter(options, operation) {
|
|
|
13319
13337
|
});
|
|
13320
13338
|
return formatMcpResult(result2);
|
|
13321
13339
|
}
|
|
13322
|
-
await writeVaultFile(vaultPath2, notePath, content, opResult.updatedFrontmatter, lineEnding);
|
|
13340
|
+
await writeVaultFile(vaultPath2, notePath, content, opResult.updatedFrontmatter, lineEnding, contentHash2);
|
|
13323
13341
|
const gitInfo = await handleGitCommit(vaultPath2, notePath, commit, commitPrefix);
|
|
13324
13342
|
const result = successResult(notePath, opResult.message, gitInfo, {
|
|
13325
13343
|
preview: opResult.preview
|
|
13326
13344
|
});
|
|
13327
13345
|
return formatMcpResult(result);
|
|
13328
13346
|
} catch (error) {
|
|
13347
|
+
const extras = {};
|
|
13348
|
+
if (error instanceof WriteConflictError) {
|
|
13349
|
+
extras.warnings = [{
|
|
13350
|
+
type: "write_conflict",
|
|
13351
|
+
message: error.message,
|
|
13352
|
+
suggestion: "The file was modified while processing. Re-read and retry."
|
|
13353
|
+
}];
|
|
13354
|
+
}
|
|
13329
13355
|
const result = errorResult(
|
|
13330
13356
|
notePath,
|
|
13331
|
-
`Failed to ${actionDescription}: ${error instanceof Error ? error.message : String(error)}`
|
|
13357
|
+
`Failed to ${actionDescription}: ${error instanceof Error ? error.message : String(error)}`,
|
|
13358
|
+
extras
|
|
13332
13359
|
);
|
|
13333
13360
|
return formatMcpResult(result);
|
|
13334
13361
|
}
|
|
@@ -13703,7 +13730,7 @@ function registerTaskTools(server2, vaultPath2) {
|
|
|
13703
13730
|
if (existsError) {
|
|
13704
13731
|
return formatMcpResult(existsError);
|
|
13705
13732
|
}
|
|
13706
|
-
const { content: fileContent, frontmatter } = await readVaultFile(vaultPath2, notePath);
|
|
13733
|
+
const { content: fileContent, frontmatter, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
13707
13734
|
let sectionBoundary;
|
|
13708
13735
|
if (section) {
|
|
13709
13736
|
const found = findSection(fileContent, section);
|
|
@@ -13740,7 +13767,7 @@ function registerTaskTools(server2, vaultPath2) {
|
|
|
13740
13767
|
if (agent_id || session_id) {
|
|
13741
13768
|
finalFrontmatter = injectMutationMetadata(frontmatter, { agent_id, session_id });
|
|
13742
13769
|
}
|
|
13743
|
-
await writeVaultFile(vaultPath2, notePath, toggleResult.content, finalFrontmatter);
|
|
13770
|
+
await writeVaultFile(vaultPath2, notePath, toggleResult.content, finalFrontmatter, "LF", contentHash2);
|
|
13744
13771
|
await updateTaskCacheForFile(vaultPath2, notePath).catch(() => {
|
|
13745
13772
|
});
|
|
13746
13773
|
const gitInfo = await handleGitCommit(vaultPath2, notePath, commit, "[Flywheel:Task]");
|
|
@@ -13750,8 +13777,16 @@ function registerTaskTools(server2, vaultPath2) {
|
|
|
13750
13777
|
})
|
|
13751
13778
|
);
|
|
13752
13779
|
} catch (error) {
|
|
13780
|
+
const extras = {};
|
|
13781
|
+
if (error instanceof WriteConflictError) {
|
|
13782
|
+
extras.warnings = [{
|
|
13783
|
+
type: "write_conflict",
|
|
13784
|
+
message: error.message,
|
|
13785
|
+
suggestion: "The file was modified while processing. Re-read and retry."
|
|
13786
|
+
}];
|
|
13787
|
+
}
|
|
13753
13788
|
return formatMcpResult(
|
|
13754
|
-
errorResult(notePath, `Failed to toggle task: ${error instanceof Error ? error.message : String(error)}`)
|
|
13789
|
+
errorResult(notePath, `Failed to toggle task: ${error instanceof Error ? error.message : String(error)}`, extras)
|
|
13755
13790
|
);
|
|
13756
13791
|
}
|
|
13757
13792
|
}
|
|
@@ -14177,10 +14212,8 @@ async function findBacklinks(vaultPath2, targetTitle, targetAliases) {
|
|
|
14177
14212
|
return results;
|
|
14178
14213
|
}
|
|
14179
14214
|
async function updateBacklinksInFile(vaultPath2, filePath, oldTitles, newTitle) {
|
|
14180
|
-
const
|
|
14181
|
-
|
|
14182
|
-
const parsed = matter6(raw);
|
|
14183
|
-
let content = parsed.content;
|
|
14215
|
+
const { content: fileContent, frontmatter, lineEnding, contentHash: contentHash2 } = await readVaultFile(vaultPath2, filePath);
|
|
14216
|
+
let content = fileContent;
|
|
14184
14217
|
let totalUpdated = 0;
|
|
14185
14218
|
for (const oldTitle of oldTitles) {
|
|
14186
14219
|
const pattern = new RegExp(
|
|
@@ -14193,7 +14226,7 @@ async function updateBacklinksInFile(vaultPath2, filePath, oldTitles, newTitle)
|
|
|
14193
14226
|
});
|
|
14194
14227
|
}
|
|
14195
14228
|
if (totalUpdated > 0) {
|
|
14196
|
-
await writeVaultFile(vaultPath2, filePath, content, parsed.data);
|
|
14229
|
+
await writeVaultFile(vaultPath2, filePath, content, frontmatter, lineEnding, contentHash2);
|
|
14197
14230
|
return { updated: true, linksUpdated: totalUpdated };
|
|
14198
14231
|
}
|
|
14199
14232
|
return { updated: false, linksUpdated: 0 };
|
|
@@ -14579,10 +14612,12 @@ function registerMergeTools(server2, vaultPath2) {
|
|
|
14579
14612
|
}
|
|
14580
14613
|
let targetContent;
|
|
14581
14614
|
let targetFrontmatter;
|
|
14615
|
+
let targetContentHash;
|
|
14582
14616
|
try {
|
|
14583
14617
|
const target = await readVaultFile(vaultPath2, target_path);
|
|
14584
14618
|
targetContent = target.content;
|
|
14585
14619
|
targetFrontmatter = target.frontmatter;
|
|
14620
|
+
targetContentHash = target.contentHash;
|
|
14586
14621
|
} catch {
|
|
14587
14622
|
const result2 = {
|
|
14588
14623
|
success: false,
|
|
@@ -14657,7 +14692,7 @@ ${trimmedSource}`;
|
|
|
14657
14692
|
};
|
|
14658
14693
|
return { content: [{ type: "text", text: JSON.stringify(result2, null, 2) }] };
|
|
14659
14694
|
}
|
|
14660
|
-
await writeVaultFile(vaultPath2, target_path, targetContent, targetFrontmatter);
|
|
14695
|
+
await writeVaultFile(vaultPath2, target_path, targetContent, targetFrontmatter, "LF", targetContentHash);
|
|
14661
14696
|
const fullSourcePath = `${vaultPath2}/${source_path}`;
|
|
14662
14697
|
await fs23.unlink(fullSourcePath);
|
|
14663
14698
|
initializeEntityIndex(vaultPath2).catch((err) => {
|
|
@@ -14675,7 +14710,14 @@ ${trimmedSource}`;
|
|
|
14675
14710
|
const result = {
|
|
14676
14711
|
success: false,
|
|
14677
14712
|
message: `Failed to merge entities: ${error instanceof Error ? error.message : String(error)}`,
|
|
14678
|
-
path: source_path
|
|
14713
|
+
path: source_path,
|
|
14714
|
+
...error instanceof WriteConflictError ? {
|
|
14715
|
+
warnings: [{
|
|
14716
|
+
type: "write_conflict",
|
|
14717
|
+
message: error.message,
|
|
14718
|
+
suggestion: "The file was modified while processing. Re-read and retry."
|
|
14719
|
+
}]
|
|
14720
|
+
} : {}
|
|
14679
14721
|
};
|
|
14680
14722
|
return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
|
|
14681
14723
|
}
|
|
@@ -14701,10 +14743,12 @@ ${trimmedSource}`;
|
|
|
14701
14743
|
}
|
|
14702
14744
|
let targetContent;
|
|
14703
14745
|
let targetFrontmatter;
|
|
14746
|
+
let absorbTargetHash;
|
|
14704
14747
|
try {
|
|
14705
14748
|
const target = await readVaultFile(vaultPath2, target_path);
|
|
14706
14749
|
targetContent = target.content;
|
|
14707
14750
|
targetFrontmatter = target.frontmatter;
|
|
14751
|
+
absorbTargetHash = target.contentHash;
|
|
14708
14752
|
} catch {
|
|
14709
14753
|
const result2 = {
|
|
14710
14754
|
success: false,
|
|
@@ -14730,7 +14774,7 @@ ${trimmedSource}`;
|
|
|
14730
14774
|
modifiedFiles.push(backlink.path);
|
|
14731
14775
|
}
|
|
14732
14776
|
} else {
|
|
14733
|
-
await writeVaultFile(vaultPath2, target_path, targetContent, targetFrontmatter);
|
|
14777
|
+
await writeVaultFile(vaultPath2, target_path, targetContent, targetFrontmatter, "LF", absorbTargetHash);
|
|
14734
14778
|
for (const backlink of backlinks) {
|
|
14735
14779
|
if (backlink.path === target_path) continue;
|
|
14736
14780
|
let fileData;
|
|
@@ -14756,7 +14800,7 @@ ${trimmedSource}`;
|
|
|
14756
14800
|
return `[[${targetTitle}|${source_name}]]`;
|
|
14757
14801
|
});
|
|
14758
14802
|
if (linksUpdated > 0) {
|
|
14759
|
-
await writeVaultFile(vaultPath2, backlink.path, content, fileData.frontmatter);
|
|
14803
|
+
await writeVaultFile(vaultPath2, backlink.path, content, fileData.frontmatter, fileData.lineEnding, fileData.contentHash);
|
|
14760
14804
|
totalBacklinksUpdated += linksUpdated;
|
|
14761
14805
|
modifiedFiles.push(backlink.path);
|
|
14762
14806
|
}
|
|
@@ -14787,7 +14831,14 @@ ${trimmedSource}`;
|
|
|
14787
14831
|
const result = {
|
|
14788
14832
|
success: false,
|
|
14789
14833
|
message: `Failed to absorb as alias: ${error instanceof Error ? error.message : String(error)}`,
|
|
14790
|
-
path: target_path
|
|
14834
|
+
path: target_path,
|
|
14835
|
+
...error instanceof WriteConflictError ? {
|
|
14836
|
+
warnings: [{
|
|
14837
|
+
type: "write_conflict",
|
|
14838
|
+
message: error.message,
|
|
14839
|
+
suggestion: "The file was modified while processing. Re-read and retry."
|
|
14840
|
+
}]
|
|
14841
|
+
} : {}
|
|
14791
14842
|
};
|
|
14792
14843
|
return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
|
|
14793
14844
|
}
|
|
@@ -15189,7 +15240,7 @@ async function executeAddToSection(params, vaultPath2, context) {
|
|
|
15189
15240
|
} catch {
|
|
15190
15241
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15191
15242
|
}
|
|
15192
|
-
const { content: fileContent, frontmatter, lineEnding } = await readVaultFile(vaultPath2, notePath);
|
|
15243
|
+
const { content: fileContent, frontmatter, lineEnding, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15193
15244
|
const sectionBoundary = findSection(fileContent, section);
|
|
15194
15245
|
if (!sectionBoundary) {
|
|
15195
15246
|
return { success: false, message: `Section '${section}' not found`, path: notePath };
|
|
@@ -15209,7 +15260,7 @@ async function executeAddToSection(params, vaultPath2, context) {
|
|
|
15209
15260
|
position,
|
|
15210
15261
|
{ preserveListNesting }
|
|
15211
15262
|
);
|
|
15212
|
-
await writeVaultFile(vaultPath2, notePath, updatedContent, frontmatter, lineEnding);
|
|
15263
|
+
await writeVaultFile(vaultPath2, notePath, updatedContent, frontmatter, lineEnding, contentHash2);
|
|
15213
15264
|
return {
|
|
15214
15265
|
success: true,
|
|
15215
15266
|
message: `Added content to section "${sectionBoundary.name}" in ${notePath}`,
|
|
@@ -15229,7 +15280,7 @@ async function executeRemoveFromSection(params, vaultPath2) {
|
|
|
15229
15280
|
} catch {
|
|
15230
15281
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15231
15282
|
}
|
|
15232
|
-
const { content: fileContent, frontmatter, lineEnding } = await readVaultFile(vaultPath2, notePath);
|
|
15283
|
+
const { content: fileContent, frontmatter, lineEnding, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15233
15284
|
const sectionBoundary = findSection(fileContent, section);
|
|
15234
15285
|
if (!sectionBoundary) {
|
|
15235
15286
|
return { success: false, message: `Section '${section}' not found`, path: notePath };
|
|
@@ -15238,7 +15289,7 @@ async function executeRemoveFromSection(params, vaultPath2) {
|
|
|
15238
15289
|
if (removeResult.removedCount === 0) {
|
|
15239
15290
|
return { success: false, message: `No content matching "${pattern}" found`, path: notePath };
|
|
15240
15291
|
}
|
|
15241
|
-
await writeVaultFile(vaultPath2, notePath, removeResult.content, frontmatter, lineEnding);
|
|
15292
|
+
await writeVaultFile(vaultPath2, notePath, removeResult.content, frontmatter, lineEnding, contentHash2);
|
|
15242
15293
|
return {
|
|
15243
15294
|
success: true,
|
|
15244
15295
|
message: `Removed ${removeResult.removedCount} line(s) from section "${sectionBoundary.name}"`,
|
|
@@ -15260,7 +15311,7 @@ async function executeReplaceInSection(params, vaultPath2, context) {
|
|
|
15260
15311
|
} catch {
|
|
15261
15312
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15262
15313
|
}
|
|
15263
|
-
const { content: fileContent, frontmatter, lineEnding } = await readVaultFile(vaultPath2, notePath);
|
|
15314
|
+
const { content: fileContent, frontmatter, lineEnding, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15264
15315
|
const sectionBoundary = findSection(fileContent, section);
|
|
15265
15316
|
if (!sectionBoundary) {
|
|
15266
15317
|
return { success: false, message: `Section '${section}' not found`, path: notePath };
|
|
@@ -15277,7 +15328,7 @@ async function executeReplaceInSection(params, vaultPath2, context) {
|
|
|
15277
15328
|
if (replaceResult.replacedCount === 0) {
|
|
15278
15329
|
return { success: false, message: `No content matching "${search}" found`, path: notePath };
|
|
15279
15330
|
}
|
|
15280
|
-
await writeVaultFile(vaultPath2, notePath, replaceResult.content, frontmatter, lineEnding);
|
|
15331
|
+
await writeVaultFile(vaultPath2, notePath, replaceResult.content, frontmatter, lineEnding, contentHash2);
|
|
15281
15332
|
return {
|
|
15282
15333
|
success: true,
|
|
15283
15334
|
message: `Replaced ${replaceResult.replacedCount} occurrence(s) in section "${sectionBoundary.name}"`,
|
|
@@ -15348,7 +15399,7 @@ async function executeToggleTask(params, vaultPath2) {
|
|
|
15348
15399
|
} catch {
|
|
15349
15400
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15350
15401
|
}
|
|
15351
|
-
const { content: fileContent, frontmatter } = await readVaultFile(vaultPath2, notePath);
|
|
15402
|
+
const { content: fileContent, frontmatter, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15352
15403
|
let sectionBoundary;
|
|
15353
15404
|
if (section) {
|
|
15354
15405
|
sectionBoundary = findSection(fileContent, section);
|
|
@@ -15367,7 +15418,7 @@ async function executeToggleTask(params, vaultPath2) {
|
|
|
15367
15418
|
if (!toggleResult) {
|
|
15368
15419
|
return { success: false, message: "Failed to toggle task", path: notePath };
|
|
15369
15420
|
}
|
|
15370
|
-
await writeVaultFile(vaultPath2, notePath, toggleResult.content, frontmatter);
|
|
15421
|
+
await writeVaultFile(vaultPath2, notePath, toggleResult.content, frontmatter, "LF", contentHash2);
|
|
15371
15422
|
const newStatus = toggleResult.newState ? "completed" : "incomplete";
|
|
15372
15423
|
const checkbox = toggleResult.newState ? "[x]" : "[ ]";
|
|
15373
15424
|
return {
|
|
@@ -15391,7 +15442,7 @@ async function executeAddTask(params, vaultPath2, context) {
|
|
|
15391
15442
|
} catch {
|
|
15392
15443
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15393
15444
|
}
|
|
15394
|
-
const { content: fileContent, frontmatter } = await readVaultFile(vaultPath2, notePath);
|
|
15445
|
+
const { content: fileContent, frontmatter, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15395
15446
|
const sectionBoundary = findSection(fileContent, section);
|
|
15396
15447
|
if (!sectionBoundary) {
|
|
15397
15448
|
return { success: false, message: `Section not found: ${section}`, path: notePath };
|
|
@@ -15411,7 +15462,7 @@ async function executeAddTask(params, vaultPath2, context) {
|
|
|
15411
15462
|
position,
|
|
15412
15463
|
{ preserveListNesting }
|
|
15413
15464
|
);
|
|
15414
|
-
await writeVaultFile(vaultPath2, notePath, updatedContent, frontmatter);
|
|
15465
|
+
await writeVaultFile(vaultPath2, notePath, updatedContent, frontmatter, "LF", contentHash2);
|
|
15415
15466
|
return {
|
|
15416
15467
|
success: true,
|
|
15417
15468
|
message: `Added task to section "${sectionBoundary.name}"`,
|
|
@@ -15428,9 +15479,9 @@ async function executeUpdateFrontmatter(params, vaultPath2) {
|
|
|
15428
15479
|
} catch {
|
|
15429
15480
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15430
15481
|
}
|
|
15431
|
-
const { content, frontmatter } = await readVaultFile(vaultPath2, notePath);
|
|
15482
|
+
const { content, frontmatter, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15432
15483
|
const updatedFrontmatter = { ...frontmatter, ...updates };
|
|
15433
|
-
await writeVaultFile(vaultPath2, notePath, content, updatedFrontmatter);
|
|
15484
|
+
await writeVaultFile(vaultPath2, notePath, content, updatedFrontmatter, "LF", contentHash2);
|
|
15434
15485
|
const updatedKeys = Object.keys(updates);
|
|
15435
15486
|
const preview = updatedKeys.map((k) => `${k}: ${JSON.stringify(updates[k])}`).join("\n");
|
|
15436
15487
|
return {
|
|
@@ -15450,12 +15501,12 @@ async function executeAddFrontmatterField(params, vaultPath2) {
|
|
|
15450
15501
|
} catch {
|
|
15451
15502
|
return { success: false, message: `File not found: ${notePath}`, path: notePath };
|
|
15452
15503
|
}
|
|
15453
|
-
const { content, frontmatter } = await readVaultFile(vaultPath2, notePath);
|
|
15504
|
+
const { content, frontmatter, contentHash: contentHash2 } = await readVaultFile(vaultPath2, notePath);
|
|
15454
15505
|
if (key in frontmatter) {
|
|
15455
15506
|
return { success: false, message: `Field "${key}" already exists`, path: notePath };
|
|
15456
15507
|
}
|
|
15457
15508
|
const updatedFrontmatter = { ...frontmatter, [key]: value };
|
|
15458
|
-
await writeVaultFile(vaultPath2, notePath, content, updatedFrontmatter);
|
|
15509
|
+
await writeVaultFile(vaultPath2, notePath, content, updatedFrontmatter, "LF", contentHash2);
|
|
15459
15510
|
return {
|
|
15460
15511
|
success: true,
|
|
15461
15512
|
message: `Added frontmatter field "${key}"`,
|
|
@@ -18922,7 +18973,7 @@ function registerMergeTools2(server2, getStateDb) {
|
|
|
18922
18973
|
|
|
18923
18974
|
// src/index.ts
|
|
18924
18975
|
import * as fs31 from "node:fs/promises";
|
|
18925
|
-
import { createHash as
|
|
18976
|
+
import { createHash as createHash3 } from "node:crypto";
|
|
18926
18977
|
|
|
18927
18978
|
// src/resources/vault.ts
|
|
18928
18979
|
function registerVaultResources(server2, getIndex) {
|
|
@@ -19698,7 +19749,7 @@ async function runPostIndexWork(index) {
|
|
|
19698
19749
|
}
|
|
19699
19750
|
try {
|
|
19700
19751
|
const content = await fs31.readFile(path32.join(vaultPath, event.path), "utf-8");
|
|
19701
|
-
const hash =
|
|
19752
|
+
const hash = createHash3("sha256").update(content).digest("hex").slice(0, 16);
|
|
19702
19753
|
if (lastContentHashes.get(event.path) === hash) {
|
|
19703
19754
|
serverLog("watcher", `Hash unchanged, skipping: ${event.path}`);
|
|
19704
19755
|
continue;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@velvetmonkey/flywheel-memory",
|
|
3
|
-
"version": "2.0.
|
|
3
|
+
"version": "2.0.65",
|
|
4
4
|
"description": "MCP server that gives Claude full read/write access to your Obsidian vault. Select from 42 tools for search, backlinks, graph queries, mutations, and hybrid semantic search.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -52,7 +52,7 @@
|
|
|
52
52
|
},
|
|
53
53
|
"dependencies": {
|
|
54
54
|
"@modelcontextprotocol/sdk": "^1.25.1",
|
|
55
|
-
"@velvetmonkey/vault-core": "2.0.
|
|
55
|
+
"@velvetmonkey/vault-core": "2.0.65",
|
|
56
56
|
"better-sqlite3": "^11.0.0",
|
|
57
57
|
"chokidar": "^4.0.0",
|
|
58
58
|
"gray-matter": "^4.0.3",
|