roam-research-mcp 1.3.2 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +187 -25
- package/build/Roam_Markdown_Cheatsheet.md +11 -2
- package/build/cli/commands/get.js +79 -0
- package/build/cli/commands/refs.js +122 -0
- package/build/cli/commands/save.js +121 -0
- package/build/cli/commands/search.js +79 -0
- package/build/cli/roam.js +18 -0
- package/build/cli/utils/output.js +88 -0
- package/build/diff/actions.js +93 -0
- package/build/diff/actions.test.js +125 -0
- package/build/diff/diff.js +155 -0
- package/build/diff/diff.test.js +202 -0
- package/build/diff/index.js +43 -0
- package/build/diff/matcher.js +118 -0
- package/build/diff/matcher.test.js +198 -0
- package/build/diff/parser.js +114 -0
- package/build/diff/parser.test.js +281 -0
- package/build/diff/types.js +27 -0
- package/build/diff/types.test.js +57 -0
- package/build/search/block-ref-search.js +34 -7
- package/build/server/roam-server.js +7 -0
- package/build/tools/operations/pages.js +95 -0
- package/build/tools/schemas.js +29 -2
- package/build/tools/tool-handlers.js +4 -0
- package/package.json +9 -5
- package/build/cli/import-markdown.js +0 -98
package/build/diff/parser.test.js
ADDED
@@ -0,0 +1,281 @@
+ import { describe, it, expect } from 'vitest';
+ import { parseExistingBlock, parseExistingBlocks, flattenExistingBlocks, markdownToBlocks, getBlockDepth, } from './parser.js';
+ describe('parseExistingBlock', () => {
+     it('parses a simple block', () => {
+         const roamBlock = {
+             ':block/uid': 'abc123def',
+             ':block/string': 'Hello world',
+             ':block/order': 0,
+             ':block/heading': null,
+         };
+         const block = parseExistingBlock(roamBlock);
+         expect(block.uid).toBe('abc123def');
+         expect(block.text).toBe('Hello world');
+         expect(block.order).toBe(0);
+         expect(block.heading).toBeNull();
+         expect(block.children).toEqual([]);
+         expect(block.parentUid).toBeNull();
+     });
+     it('parses block with heading', () => {
+         const roamBlock = {
+             ':block/uid': 'heading1',
+             ':block/string': 'Title',
+             ':block/order': 0,
+             ':block/heading': 2,
+         };
+         const block = parseExistingBlock(roamBlock);
+         expect(block.heading).toBe(2);
+     });
+     it('parses nested children', () => {
+         const roamBlock = {
+             ':block/uid': 'parent',
+             ':block/string': 'Parent',
+             ':block/order': 0,
+             ':block/children': [
+                 {
+                     ':block/uid': 'child1',
+                     ':block/string': 'Child 1',
+                     ':block/order': 0,
+                 },
+                 {
+                     ':block/uid': 'child2',
+                     ':block/string': 'Child 2',
+                     ':block/order': 1,
+                 },
+             ],
+         };
+         const block = parseExistingBlock(roamBlock);
+         expect(block.children.length).toBe(2);
+         expect(block.children[0].uid).toBe('child1');
+         expect(block.children[0].parentUid).toBe('parent');
+         expect(block.children[1].uid).toBe('child2');
+         expect(block.children[1].parentUid).toBe('parent');
+     });
+     it('sorts children by order', () => {
+         const roamBlock = {
+             ':block/uid': 'parent',
+             ':block/string': 'Parent',
+             ':block/order': 0,
+             ':block/children': [
+                 { ':block/uid': 'c', ':block/string': 'C', ':block/order': 2 },
+                 { ':block/uid': 'a', ':block/string': 'A', ':block/order': 0 },
+                 { ':block/uid': 'b', ':block/string': 'B', ':block/order': 1 },
+             ],
+         };
+         const block = parseExistingBlock(roamBlock);
+         expect(block.children.map((c) => c.uid)).toEqual(['a', 'b', 'c']);
+     });
+     it('handles missing properties gracefully', () => {
+         const roamBlock = {};
+         const block = parseExistingBlock(roamBlock);
+         expect(block.uid).toBe('');
+         expect(block.text).toBe('');
+         expect(block.order).toBe(0);
+         expect(block.heading).toBeNull();
+     });
+ });
+ describe('parseExistingBlocks', () => {
+     it('parses page children into blocks', () => {
+         const pageData = {
+             ':block/uid': 'page123',
+             ':block/children': [
+                 { ':block/uid': 'b1', ':block/string': 'First', ':block/order': 0 },
+                 { ':block/uid': 'b2', ':block/string': 'Second', ':block/order': 1 },
+             ],
+         };
+         const blocks = parseExistingBlocks(pageData);
+         expect(blocks.length).toBe(2);
+         expect(blocks[0].text).toBe('First');
+         expect(blocks[1].text).toBe('Second');
+         expect(blocks[0].parentUid).toBeNull();
+     });
+     it('returns empty array for page with no children', () => {
+         const pageData = { ':block/uid': 'page123' };
+         const blocks = parseExistingBlocks(pageData);
+         expect(blocks).toEqual([]);
+     });
+ });
+ describe('flattenExistingBlocks', () => {
+     it('flattens nested blocks into array', () => {
+         const blocks = [
+             {
+                 uid: 'a',
+                 text: 'A',
+                 order: 0,
+                 heading: null,
+                 parentUid: null,
+                 children: [
+                     {
+                         uid: 'a1',
+                         text: 'A1',
+                         order: 0,
+                         heading: null,
+                         parentUid: 'a',
+                         children: [],
+                     },
+                     {
+                         uid: 'a2',
+                         text: 'A2',
+                         order: 1,
+                         heading: null,
+                         parentUid: 'a',
+                         children: [],
+                     },
+                 ],
+             },
+             {
+                 uid: 'b',
+                 text: 'B',
+                 order: 1,
+                 heading: null,
+                 parentUid: null,
+                 children: [],
+             },
+         ];
+         const flat = flattenExistingBlocks(blocks);
+         expect(flat.map((b) => b.uid)).toEqual(['a', 'a1', 'a2', 'b']);
+     });
+     it('preserves depth-first order', () => {
+         const blocks = [
+             {
+                 uid: 'root',
+                 text: 'Root',
+                 order: 0,
+                 heading: null,
+                 parentUid: null,
+                 children: [
+                     {
+                         uid: 'child1',
+                         text: 'Child 1',
+                         order: 0,
+                         heading: null,
+                         parentUid: 'root',
+                         children: [
+                             {
+                                 uid: 'grandchild',
+                                 text: 'Grandchild',
+                                 order: 0,
+                                 heading: null,
+                                 parentUid: 'child1',
+                                 children: [],
+                             },
+                         ],
+                     },
+                     {
+                         uid: 'child2',
+                         text: 'Child 2',
+                         order: 1,
+                         heading: null,
+                         parentUid: 'root',
+                         children: [],
+                     },
+                 ],
+             },
+         ];
+         const flat = flattenExistingBlocks(blocks);
+         expect(flat.map((b) => b.uid)).toEqual([
+             'root',
+             'child1',
+             'grandchild',
+             'child2',
+         ]);
+     });
+     it('returns empty array for empty input', () => {
+         expect(flattenExistingBlocks([])).toEqual([]);
+     });
+ });
+ describe('markdownToBlocks', () => {
+     const pageUid = 'page123';
+     it('converts simple markdown to blocks', () => {
+         const markdown = `- First item
+ - Second item`;
+         const blocks = markdownToBlocks(markdown, pageUid);
+         expect(blocks.length).toBe(2);
+         expect(blocks[0].text).toBe('First item');
+         expect(blocks[1].text).toBe('Second item');
+         expect(blocks[0].parentRef?.blockUid).toBe(pageUid);
+         expect(blocks[1].parentRef?.blockUid).toBe(pageUid);
+     });
+     it('handles nested markdown', () => {
+         const markdown = `- Parent
+   - Child`;
+         const blocks = markdownToBlocks(markdown, pageUid);
+         expect(blocks.length).toBe(2);
+         const parentBlock = blocks.find((b) => b.text === 'Parent');
+         const childBlock = blocks.find((b) => b.text === 'Child');
+         expect(parentBlock).toBeDefined();
+         expect(childBlock).toBeDefined();
+         expect(childBlock?.parentRef?.blockUid).toBe(parentBlock?.ref.blockUid);
+     });
+     it('preserves heading levels', () => {
+         const markdown = `# Heading 1
+ ## Heading 2
+ ### Heading 3`;
+         const blocks = markdownToBlocks(markdown, pageUid);
+         expect(blocks[0].heading).toBe(1);
+         expect(blocks[1].heading).toBe(2);
+         expect(blocks[2].heading).toBe(3);
+     });
+     it('generates unique UIDs', () => {
+         const markdown = `- Item 1
+ - Item 2
+ - Item 3`;
+         const blocks = markdownToBlocks(markdown, pageUid);
+         const uids = blocks.map((b) => b.ref.blockUid);
+         expect(new Set(uids).size).toBe(3); // All unique
+     });
+     it('sets order based on sibling position', () => {
+         const markdown = `- First
+ - Second
+ - Third`;
+         const blocks = markdownToBlocks(markdown, pageUid);
+         expect(blocks[0].order).toBe(0);
+         expect(blocks[1].order).toBe(1);
+         expect(blocks[2].order).toBe(2);
+     });
+ });
+ describe('getBlockDepth', () => {
+     it('returns 0 for root blocks', () => {
+         const blocks = [
+             {
+                 ref: { blockUid: 'root' },
+                 text: 'Root',
+                 parentRef: { blockUid: 'page' },
+                 order: 0,
+                 open: true,
+                 heading: null,
+             },
+         ];
+         expect(getBlockDepth(blocks[0], blocks)).toBe(0);
+     });
+     it('returns correct depth for nested blocks', () => {
+         const blocks = [
+             {
+                 ref: { blockUid: 'parent' },
+                 text: 'Parent',
+                 parentRef: { blockUid: 'page' },
+                 order: 0,
+                 open: true,
+                 heading: null,
+             },
+             {
+                 ref: { blockUid: 'child' },
+                 text: 'Child',
+                 parentRef: { blockUid: 'parent' },
+                 order: 0,
+                 open: true,
+                 heading: null,
+             },
+             {
+                 ref: { blockUid: 'grandchild' },
+                 text: 'Grandchild',
+                 parentRef: { blockUid: 'child' },
+                 order: 0,
+                 open: true,
+                 heading: null,
+             },
+         ];
+         expect(getBlockDepth(blocks[1], blocks)).toBe(1);
+         expect(getBlockDepth(blocks[2], blocks)).toBe(2);
+     });
+ });

package/build/diff/types.js
ADDED
@@ -0,0 +1,27 @@
+ /**
+  * Diff Algorithm Types
+  *
+  * Type definitions for the smart diff algorithm that computes minimal
+  * update operations when syncing markdown content to Roam.
+  */
+ /**
+  * Extract statistics from a DiffResult.
+  */
+ export function getDiffStats(result) {
+     return {
+         creates: result.creates.length,
+         updates: result.updates.length,
+         moves: result.moves.length,
+         deletes: result.deletes.length,
+         preserved: result.preservedUids.size,
+     };
+ }
+ /**
+  * Check if a diff result contains no changes.
+  */
+ export function isDiffEmpty(result) {
+     return (result.creates.length === 0 &&
+         result.updates.length === 0 &&
+         result.moves.length === 0 &&
+         result.deletes.length === 0);
+ }

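Taken together, getDiffStats and isDiffEmpty act as a cheap gate before any write is attempted: skip the batch entirely when the diff is empty, otherwise report what is about to change. A minimal usage sketch, assuming the DiffResult shape exercised in types.test.js (the sample values below are illustrative, not part of the package):

```js
import { getDiffStats, isDiffEmpty } from './types.js';

// Illustrative DiffResult: one new block, one preserved block (shape per types.test.js).
const diff = {
    creates: [{ action: 'create-block' }],
    updates: [],
    moves: [],
    deletes: [],
    preservedUids: new Set(['abc123def']),
};

if (isDiffEmpty(diff)) {
    console.log('No changes needed');
} else {
    // { creates: 1, updates: 0, moves: 0, deletes: 0, preserved: 1 }
    console.log(getDiffStats(diff));
}
```
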
package/build/diff/types.test.js
ADDED
@@ -0,0 +1,57 @@
+ import { describe, it, expect } from 'vitest';
+ import { getDiffStats, isDiffEmpty } from './types.js';
+ function createDiffResult(createCount = 0, updateCount = 0, moveCount = 0, deleteCount = 0, preservedCount = 0) {
+     return {
+         creates: Array(createCount).fill({ action: 'create-block' }),
+         updates: Array(updateCount).fill({ action: 'update-block' }),
+         moves: Array(moveCount).fill({ action: 'move-block' }),
+         deletes: Array(deleteCount).fill({ action: 'delete-block' }),
+         preservedUids: new Set(Array(preservedCount).fill(null).map((_, i) => `uid${i}`)),
+     };
+ }
+ describe('getDiffStats', () => {
+     it('returns correct counts for all operation types', () => {
+         const diff = createDiffResult(2, 3, 1, 4, 5);
+         const stats = getDiffStats(diff);
+         expect(stats.creates).toBe(2);
+         expect(stats.updates).toBe(3);
+         expect(stats.moves).toBe(1);
+         expect(stats.deletes).toBe(4);
+         expect(stats.preserved).toBe(5);
+     });
+     it('returns zeros for empty diff', () => {
+         const diff = createDiffResult();
+         const stats = getDiffStats(diff);
+         expect(stats.creates).toBe(0);
+         expect(stats.updates).toBe(0);
+         expect(stats.moves).toBe(0);
+         expect(stats.deletes).toBe(0);
+         expect(stats.preserved).toBe(0);
+     });
+ });
+ describe('isDiffEmpty', () => {
+     it('returns true when no operations exist', () => {
+         const diff = createDiffResult(0, 0, 0, 0, 5);
+         expect(isDiffEmpty(diff)).toBe(true);
+     });
+     it('returns false when creates exist', () => {
+         const diff = createDiffResult(1);
+         expect(isDiffEmpty(diff)).toBe(false);
+     });
+     it('returns false when updates exist', () => {
+         const diff = createDiffResult(0, 1);
+         expect(isDiffEmpty(diff)).toBe(false);
+     });
+     it('returns false when moves exist', () => {
+         const diff = createDiffResult(0, 0, 1);
+         expect(isDiffEmpty(diff)).toBe(false);
+     });
+     it('returns false when deletes exist', () => {
+         const diff = createDiffResult(0, 0, 0, 1);
+         expect(isDiffEmpty(diff)).toBe(false);
+     });
+     it('ignores preserved count when checking empty', () => {
+         const diff = createDiffResult(0, 0, 0, 0, 100);
+         expect(isDiffEmpty(diff)).toBe(true);
+     });
+ });

package/build/search/block-ref-search.js
CHANGED
@@ -8,17 +8,42 @@ export class BlockRefSearchHandler extends BaseSearchHandler {
          this.params = params;
      }
      async execute() {
-         const { block_uid, page_title_uid } = this.params;
+         const { block_uid, title, page_title_uid } = this.params;
          // Get target page UID if provided
          let targetPageUid;
          if (page_title_uid) {
              targetPageUid = await SearchUtils.findPageByTitleOrUid(this.graph, page_title_uid);
          }
-         // Build query based on whether we're searching for references to a specific block
-         // or all block references within a page/graph
+         // Build query based on whether we're searching for references to a specific block,
+         // a page title, or all block references within a page/graph
          let queryStr;
          let queryParams;
-         if (block_uid) {
+         if (title) {
+             // Search for references to a page by title using :block/refs
+             if (targetPageUid) {
+                 queryStr = `[:find ?block-uid ?block-str
+                     :in $ ?target-title ?page-uid
+                     :where [?target :node/title ?target-title]
+                            [?p :block/uid ?page-uid]
+                            [?b :block/page ?p]
+                            [?b :block/refs ?target]
+                            [?b :block/string ?block-str]
+                            [?b :block/uid ?block-uid]]`;
+                 queryParams = [title, targetPageUid];
+             }
+             else {
+                 queryStr = `[:find ?block-uid ?block-str ?page-title
+                     :in $ ?target-title
+                     :where [?target :node/title ?target-title]
+                            [?b :block/refs ?target]
+                            [?b :block/string ?block-str]
+                            [?b :block/uid ?block-uid]
+                            [?b :block/page ?p]
+                            [?p :node/title ?page-title]]`;
+                 queryParams = [title];
+             }
+         }
+         else if (block_uid) {
              // Search for references to a specific block
              if (targetPageUid) {
                  queryStr = `[:find ?block-uid ?block-str

@@ -69,9 +94,11 @@ export class BlockRefSearchHandler extends BaseSearchHandler {
              const resolvedContent = await resolveRefs(this.graph, content);
              return [uid, resolvedContent, pageTitle];
          }));
-         const searchDescription = block_uid
-             ? `referencing block ((${block_uid}))`
-             : 'containing block references';
+         const searchDescription = title
+             ? `referencing [[${title}]]`
+             : block_uid
+                 ? `referencing block ((${block_uid}))`
+                 : 'containing block references';
          return SearchUtils.formatSearchResults(resolvedResults, searchDescription, !targetPageUid);
      }
  }

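The new title branch lets roam_search_block_refs resolve [[page]] and #tag links through :block/refs instead of plain text matching. A hedged sketch of the argument objects this enables (parameter names come from the updated schema in tools/schemas.js further down; the page and block names are placeholders):

```js
// Hypothetical argument objects for roam_search_block_refs.
// Graph-wide: every block that references [[Project X]] via a link or tag.
const byTitle = { title: 'Project X' };

// The same search, limited to blocks on one page.
const byTitleOnPage = { title: 'Project X', page_title_uid: 'Meeting Notes' };

// Pre-existing behaviour: references to a specific block, i.e. ((abc123def)).
const byBlockUid = { block_uid: 'abc123def' };
```
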
package/build/server/roam-server.js
CHANGED
@@ -223,6 +223,13 @@ export class RoamServer {
                      content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
                  };
              }
+             case 'roam_update_page_markdown': {
+                 const { title, markdown, dry_run = false } = request.params.arguments;
+                 const result = await this.toolHandlers.updatePageMarkdown(title, markdown, dry_run);
+                 return {
+                     content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
+                 };
+             }
              default:
                  throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${request.params.name}`);
          }

package/build/tools/operations/pages.js
CHANGED
@@ -6,6 +6,7 @@ import { convertToRoamActions, convertToRoamMarkdown } from '../../markdown-util
  import { pageUidCache } from '../../cache/page-uid-cache.js';
  import { buildTableActions } from './table.js';
  import { BatchOperations } from './batch.js';
+ import { parseExistingBlocks, markdownToBlocks, diffBlockTrees, generateBatchActions, getDiffStats, isDiffEmpty, summarizeActions, } from '../../diff/index.js';
  // Helper to get ordinal suffix for dates
  function getOrdinalSuffix(day) {
      if (day > 3 && day < 21)

@@ -410,4 +411,98 @@ export class PageOperations {
          };
          return `# ${title}\n\n${toMarkdown(rootBlocks)}`;
      }
+     /**
+      * Update an existing page with new markdown content using smart diff.
+      * Preserves block UIDs where possible and generates minimal changes.
+      *
+      * @param title - Title of the page to update
+      * @param markdown - New GFM markdown content
+      * @param dryRun - If true, returns actions without executing them
+      * @returns Result with actions, stats, and preserved UIDs
+      */
+     async updatePageMarkdown(title, markdown, dryRun = false) {
+         if (!title) {
+             throw new McpError(ErrorCode.InvalidRequest, 'title is required');
+         }
+         if (!markdown) {
+             throw new McpError(ErrorCode.InvalidRequest, 'markdown is required');
+         }
+         // 1. Fetch existing page with raw block data
+         const pageTitle = String(title).trim();
+         // Try different case variations
+         const variations = [
+             pageTitle,
+             capitalizeWords(pageTitle),
+             pageTitle.toLowerCase()
+         ];
+         let pageUid = null;
+         // Check cache first
+         for (const variation of variations) {
+             const cachedUid = pageUidCache.get(variation);
+             if (cachedUid) {
+                 pageUid = cachedUid;
+                 break;
+             }
+         }
+         // If not cached, query the database
+         if (!pageUid) {
+             const orClause = variations.map(v => `[?e :node/title "${v}"]`).join(' ');
+             const searchQuery = `[:find ?uid .
+                 :where [?e :block/uid ?uid]
+                        (or ${orClause})]`;
+             const result = await q(this.graph, searchQuery, []);
+             pageUid = (result === null || result === undefined) ? null : String(result);
+             if (pageUid) {
+                 pageUidCache.set(pageTitle, pageUid);
+             }
+         }
+         if (!pageUid) {
+             throw new McpError(ErrorCode.InvalidRequest, `Page with title "${title}" not found`);
+         }
+         // 2. Fetch existing blocks with full hierarchy
+         const blocksQuery = `[:find (pull ?page [
+             :block/uid
+             :block/string
+             :block/order
+             :block/heading
+             {:block/children ...}
+         ]) .
+         :where [?page :block/uid "${pageUid}"]]`;
+         const pageData = await q(this.graph, blocksQuery, []);
+         if (!pageData) {
+             throw new McpError(ErrorCode.InternalError, `Failed to fetch page data for "${title}"`);
+         }
+         // 3. Parse existing blocks into our format
+         const existingBlocks = parseExistingBlocks(pageData);
+         // 4. Convert new markdown to block structure
+         const newBlocks = markdownToBlocks(markdown, pageUid);
+         // 5. Compute diff
+         const diff = diffBlockTrees(existingBlocks, newBlocks, pageUid);
+         // 6. Generate ordered batch actions
+         const actions = generateBatchActions(diff);
+         const stats = getDiffStats(diff);
+         const summary = isDiffEmpty(diff) ? 'No changes needed' : summarizeActions(actions);
+         // 7. Execute if not dry run and there are actions
+         if (!dryRun && actions.length > 0) {
+             try {
+                 const batchResult = await batchActions(this.graph, {
+                     action: 'batch-actions',
+                     actions: actions
+                 });
+                 if (!batchResult) {
+                     throw new Error('Batch actions returned no result');
+                 }
+             }
+             catch (error) {
+                 throw new McpError(ErrorCode.InternalError, `Failed to apply changes: ${error instanceof Error ? error.message : String(error)}`);
+             }
+         }
+         return {
+             success: true,
+             actions,
+             stats,
+             preservedUids: [...diff.preservedUids],
+             summary: dryRun ? `[DRY RUN] ${summary}` : summary
+         };
+     }
  }

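Because the method returns the planned actions and stats even when nothing is executed, a dry run is the natural way to preview a sync. A rough usage sketch, assuming an already-initialized handlers object (toolHandlers below is illustrative and not part of this diff):

```js
// Illustrative: `toolHandlers` stands in for a constructed ToolHandlers instance.
const preview = await toolHandlers.updatePageMarkdown(
    'Project X',                    // page title (placeholder)
    '- First item\n- Second item',  // new GFM markdown
    true                            // dryRun: plan only, write nothing
);

// Per the return value above:
// preview.success        -> true
// preview.actions        -> ordered create/update/move/delete batch actions
// preview.stats          -> { creates, updates, moves, deletes, preserved }
// preview.preservedUids  -> UIDs of existing blocks that were matched and kept
// preview.summary        -> '[DRY RUN] ...' description of the planned changes
```
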
package/build/tools/schemas.js
CHANGED
@@ -259,13 +259,17 @@ export const toolSchemas = {
      },
      roam_search_block_refs: {
          name: 'roam_search_block_refs',
-         description: 'Search for block references within a page or across the entire graph. Can search for references to a specific block or find all block references.',
+         description: 'Search for block references within a page or across the entire graph. Can search for references to a specific block, a page title, or find all block references.',
          inputSchema: {
              type: 'object',
              properties: {
                  block_uid: {
                      type: 'string',
-                     description: 'Optional: UID of the block to find references to'
+                     description: 'Optional: UID of the block to find references to (searches for ((uid)) patterns in text)'
+                 },
+                 title: {
+                     type: 'string',
+                     description: 'Optional: Page title to find references to (uses :block/refs for [[page]] and #tag links)'
                  },
                  page_title_uid: {
                      type: 'string',

@@ -612,4 +616,27 @@ export const toolSchemas = {
              required: ['parent_uid', 'headers', 'rows']
          }
      },
+     roam_update_page_markdown: {
+         name: 'roam_update_page_markdown',
+         description: 'Update an existing page with new markdown content using smart diff. Preserves block UIDs where possible and generates minimal changes. This is ideal for:\n- Syncing external markdown files to Roam\n- AI-assisted content updates that preserve references\n- Batch content modifications without losing block references\n\n**How it works:**\n1. Fetches existing page blocks\n2. Matches new content to existing blocks by text similarity\n3. Generates minimal create/update/move/delete operations\n4. Preserves UIDs for matched blocks (keeping references intact)\n\nIMPORTANT: Before using this tool, ensure that you have loaded into context the \'Roam Markdown Cheatsheet\' resource.',
+         inputSchema: {
+             type: 'object',
+             properties: {
+                 title: {
+                     type: 'string',
+                     description: 'Title of the page to update'
+                 },
+                 markdown: {
+                     type: 'string',
+                     description: 'New GFM markdown content for the page'
+                 },
+                 dry_run: {
+                     type: 'boolean',
+                     description: 'If true, returns the planned actions without executing them. Useful for previewing changes.',
+                     default: false
+                 }
+             },
+             required: ['title', 'markdown']
+         }
+     },
  };

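From the client side, the tool is reached through a normal MCP tools/call; the handler in roam-server.js simply forwards the three fields to updatePageMarkdown. A hedged example of the arguments an MCP client might send (title and markdown are placeholders):

```js
// Example arguments for a roam_update_page_markdown call
// (field names from the inputSchema above; values are illustrative).
const args = {
    title: 'Project X',
    markdown: '# Status\n\n- Updated milestones\n- Next steps',
    dry_run: true, // optional, defaults to false; preview without writing
};
```
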
package/build/tools/tool-handlers.js
CHANGED
@@ -89,6 +89,10 @@ export class ToolHandlers {
      async createTable(input) {
          return this.tableOps.createTable(input);
      }
+     // Page Update with Diff
+     async updatePageMarkdown(title, markdown, dryRun = false) {
+         return this.pageOps.updatePageMarkdown(title, markdown, dryRun);
+     }
      async getRoamMarkdownCheatsheet() {
          if (this.cachedCheatsheet) {
              return this.cachedCheatsheet;

package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "roam-research-mcp",
-   "version": "1.3.2",
+   "version": "1.6.0",
    "description": "A Model Context Protocol (MCP) server for Roam Research API integration",
    "private": false,
    "repository": {
@@ -23,13 +23,13 @@
    "type": "module",
    "bin": {
      "roam-research-mcp": "build/index.js",
-     "roam
+     "roam": "build/cli/roam.js"
    },
    "files": [
      "build"
    ],
    "scripts": {
-     "build": "echo \"Using custom instructions: .roam/${CUSTOM_INSTRUCTIONS_PREFIX}custom-instructions.md\" && tsc && cat Roam_Markdown_Cheatsheet.md .roam/${CUSTOM_INSTRUCTIONS_PREFIX}custom-instructions.md > build/Roam_Markdown_Cheatsheet.md && chmod 755 build/index.js build/cli/
+     "build": "echo \"Using custom instructions: .roam/${CUSTOM_INSTRUCTIONS_PREFIX}custom-instructions.md\" && tsc && cat Roam_Markdown_Cheatsheet.md .roam/${CUSTOM_INSTRUCTIONS_PREFIX}custom-instructions.md > build/Roam_Markdown_Cheatsheet.md && chmod 755 build/index.js build/cli/roam.js",
      "clean": "rm -rf build",
      "watch": "tsc --watch",
      "inspector": "npx @modelcontextprotocol/inspector build/index.js",
@@ -37,16 +37,20 @@
      "prepublishOnly": "npm run clean && npm run build",
      "release:patch": "npm version patch && git push origin v$(node -p \"require('./package.json').version\")",
      "release:minor": "npm version minor && git push origin v$(node -p \"require('./package.json').version\")",
-     "release:major": "npm version major && git push origin v$(node -p \"require('./package.json').version\")"
+     "release:major": "npm version major && git push origin v$(node -p \"require('./package.json').version\")",
+     "test": "vitest run",
+     "test:watch": "vitest"
    },
    "dependencies": {
      "@modelcontextprotocol/sdk": "^1.13.2",
      "@roam-research/roam-api-sdk": "^0.10.0",
+     "commander": "^14.0.2",
      "dotenv": "^16.4.7"
    },
    "devDependencies": {
      "@types/node": "^20.11.24",
      "ts-node": "^10.9.2",
-     "typescript": "^5.3.3"
+     "typescript": "^5.3.3",
+     "vitest": "^3.2.4"
    }
  }