roam-research-mcp 2.4.0 → 2.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +175 -667
- package/build/Roam_Markdown_Cheatsheet.md +138 -289
- package/build/cache/page-uid-cache.js +40 -2
- package/build/cli/batch/translator.js +1 -1
- package/build/cli/commands/batch.js +3 -8
- package/build/cli/commands/get.js +478 -60
- package/build/cli/commands/refs.js +51 -31
- package/build/cli/commands/save.js +61 -10
- package/build/cli/commands/search.js +63 -58
- package/build/cli/commands/status.js +3 -4
- package/build/cli/commands/update.js +71 -28
- package/build/cli/utils/graph.js +6 -2
- package/build/cli/utils/input.js +10 -0
- package/build/cli/utils/output.js +28 -5
- package/build/cli/utils/sort-group.js +110 -0
- package/build/config/graph-registry.js +31 -13
- package/build/config/graph-registry.test.js +42 -5
- package/build/markdown-utils.js +114 -4
- package/build/markdown-utils.test.js +125 -0
- package/build/query/generator.js +330 -0
- package/build/query/index.js +149 -0
- package/build/query/parser.js +319 -0
- package/build/query/parser.test.js +389 -0
- package/build/query/types.js +4 -0
- package/build/search/ancestor-rule.js +14 -0
- package/build/search/block-ref-search.js +1 -5
- package/build/search/hierarchy-search.js +5 -12
- package/build/search/index.js +1 -0
- package/build/search/status-search.js +10 -9
- package/build/search/tag-search.js +8 -24
- package/build/search/text-search.js +70 -27
- package/build/search/types.js +13 -0
- package/build/search/utils.js +71 -2
- package/build/server/roam-server.js +4 -3
- package/build/shared/index.js +2 -0
- package/build/shared/page-validator.js +233 -0
- package/build/shared/page-validator.test.js +128 -0
- package/build/shared/staged-batch.js +144 -0
- package/build/tools/helpers/batch-utils.js +57 -0
- package/build/tools/helpers/page-resolution.js +136 -0
- package/build/tools/helpers/refs.js +68 -0
- package/build/tools/operations/batch.js +75 -3
- package/build/tools/operations/block-retrieval.js +15 -4
- package/build/tools/operations/block-retrieval.test.js +87 -0
- package/build/tools/operations/blocks.js +1 -288
- package/build/tools/operations/memory.js +32 -90
- package/build/tools/operations/outline.js +38 -156
- package/build/tools/operations/pages.js +169 -122
- package/build/tools/operations/todos.js +5 -37
- package/build/tools/schemas.js +20 -9
- package/build/tools/tool-handlers.js +4 -4
- package/build/utils/helpers.js +27 -0
- package/package.json +1 -1
package/build/query/parser.test.js
ADDED

@@ -0,0 +1,389 @@
+import { describe, it, expect } from 'vitest';
+import { QueryParser, QueryParseError } from './parser.js';
+import { DatalogGenerator, buildDatalogQuery } from './generator.js';
+describe('QueryParser', () => {
+    describe('tag parsing', () => {
+        it('parses a single tag', () => {
+            const result = QueryParser.parse('[[Project]]');
+            expect(result).toEqual({ type: 'tag', value: 'Project' });
+        });
+        it('parses a tag with spaces', () => {
+            const result = QueryParser.parse('[[My Project]]');
+            expect(result).toEqual({ type: 'tag', value: 'My Project' });
+        });
+        it('parses a namespaced tag', () => {
+            const result = QueryParser.parse('[[Convention/Roam]]');
+            expect(result).toEqual({ type: 'tag', value: 'Convention/Roam' });
+        });
+    });
+    describe('and operator', () => {
+        it('parses and with two tags', () => {
+            const result = QueryParser.parse('{and: [[tag1]] [[tag2]]}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'tag1' },
+                    { type: 'tag', value: 'tag2' }
+                ]
+            });
+        });
+        it('parses and with multiple tags', () => {
+            const result = QueryParser.parse('{and: [[a]] [[b]] [[c]]}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'a' },
+                    { type: 'tag', value: 'b' },
+                    { type: 'tag', value: 'c' }
+                ]
+            });
+        });
+    });
+    describe('or operator', () => {
+        it('parses or with two tags', () => {
+            const result = QueryParser.parse('{or: [[tag1]] [[tag2]]}');
+            expect(result).toEqual({
+                type: 'or',
+                children: [
+                    { type: 'tag', value: 'tag1' },
+                    { type: 'tag', value: 'tag2' }
+                ]
+            });
+        });
+    });
+    describe('not operator', () => {
+        it('parses not with a tag', () => {
+            const result = QueryParser.parse('{not: [[excluded]]}');
+            expect(result).toEqual({
+                type: 'not',
+                child: { type: 'tag', value: 'excluded' }
+            });
+        });
+    });
+    describe('between operator', () => {
+        it('parses between with two dates', () => {
+            const result = QueryParser.parse('{between: [[January 1st, 2026]] [[January 31st, 2026]]}');
+            expect(result).toEqual({
+                type: 'between',
+                startDate: 'January 1st, 2026',
+                endDate: 'January 31st, 2026'
+            });
+        });
+        it('parses between with relative dates', () => {
+            const result = QueryParser.parse('{between: [[last month]] [[last week]]}');
+            expect(result).toEqual({
+                type: 'between',
+                startDate: 'last month',
+                endDate: 'last week'
+            });
+        });
+    });
+    describe('nested queries', () => {
+        it('parses and containing or', () => {
+            const result = QueryParser.parse('{and: {or: [[a]] [[b]]} [[c]]}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    {
+                        type: 'or',
+                        children: [
+                            { type: 'tag', value: 'a' },
+                            { type: 'tag', value: 'b' }
+                        ]
+                    },
+                    { type: 'tag', value: 'c' }
+                ]
+            });
+        });
+        it('parses and with not', () => {
+            const result = QueryParser.parse('{and: [[include]] {not: [[exclude]]}}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'include' },
+                    {
+                        type: 'not',
+                        child: { type: 'tag', value: 'exclude' }
+                    }
+                ]
+            });
+        });
+        it('parses complex nested query', () => {
+            const result = QueryParser.parse('{and: {or: [[Project]] [[Task]]} {not: [[Archive]]} [[TODO]]}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    {
+                        type: 'or',
+                        children: [
+                            { type: 'tag', value: 'Project' },
+                            { type: 'tag', value: 'Task' }
+                        ]
+                    },
+                    {
+                        type: 'not',
+                        child: { type: 'tag', value: 'Archive' }
+                    },
+                    { type: 'tag', value: 'TODO' }
+                ]
+            });
+        });
+    });
+    describe('full query block format', () => {
+        it('parses {{[[query]]: ...}} format', () => {
+            const result = QueryParser.parse('{{[[query]]: {and: [[Convention/Roam]] [[Roam]]}}}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'Convention/Roam' },
+                    { type: 'tag', value: 'Roam' }
+                ]
+            });
+        });
+        it('handles whitespace in query block', () => {
+            const result = QueryParser.parse('{{[[query]]: {and: [[a]] [[b]]} }}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'a' },
+                    { type: 'tag', value: 'b' }
+                ]
+            });
+        });
+    });
+    describe('named queries', () => {
+        it('parses named query with parseWithName', () => {
+            const result = QueryParser.parseWithName('{{[[query]]: "My Query Name" {and: [[a]] [[b]]}}}');
+            expect(result.name).toBe('My Query Name');
+            expect(result.query).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'a' },
+                    { type: 'tag', value: 'b' }
+                ]
+            });
+        });
+        it('returns undefined name for unnamed query', () => {
+            const result = QueryParser.parseWithName('{{[[query]]: {and: [[a]] [[b]]}}}');
+            expect(result.name).toBeUndefined();
+            expect(result.query.type).toBe('and');
+        });
+        it('parse() ignores name and returns just the query', () => {
+            const result = QueryParser.parse('{{[[query]]: "Named Query" [[tag]]}}');
+            expect(result).toEqual({ type: 'tag', value: 'tag' });
+        });
+        it('handles escaped quotes in name', () => {
+            const result = QueryParser.parseWithName('{search: test}');
+            // Simple query without name
+            expect(result.name).toBeUndefined();
+        });
+    });
+    describe('block reference', () => {
+        it('parses block ref', () => {
+            const result = QueryParser.parse('((abc123def))');
+            expect(result).toEqual({ type: 'block-ref', uid: 'abc123def' });
+        });
+        it('parses block ref in and', () => {
+            const result = QueryParser.parse('{and: [[tag]] ((uid123))}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'tag' },
+                    { type: 'block-ref', uid: 'uid123' }
+                ]
+            });
+        });
+    });
+    describe('search operator', () => {
+        it('parses search with unquoted text', () => {
+            const result = QueryParser.parse('{search: hello world}');
+            expect(result).toEqual({ type: 'search', text: 'hello world' });
+        });
+        it('parses search with quoted text', () => {
+            const result = QueryParser.parse('{search: "exact phrase"}');
+            expect(result).toEqual({ type: 'search', text: 'exact phrase' });
+        });
+    });
+    describe('daily notes operator', () => {
+        it('parses daily notes', () => {
+            const result = QueryParser.parse('{daily notes: }');
+            expect(result).toEqual({ type: 'daily-notes' });
+        });
+        it('parses daily notes in and', () => {
+            const result = QueryParser.parse('{and: {daily notes: } [[tag]]}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'daily-notes' },
+                    { type: 'tag', value: 'tag' }
+                ]
+            });
+        });
+    });
+    describe('by operator', () => {
+        it('parses by with page ref', () => {
+            const result = QueryParser.parse('{by: [[PAI System User]]}');
+            expect(result).toEqual({ type: 'by', user: 'PAI System User' });
+        });
+        it('parses by in complex query', () => {
+            const result = QueryParser.parse('{{[[query]]: "Test Query" {and: [[Test/CreatePage Fix Verification]] {by: [[PAI System User]]}}}}');
+            expect(result).toEqual({
+                type: 'and',
+                children: [
+                    { type: 'tag', value: 'Test/CreatePage Fix Verification' },
+                    { type: 'by', user: 'PAI System User' }
+                ]
+            });
+        });
+    });
+    describe('created by operator', () => {
+        it('parses created by with plain text', () => {
+            const result = QueryParser.parse('{created by: John Doe}');
+            expect(result).toEqual({ type: 'created-by', user: 'John Doe' });
+        });
+        it('parses created by with page ref', () => {
+            const result = QueryParser.parse('{created by: [[John Doe]]}');
+            expect(result).toEqual({ type: 'created-by', user: 'John Doe' });
+        });
+    });
+    describe('edited by operator', () => {
+        it('parses edited by with plain text', () => {
+            const result = QueryParser.parse('{edited by: Jane Smith}');
+            expect(result).toEqual({ type: 'edited-by', user: 'Jane Smith' });
+        });
+    });
+    describe('error handling', () => {
+        it('throws on unclosed tag', () => {
+            expect(() => QueryParser.parse('[[unclosed')).toThrow(QueryParseError);
+        });
+        it('throws on unknown operator', () => {
+            expect(() => QueryParser.parse('{unknown: [[tag]]}')).toThrow(QueryParseError);
+        });
+        it('throws on empty and', () => {
+            expect(() => QueryParser.parse('{and: }')).toThrow(QueryParseError);
+        });
+    });
+});
+describe('DatalogGenerator', () => {
+    const generator = new DatalogGenerator();
+    describe('tag generation', () => {
+        it('generates ref clauses for a tag', () => {
+            const ast = QueryParser.parse('[[Project]]');
+            const clauses = generator.generate(ast);
+            expect(clauses.where).toContain('[?ref-0 :node/title "Project"]');
+            expect(clauses.where).toContain('[?b :block/refs ?ref-0]');
+        });
+    });
+    describe('and generation', () => {
+        it('generates multiple ref clauses', () => {
+            const ast = QueryParser.parse('{and: [[tag1]] [[tag2]]}');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.length).toBe(4); // 2 refs, 2 title matches
+            expect(clauses.where).toContain('[?ref-0 :node/title "tag1"]');
+            expect(clauses.where).toContain('[?ref-1 :node/title "tag2"]');
+        });
+    });
+    describe('or generation', () => {
+        it('generates or-join clause', () => {
+            const ast = QueryParser.parse('{or: [[a]] [[b]]}');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.length).toBe(1);
+            expect(clauses.where[0]).toContain('or-join');
+            expect(clauses.where[0]).toContain('[?b]');
+        });
+    });
+    describe('not generation', () => {
+        it('wraps in not clause', () => {
+            const ast = QueryParser.parse('{not: [[excluded]]}');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.length).toBe(1);
+            expect(clauses.where[0]).toMatch(/^\(not /);
+        });
+    });
+    describe('block-ref generation', () => {
+        it('generates block ref clauses', () => {
+            const ast = QueryParser.parse('((uid123))');
+            const clauses = generator.generate(ast);
+            expect(clauses.where).toContain('[?block-ref-0 :block/uid "uid123"]');
+            expect(clauses.where).toContain('[?b :block/refs ?block-ref-0]');
+        });
+    });
+    describe('search generation', () => {
+        it('generates search clause', () => {
+            const ast = QueryParser.parse('{search: test query}');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.length).toBe(1);
+            expect(clauses.where[0]).toContain('clojure.string/includes?');
+            expect(clauses.where[0]).toContain('test query');
+        });
+    });
+    describe('daily-notes generation', () => {
+        it('generates daily notes filter', () => {
+            const ast = QueryParser.parse('{daily notes: }');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.some(c => c.includes('re-find'))).toBe(true);
+            expect(clauses.where.some(c => c.includes('January|February'))).toBe(true);
+        });
+    });
+    describe('between generation with relative dates', () => {
+        it('generates between with relative dates', () => {
+            const ast = QueryParser.parse('{between: [[last month]] [[today]]}');
+            const clauses = generator.generate(ast);
+            // Should have create/time clauses
+            expect(clauses.where.some(c => c.includes(':create/time'))).toBe(true);
+            // Should have input values (timestamps)
+            expect(clauses.inputValues.length).toBe(2);
+            expect(typeof clauses.inputValues[0]).toBe('number');
+            expect(typeof clauses.inputValues[1]).toBe('number');
+            // Start should be before end
+            expect(clauses.inputValues[0]).toBeLessThan(clauses.inputValues[1]);
+        });
+        it('handles "N days ago" format', () => {
+            const ast = QueryParser.parse('{between: [[7 days ago]] [[today]]}');
+            const clauses = generator.generate(ast);
+            expect(clauses.inputValues.length).toBe(2);
+            const sevenDaysMs = 7 * 24 * 60 * 60 * 1000;
+            const diff = clauses.inputValues[1] - clauses.inputValues[0];
+            // Difference should be approximately 7 days (with some buffer for end-of-day adjustment)
+            expect(diff).toBeGreaterThanOrEqual(sevenDaysMs);
+        });
+    });
+    describe('by generation', () => {
+        it('generates by clause with or-join', () => {
+            const ast = QueryParser.parse('{by: [[Test User]]}');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.length).toBe(1);
+            expect(clauses.where[0]).toContain('or-join');
+            expect(clauses.where[0]).toContain(':create/user');
+            expect(clauses.where[0]).toContain(':edit/user');
+            expect(clauses.where[0]).toContain('Test User');
+        });
+    });
+    describe('created-by generation', () => {
+        it('generates created by clause', () => {
+            const ast = QueryParser.parse('{created by: John}');
+            const clauses = generator.generate(ast);
+            expect(clauses.where.some(c => c.includes(':create/user'))).toBe(true);
+            expect(clauses.where.some(c => c.includes(':user/display-name'))).toBe(true);
+        });
+    });
+    describe('buildDatalogQuery', () => {
+        it('builds complete query string', () => {
+            const ast = QueryParser.parse('{and: [[Project]] [[TODO]]}');
+            const clauses = generator.generate(ast);
+            const { query, args } = buildDatalogQuery(clauses, { limit: 50 });
+            expect(query).toContain(':find');
+            expect(query).toContain(':where');
+            expect(query).toContain(':limit 50');
+            expect(query).toContain('[?b :block/string ?block-str]');
+            expect(args).toEqual([]);
+        });
+        it('adds page scope when pageUid provided', () => {
+            const ast = QueryParser.parse('[[tag]]');
+            const clauses = generator.generate(ast);
+            const { query, args } = buildDatalogQuery(clauses, { pageUid: 'abc123' });
+            expect(query).toContain('?target-page-uid');
+            expect(args).toContain('abc123');
+        });
+    });
+});
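Read together, the DatalogGenerator assertions above pin down most of the query text the new query module emits. Purely as an illustration, and with the clause layout, the `:find` spec, and whitespace assumed rather than taken from generator.js, a query built from `{and: [[Project]] [[TODO]]}` with a limit of 50 would come out roughly like this:

```js
import { QueryParser } from './parser.js';
import { DatalogGenerator, buildDatalogQuery } from './generator.js';

const ast = QueryParser.parse('{and: [[Project]] [[TODO]]}');
const clauses = new DatalogGenerator().generate(ast);
const { query, args } = buildDatalogQuery(clauses, { limit: 50 });

// Per the assertions above, `args` is [] and `query` contains :find, :where,
// ":limit 50", and a ref clause pair per tag — i.e. something resembling:
//
//   [:find ?block-uid ?block-str ?page-title
//    :limit 50
//    :where [?ref-0 :node/title "Project"]
//           [?b :block/refs ?ref-0]
//           [?ref-1 :node/title "TODO"]
//           [?b :block/refs ?ref-1]
//           [?b :block/string ?block-str]
//           ...]
```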
package/build/search/ancestor-rule.js
ADDED

@@ -0,0 +1,14 @@
+/**
+ * Datomic ancestor rule for traversing Roam block hierarchies.
+ * Used to find all blocks under a page or parent block.
+ *
+ * Usage in queries: Pass as second parameter (%) and use `(ancestor ?child ?parent)` in where clause.
+ * Example: `[:find ?block :in $ % ?page-uid :where (ancestor ?block ?page)]`
+ */
+export const ANCESTOR_RULE = `[
+    [ (ancestor ?b ?a)
+      [?a :block/children ?b] ]
+    [ (ancestor ?b ?a)
+      [?parent :block/children ?b]
+      (ancestor ?parent ?a) ]
+]`;
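The doc comment above shows how the rule is meant to be wired into a query, and the hierarchy-search changes further down do exactly that by passing `ANCESTOR_RULE` as the first query argument. A minimal sketch of that usage (the query string here is illustrative, not copied from the package):

```js
import { q } from '@roam-research/roam-api-sdk';
import { ANCESTOR_RULE } from './ancestor-rule.js';

// The rule set binds to `%` (the second :in slot), so (ancestor ?b ?page)
// recursively follows :block/children from the page down to every descendant block.
async function blocksUnderPage(graph, pageUid) {
    const queryStr = `[:find ?block-uid ?block-str
                       :in $ % ?page-uid
                       :where [?page :block/uid ?page-uid]
                              (ancestor ?b ?page)
                              [?b :block/uid ?block-uid]
                              [?b :block/string ?block-str]]`;
    return q(graph, queryStr, [ANCESTOR_RULE, pageUid]);
}
```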
package/build/search/block-ref-search.js
CHANGED

@@ -1,7 +1,6 @@
 import { q } from '@roam-research/roam-api-sdk';
 import { BaseSearchHandler } from './types.js';
 import { SearchUtils } from './utils.js';
-import { resolveRefs } from '../tools/helpers/refs.js';
 export class BlockRefSearchHandler extends BaseSearchHandler {
     constructor(graph, params) {
         super(graph);
@@ -90,10 +89,7 @@ export class BlockRefSearchHandler extends BaseSearchHandler {
         }
         const rawResults = await q(this.graph, queryStr, queryParams);
         // Resolve block references in content
-        const resolvedResults = await
-            const resolvedContent = await resolveRefs(this.graph, content);
-            return [uid, resolvedContent, pageTitle];
-        }));
+        const resolvedResults = await this.resolveBlockRefs(rawResults);
         const searchDescription = title
             ? `referencing [[${title}]]`
             : block_uid
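Here and in the status- and tag-search handlers below, the inlined map-and-resolve loop is replaced by a single `this.resolveBlockRefs(rawResults)` call. The diff only shows the call sites, so the following is a sketch of what such a shared helper on `BaseSearchHandler` could look like, reconstructed from the removed lines (the method body and the extra-column handling are assumptions):

```js
import { resolveRefs } from '../tools/helpers/refs.js';

// Sketch only: the real BaseSearchHandler lives in ./types.js and is not shown in this diff.
export class BaseSearchHandler {
    constructor(graph) {
        this.graph = graph;
    }
    // Resolve ((block-ref)) embeds in each result's content column, passing the
    // uid and any trailing columns (page title, timestamps) through unchanged.
    async resolveBlockRefs(rawResults) {
        return Promise.all(rawResults.map(async ([uid, content, ...rest]) => {
            const resolvedContent = await resolveRefs(this.graph, content);
            return [uid, resolvedContent, ...rest];
        }));
    }
}
```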
package/build/search/hierarchy-search.js
CHANGED

@@ -1,6 +1,7 @@
 import { q } from '@roam-research/roam-api-sdk';
 import { BaseSearchHandler } from './types.js';
 import { SearchUtils } from './utils.js';
+import { ANCESTOR_RULE } from './ancestor-rule.js';
 import { resolveRefs } from '../tools/helpers/refs.js';
 export class HierarchySearchHandler extends BaseSearchHandler {
     constructor(graph, params) {
@@ -21,14 +22,6 @@ export class HierarchySearchHandler extends BaseSearchHandler {
         if (page_title_uid) {
             targetPageUid = await SearchUtils.findPageByTitleOrUid(this.graph, page_title_uid);
         }
-        // Define ancestor rule for recursive traversal
-        const ancestorRule = `[
-            [ (ancestor ?child ?parent)
-              [?parent :block/children ?child] ]
-            [ (ancestor ?child ?a)
-              [?parent :block/children ?child]
-              (ancestor ?parent ?a) ]
-        ]`;
         let queryStr;
         let queryParams;
         if (parent_uid) {
@@ -43,7 +36,7 @@ export class HierarchySearchHandler extends BaseSearchHandler {
                     [?b :block/uid ?block-uid]
                     [?b :block/page ?p]
                     [(get-else $ ?b :block/path-length 1) ?depth]]`;
-                queryParams = [
+                queryParams = [ANCESTOR_RULE, parent_uid, targetPageUid];
             }
             else {
                 queryStr = `[:find ?block-uid ?block-str ?page-title ?depth
@@ -55,7 +48,7 @@ export class HierarchySearchHandler extends BaseSearchHandler {
                     [?b :block/page ?p]
                     [?p :node/title ?page-title]
                     [(get-else $ ?b :block/path-length 1) ?depth]]`;
-                queryParams = [
+                queryParams = [ANCESTOR_RULE, parent_uid];
             }
         }
         else {
@@ -70,7 +63,7 @@ export class HierarchySearchHandler extends BaseSearchHandler {
                     [?b :block/uid ?block-uid]
                     [?b :block/page ?p]
                     [(get-else $ ?b :block/path-length 1) ?depth]]`;
-                queryParams = [
+                queryParams = [ANCESTOR_RULE, child_uid, targetPageUid];
             }
             else {
                 queryStr = `[:find ?block-uid ?block-str ?page-title ?depth
@@ -82,7 +75,7 @@ export class HierarchySearchHandler extends BaseSearchHandler {
                     [?b :block/page ?p]
                     [?p :node/title ?page-title]
                     [(get-else $ ?b :block/path-length 1) ?depth]]`;
-                queryParams = [
+                queryParams = [ANCESTOR_RULE, child_uid];
             }
         }
         const rawResults = await q(this.graph, queryStr, queryParams);
package/build/search/index.js
CHANGED

package/build/search/status-search.js
CHANGED

@@ -1,7 +1,6 @@
 import { q } from '@roam-research/roam-api-sdk';
 import { BaseSearchHandler } from './types.js';
 import { SearchUtils } from './utils.js';
-import { resolveRefs } from '../tools/helpers/refs.js';
 export class StatusSearchHandler extends BaseSearchHandler {
     constructor(graph, params) {
         super(graph);
@@ -19,31 +18,33 @@ export class StatusSearchHandler extends BaseSearchHandler {
         let queryParams;
         // Search for "{{TODO" or "{{DONE" which matches both {{[[TODO]]}} and {{TODO}} formats
         if (targetPageUid) {
-            queryStr = `[:find ?block-uid ?block-str
+            queryStr = `[:find ?block-uid ?block-str ?page-title ?block-create-time ?block-edit-time
                 :in $ ?status ?page-uid
                 :where [?p :block/uid ?page-uid]
+                       [?p :node/title ?page-title]
                        [?b :block/page ?p]
                        [?b :block/string ?block-str]
                        [?b :block/uid ?block-uid]
-                       [(clojure.string/includes? ?block-str (str "{{" ?status))]
+                       [(clojure.string/includes? ?block-str (str "{{" ?status))]
+                       [(get-else $ ?b :create/time 0) ?block-create-time]
+                       [(get-else $ ?b :edit/time 0) ?block-edit-time]]`;
             queryParams = [status, targetPageUid];
         }
         else {
-            queryStr = `[:find ?block-uid ?block-str ?page-title
+            queryStr = `[:find ?block-uid ?block-str ?page-title ?block-create-time ?block-edit-time
                 :in $ ?status
                 :where [?b :block/string ?block-str]
                        [?b :block/uid ?block-uid]
                        [?b :block/page ?p]
                        [?p :node/title ?page-title]
-                       [(clojure.string/includes? ?block-str (str "{{" ?status))]
+                       [(clojure.string/includes? ?block-str (str "{{" ?status))]
+                       [(get-else $ ?b :create/time 0) ?block-create-time]
+                       [(get-else $ ?b :edit/time 0) ?block-edit-time]]`;
             queryParams = [status];
         }
         const rawResults = await q(this.graph, queryStr, queryParams);
         // Resolve block references in content
-        const resolvedResults = await
-            const resolvedContent = await resolveRefs(this.graph, content);
-            return [uid, resolvedContent, pageTitle];
-        }));
+        const resolvedResults = await this.resolveBlockRefs(rawResults);
         return SearchUtils.formatSearchResults(resolvedResults, `with status ${status}`, !targetPageUid);
     }
 }
package/build/search/tag-search.js
CHANGED

@@ -1,7 +1,6 @@
 import { q } from '@roam-research/roam-api-sdk';
 import { BaseSearchHandler } from './types.js';
 import { SearchUtils } from './utils.js';
-import { resolveRefs } from '../tools/helpers/refs.js';
 export class TagSearchHandler extends BaseSearchHandler {
     constructor(graph, params) {
         super(graph);
@@ -38,33 +37,21 @@ export class TagSearchHandler extends BaseSearchHandler {
         if (page_title_uid) {
             targetPageUid = await SearchUtils.findPageByTitleOrUid(this.graph, page_title_uid);
         }
-        const
-        if (case_sensitive) {
-            searchTags.push(primary_tag);
-        }
-        else {
-            searchTags.push(primary_tag);
-            searchTags.push(primary_tag.charAt(0).toUpperCase() + primary_tag.slice(1));
-            searchTags.push(primary_tag.toUpperCase());
-            searchTags.push(primary_tag.toLowerCase());
-        }
-        const tagWhereClauses = searchTags.map(tag => {
-            // Roam tags can be [[tag name]] or #tag-name or #[[tag name]]
-            // The :node/title for a tag page is just the tag name without any # or [[ ]]
-            return `[?ref-page :node/title "${tag}"]`;
-        }).join(' ');
+        const tagMatchClause = SearchUtils.buildTagMatchClause(primary_tag, '?ref-page', case_sensitive);
         let inClause = `:in $`;
         let queryLimit = limit === -1 ? '' : `:limit ${limit}`;
         let queryOffset = offset === 0 ? '' : `:offset ${offset}`;
         let queryOrder = `:order ?page-edit-time asc ?block-uid asc`; // Sort by page edit time, then block UID
         let queryWhereClauses = `
-
+            ${tagMatchClause}
             [?b :block/refs ?ref-page]
             [?b :block/string ?block-str]
             [?b :block/uid ?block-uid]
             [?b :block/page ?p]
             [?p :node/title ?page-title]
-            [?p :edit/time ?page-edit-time]
+            [?p :edit/time ?page-edit-time]
+            [(get-else $ ?b :create/time 0) ?block-create-time]
+            [(get-else $ ?b :edit/time 0) ?block-edit-time]`; // Fetch page edit time for sorting, block timestamps for sort/group
         if (nearTagUid) {
             queryWhereClauses += `
                 [?b :block/refs ?near-tag-page]
@@ -80,9 +67,9 @@ export class TagSearchHandler extends BaseSearchHandler {
             queryWhereClauses += `
                 [?p :block/uid ?target-page-uid]`;
         }
-        const queryStr = `[:find ?block-uid ?block-str ?page-title
+        const queryStr = `[:find ?block-uid ?block-str ?page-title ?block-create-time ?block-edit-time
             ${inClause} ${queryLimit} ${queryOffset} ${queryOrder}
-            :where
+            :where
             ${queryWhereClauses}]`;
         const queryArgs = [];
         if (targetPageUid) {
@@ -97,10 +84,7 @@ export class TagSearchHandler extends BaseSearchHandler {
         const totalCountResults = await q(this.graph, countQueryStr, queryArgs);
         const totalCount = totalCountResults[0] ? totalCountResults[0][0] : 0;
         // Resolve block references in content
-        const resolvedResults = await
-            const resolvedContent = await resolveRefs(this.graph, content);
-            return [uid, resolvedContent, pageTitle];
-        }));
+        const resolvedResults = await this.resolveBlockRefs(rawResults);
         const searchDescription = `referencing "${primary_tag}"`;
         const formattedResults = SearchUtils.formatSearchResults(resolvedResults, searchDescription, !targetPageUid);
         formattedResults.total_count = totalCount;