@disco_trooper/apple-notes-mcp 1.3.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -4
- package/package.json +4 -2
- package/src/db/lancedb.ts +28 -0
- package/src/index.ts +127 -1
- package/src/notes/crud.test.ts +122 -1
- package/src/notes/crud.ts +207 -0
- package/src/search/chunk-indexer.ts +47 -0
- package/src/search/refresh.test.ts +173 -0
- package/src/search/refresh.ts +151 -0
package/README.md
CHANGED

@@ -19,12 +19,14 @@ MCP server for Apple Notes with semantic search and CRUD operations. Claude sear
 - **Incremental Indexing** - Re-embed only changed notes
 - **Dual Embedding** - Local HuggingFace or OpenRouter API

-## What's New in 1.3
+## What's New in 1.4

-- **
+- **Smart Refresh** - Search auto-reindexes changed notes. No manual `index-notes` needed.
+- **Batch Operations** - Delete or move multiple notes by title or folder.
+- **Purge Index** - Clear all indexed data when switching models or fixing corruption.
+- **Parent Document Retriever** - Splits long notes into 500-char chunks with 100-char overlap.
 - **60x faster cached queries** - Query embedding cache eliminates redundant API calls.
-- **
-- **4-6x faster indexing** - Parallel processing and optimized chunk generation.
+- **4-6x faster indexing** - Parallel processing and batch embeddings.

 ## Installation

@@ -178,6 +180,33 @@ title: "My Note"
 folder: "Archive"
 ```

+#### `batch-delete`
+Delete multiple notes at once.
+
+```
+titles: ["Note 1", "Note 2"] # OR folder: "Old Project"
+confirm: true # required for safety
+```
+
+#### `batch-move`
+Move multiple notes to a target folder.
+
+```
+titles: ["Note 1", "Note 2"] # OR sourceFolder: "Old"
+targetFolder: "Archive" # required
+```
+
+### Index Management
+
+#### `purge-index`
+Clear all indexed data. Use when switching embedding models or to fix corrupted index.
+
+```
+confirm: true # required for safety
+```
+
+After purging, run `index-notes` to rebuild.
+
 ### Knowledge Graph

 #### `list-tags`

@@ -285,6 +314,9 @@ bun run check
 # Run tests
 bun run test

+# Run with coverage
+bun run test:coverage
+
 # Run with debug logging
 DEBUG=true bun run start

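To make the new tools in the README concrete, here is a minimal, purely illustrative sketch of calling `batch-move` and `batch-delete` from an MCP client. It assumes the standard `@modelcontextprotocol/sdk` TypeScript client; the transport command, folder names, and note titles are placeholders, not part of this package.

```ts
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

// Spawn the server over stdio (command/args are hypothetical placeholders).
const transport = new StdioClientTransport({
  command: "bunx",
  args: ["@disco_trooper/apple-notes-mcp"],
});
const client = new Client({ name: "example-client", version: "1.0.0" });
await client.connect(transport);

// Archive a whole folder, then clear out two notes by title.
await client.callTool({
  name: "batch-move",
  arguments: { sourceFolder: "Inbox", targetFolder: "Archive" },
});
await client.callTool({
  name: "batch-delete",
  arguments: { titles: ["Scratch", "Old TODO"], confirm: true },
});
```

Both destructive tools require `confirm: true`, so a forgotten flag fails validation rather than deleting anything.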
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@disco_trooper/apple-notes-mcp",
-  "version": "1.3.0",
+  "version": "1.4.0",
   "description": "MCP server for Apple Notes with semantic search and CRUD operations",
   "type": "module",
   "main": "src/index.ts",
@@ -10,7 +10,8 @@
     "dev": "bun --watch run src/index.ts",
     "check": "bun run tsc --noEmit",
     "test": "vitest run",
-    "test:watch": "vitest"
+    "test:watch": "vitest",
+    "test:coverage": "vitest run --coverage"
   },
   "dependencies": {
     "@clack/prompts": "^0.8.0",
@@ -27,6 +28,7 @@
   "devDependencies": {
     "@types/bun": "^1.1.0",
     "@types/turndown": "^5.0.0",
+    "@vitest/coverage-v8": "^4.0.16",
     "typescript": "^5.7.0",
     "vitest": "^4.0.16"
   },
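The new `test:coverage` script relies on the `@vitest/coverage-v8` provider. The package's own Vitest configuration is not part of this diff, so the snippet below is only a hypothetical sketch of what a matching `vitest.config.ts` could look like; the `--coverage` flag also works with Vitest's defaults.

```ts
// vitest.config.ts — hypothetical sketch; the package's actual config is not shown in this diff.
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      provider: "v8",             // matches the @vitest/coverage-v8 devDependency
      reporter: ["text", "html"], // print a summary and write an HTML report
    },
  },
});
```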
package/src/db/lancedb.ts
CHANGED

@@ -597,6 +597,34 @@ export class ChunkStore {
     });
     debug("ChunkStore: FTS index rebuilt");
   }
+
+  /**
+   * Delete chunks for multiple notes at once.
+   */
+  async deleteChunksByNoteIds(noteIds: string[]): Promise<void> {
+    if (noteIds.length === 0) return;
+
+    const table = await this.ensureTable();
+    for (const noteId of noteIds) {
+      const escapedNoteId = escapeForFilter(noteId);
+      await table.delete(`note_id = '${escapedNoteId}'`);
+    }
+    debug(`ChunkStore: Deleted chunks for ${noteIds.length} notes`);
+  }
+
+  /**
+   * Add chunks to existing table (for incremental updates).
+   */
+  async addChunks(chunks: ChunkRecord[]): Promise<void> {
+    if (chunks.length === 0) return;
+
+    const table = await this.ensureTable();
+    await table.add(chunks);
+    debug(`ChunkStore: Added ${chunks.length} chunks`);
+
+    // Rebuild FTS index after adding
+    await this.rebuildFtsIndex();
+  }
 }

 // Singleton instance for ChunkStore
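The two new `ChunkStore` methods are designed to be used as a pair for incremental updates: drop the stale chunks for a note, then append fresh ones. A minimal sketch, assuming the `getChunkStore()` singleton exported by this module and showing only the behaviour visible in this diff:

```ts
import { getChunkStore } from "./db/lancedb.js";
import type { ChunkRecord } from "./db/lancedb.js"; // assumed export; only its use is shown here

// Replace everything indexed for one note with a freshly chunked version.
async function replaceChunksForNote(noteId: string, freshChunks: ChunkRecord[]): Promise<void> {
  const store = getChunkStore();
  await store.deleteChunksByNoteIds([noteId]); // filters on note_id, escaped for the query
  await store.addChunks(freshChunks);          // also rebuilds the FTS index internally
}
```

Deleting before adding keeps the table free of duplicate chunks for the same note, which is exactly the sequence `updateChunksForNotes` in chunk-indexer.ts follows.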
package/src/index.ts
CHANGED

@@ -26,11 +26,12 @@ import { validateEnv } from "./config/env.js";
 // Import implementations
 import { getVectorStore, getChunkStore } from "./db/lancedb.js";
 import { getNoteByTitle, getAllFolders } from "./notes/read.js";
-import { createNote, updateNote, deleteNote, moveNote, editTable } from "./notes/crud.js";
+import { createNote, updateNote, deleteNote, moveNote, editTable, batchDelete, batchMove } from "./notes/crud.js";
 import { searchNotes } from "./search/index.js";
 import { indexNotes, reindexNote } from "./search/indexer.js";
 import { fullChunkIndex, hasChunkIndex } from "./search/chunk-indexer.js";
 import { searchChunks } from "./search/chunk-search.js";
+import { refreshIfNeeded } from "./search/refresh.js";
 import { listTags, searchByTag, findRelatedNotes } from "./graph/queries.js";
 import { exportGraph } from "./graph/export.js";

@@ -125,6 +126,28 @@ const EditTableSchema = z.object({
 })).min(1).max(100),
 });

+const BatchDeleteSchema = z.object({
+  titles: z.array(z.string().max(MAX_TITLE_LENGTH)).optional(),
+  folder: z.string().max(200).optional(),
+  confirm: z.literal(true),
+}).refine(
+  (data) => (data.titles && !data.folder) || (!data.titles && data.folder),
+  { message: "Specify either titles or folder, not both" }
+);
+
+const BatchMoveSchema = z.object({
+  titles: z.array(z.string().max(MAX_TITLE_LENGTH)).optional(),
+  sourceFolder: z.string().max(200).optional(),
+  targetFolder: z.string().min(1).max(200),
+}).refine(
+  (data) => (data.titles && !data.sourceFolder) || (!data.titles && data.sourceFolder),
+  { message: "Specify either titles or sourceFolder, not both" }
+);
+
+const PurgeIndexSchema = z.object({
+  confirm: z.literal(true),
+});
+
 // Knowledge Graph tool schemas
 const ListTagsSchema = z.object({});

@@ -229,6 +252,17 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
       required: ["title"],
     },
   },
+  {
+    name: "purge-index",
+    description: "Delete all indexed data (notes and chunks). Use when switching embedding models or to fix corrupted index.",
+    inputSchema: {
+      type: "object",
+      properties: {
+        confirm: { type: "boolean", description: "Must be true to confirm deletion" },
+      },
+      required: ["confirm"],
+    },
+  },
   {
     name: "list-notes",
     description: "Count how many notes are indexed",

@@ -334,6 +368,52 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
       required: ["title", "edits"],
     },
   },
+  {
+    name: "batch-delete",
+    description: "Delete multiple notes at once. Requires confirm: true for safety.",
+    inputSchema: {
+      type: "object",
+      properties: {
+        titles: {
+          type: "array",
+          items: { type: "string" },
+          description: "Note titles to delete (supports folder/title and id:xxx formats)",
+        },
+        folder: {
+          type: "string",
+          description: "Delete ALL notes in this folder",
+        },
+        confirm: {
+          type: "boolean",
+          description: "Must be true to confirm deletion",
+        },
+      },
+      required: ["confirm"],
+    },
+  },
+  {
+    name: "batch-move",
+    description: "Move multiple notes to a target folder at once.",
+    inputSchema: {
+      type: "object",
+      properties: {
+        titles: {
+          type: "array",
+          items: { type: "string" },
+          description: "Note titles to move (supports folder/title and id:xxx formats)",
+        },
+        sourceFolder: {
+          type: "string",
+          description: "Move ALL notes from this folder",
+        },
+        targetFolder: {
+          type: "string",
+          description: "Target folder to move notes to",
+        },
+      },
+      required: ["targetFolder"],
+    },
+  },
   // Knowledge Graph tools
   {
     name: "list-tags",

@@ -405,6 +485,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
     case "search-notes": {
       const params = SearchNotesSchema.parse(args);

+      // Smart refresh: check for changes before search
+      const refreshed = await refreshIfNeeded();
+      if (refreshed) {
+        debug("Index refreshed before search");
+      }
+
       // Use chunk-based search if chunk index exists (better for long notes)
       const useChunkSearch = await hasChunkIndex();

@@ -483,6 +569,17 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
       return textResponse(`Reindexed note: "${params.title}"`);
     }

+    case "purge-index": {
+      PurgeIndexSchema.parse(args);
+      const store = getVectorStore();
+      const chunkStore = getChunkStore();
+
+      await store.clear();
+      await chunkStore.clear();
+
+      return textResponse("Index purged. Run index-notes to rebuild.");
+    }
+
     case "list-notes": {
       const store = getVectorStore();
       const noteCount = await store.count();

@@ -578,6 +675,35 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
       return textResponse(`Updated ${params.edits.length} cell(s) in table ${params.table_index}`);
     }

+    case "batch-delete": {
+      const params = BatchDeleteSchema.parse(args);
+      const result = await batchDelete({
+        titles: params.titles,
+        folder: params.folder,
+      });
+
+      let message = `Deleted ${result.deleted} notes.`;
+      if (result.failed.length > 0) {
+        message += `\nFailed to delete: ${result.failed.join(", ")}`;
+      }
+      return textResponse(message);
+    }
+
+    case "batch-move": {
+      const params = BatchMoveSchema.parse(args);
+      const result = await batchMove({
+        titles: params.titles,
+        sourceFolder: params.sourceFolder,
+        targetFolder: params.targetFolder,
+      });
+
+      let message = `Moved ${result.moved} notes to "${params.targetFolder}".`;
+      if (result.failed.length > 0) {
+        message += `\nFailed to move: ${result.failed.join(", ")}`;
+      }
+      return textResponse(message);
+    }
+
     // Knowledge Graph tools
     case "list-tags": {
       ListTagsSchema.parse(args);
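The interesting part of these schemas is the `.refine()` clause, which enforces "titles XOR folder" on top of the per-field checks. A small behavioural sketch of how the `BatchDeleteSchema` defined above responds to different inputs (using zod's `safeParse`, so nothing throws):

```ts
// Valid: exactly one selector plus the literal confirm flag.
const ok = BatchDeleteSchema.safeParse({ folder: "Old Project", confirm: true });
// ok.success === true

// Invalid: both selectors at once — the refine() rejects it.
const both = BatchDeleteSchema.safeParse({
  titles: ["Note 1"],
  folder: "Old Project",
  confirm: true,
});
// both.success === false; the issue message is "Specify either titles or folder, not both"

// Invalid: confirm is z.literal(true), so omitting it (or passing false) fails validation.
const noConfirm = BatchDeleteSchema.safeParse({ titles: ["Note 1"] });
// noConfirm.success === false
```

`BatchMoveSchema` works the same way with `titles`/`sourceFolder`, while `targetFolder` is always required.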
package/src/notes/crud.test.ts
CHANGED

@@ -28,7 +28,7 @@ vi.mock("./tables.js", () => ({
 }));

 import { runJxa } from "run-jxa";
-import { checkReadOnly, createNote, updateNote, deleteNote, moveNote, editTable } from "./crud.js";
+import { checkReadOnly, createNote, updateNote, deleteNote, moveNote, editTable, batchDelete, batchMove } from "./crud.js";
 import { resolveNoteTitle } from "./read.js";
 import { findTables, updateTableCell } from "./tables.js";

@@ -290,3 +290,124 @@ describe("editTable", () => {
     );
   });
 });
+
+describe("batchDelete", () => {
+  beforeEach(() => {
+    vi.clearAllMocks();
+    delete process.env.READONLY_MODE;
+  });
+
+  it("should throw if both titles and folder provided", async () => {
+    await expect(
+      batchDelete({ titles: ["Note"], folder: "Folder" })
+    ).rejects.toThrow("Specify either titles or folder, not both");
+  });
+
+  it("should throw if neither titles nor folder provided", async () => {
+    await expect(batchDelete({})).rejects.toThrow("Specify either titles or folder");
+  });
+
+  it("should throw in readonly mode", async () => {
+    process.env.READONLY_MODE = "true";
+    await expect(batchDelete({ titles: ["Note"] })).rejects.toThrow("read-only mode");
+  });
+
+  it("should delete all notes in a folder", async () => {
+    vi.mocked(runJxa).mockResolvedValue(JSON.stringify({ deletedCount: 5 }));
+
+    const result = await batchDelete({ folder: "Old Project" });
+
+    expect(result.deleted).toBe(5);
+    expect(result.failed).toEqual([]);
+  });
+
+  it("should delete individual notes by title", async () => {
+    vi.mocked(resolveNoteTitle)
+      .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
+      .mockResolvedValueOnce({ success: true, note: { id: "2", title: "Note 2", folder: "Work" } });
+    vi.mocked(runJxa).mockResolvedValue("ok");
+
+    const result = await batchDelete({ titles: ["Note 1", "Note 2"] });
+
+    expect(result.deleted).toBe(2);
+    expect(result.failed).toEqual([]);
+  });
+
+  it("should track failed deletions", async () => {
+    vi.mocked(resolveNoteTitle)
+      .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
+      .mockResolvedValueOnce({ success: false, error: "Note not found" });
+    vi.mocked(runJxa).mockResolvedValue("ok");
+
+    const result = await batchDelete({ titles: ["Note 1", "Missing Note"] });
+
+    expect(result.deleted).toBe(1);
+    expect(result.failed).toEqual(["Missing Note"]);
+  });
+});
+
+describe("batchMove", () => {
+  beforeEach(() => {
+    vi.clearAllMocks();
+    delete process.env.READONLY_MODE;
+  });
+
+  it("should throw if targetFolder missing", async () => {
+    await expect(
+      batchMove({ titles: ["Note"], targetFolder: "" })
+    ).rejects.toThrow("targetFolder is required");
+  });
+
+  it("should throw if both titles and sourceFolder provided", async () => {
+    await expect(
+      batchMove({ titles: ["Note"], sourceFolder: "Folder", targetFolder: "Archive" })
+    ).rejects.toThrow("Specify either titles or sourceFolder, not both");
+  });
+
+  it("should throw if neither titles nor sourceFolder provided", async () => {
+    await expect(
+      batchMove({ targetFolder: "Archive" })
+    ).rejects.toThrow("Specify either titles or sourceFolder");
+  });
+
+  it("should throw in readonly mode", async () => {
+    process.env.READONLY_MODE = "true";
+    await expect(batchMove({ sourceFolder: "Temp", targetFolder: "Archive" })).rejects.toThrow("read-only mode");
+  });
+
+  it("should move all notes from source folder", async () => {
+    vi.mocked(runJxa).mockResolvedValue(JSON.stringify({ movedCount: 3 }));
+
+    const result = await batchMove({
+      sourceFolder: "Temp",
+      targetFolder: "Archive",
+    });
+
+    expect(result.moved).toBe(3);
+    expect(result.failed).toEqual([]);
+  });
+
+  it("should move individual notes by title", async () => {
+    vi.mocked(resolveNoteTitle)
+      .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
+      .mockResolvedValueOnce({ success: true, note: { id: "2", title: "Note 2", folder: "Work" } });
+    vi.mocked(runJxa).mockResolvedValue("ok");
+
+    const result = await batchMove({ titles: ["Note 1", "Note 2"], targetFolder: "Archive" });
+
+    expect(result.moved).toBe(2);
+    expect(result.failed).toEqual([]);
+  });
+
+  it("should track failed moves", async () => {
+    vi.mocked(resolveNoteTitle)
+      .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
+      .mockResolvedValueOnce({ success: false, error: "Note not found" });
+    vi.mocked(runJxa).mockResolvedValue("ok");
+
+    const result = await batchMove({ titles: ["Note 1", "Missing Note"], targetFolder: "Archive" });
+
+    expect(result.moved).toBe(1);
+    expect(result.failed).toEqual(["Missing Note"]);
+  });
+});
package/src/notes/crud.ts
CHANGED

@@ -342,3 +342,210 @@ export async function editTable(

   debug(`Table ${tableIndex} updated in note: "${title}"`);
 }
+
+/**
+ * Result of a batch operation.
+ */
+export interface BatchResult {
+  /** Number of notes successfully processed */
+  deleted: number;
+  /** Notes that failed to process */
+  failed: string[];
+}
+
+/**
+ * Options for batch delete.
+ */
+export interface BatchDeleteOptions {
+  /** List of note titles (supports folder/title and id:xxx formats) */
+  titles?: string[];
+  /** Delete all notes in this folder */
+  folder?: string;
+}
+
+/**
+ * Delete multiple notes at once.
+ *
+ * @param options - Either titles array OR folder name (not both)
+ * @returns BatchResult with deleted count and failed notes
+ * @throws Error if READONLY_MODE is enabled
+ * @throws Error if both titles and folder provided
+ * @throws Error if neither titles nor folder provided
+ */
+export async function batchDelete(options: BatchDeleteOptions): Promise<BatchResult> {
+  checkReadOnly();
+
+  const { titles, folder } = options;
+
+  if (titles && folder) {
+    throw new Error("Specify either titles or folder, not both");
+  }
+
+  if (!titles && !folder) {
+    throw new Error("Specify either titles or folder");
+  }
+
+  const result: BatchResult = { deleted: 0, failed: [] };
+
+  if (folder) {
+    // Delete all notes in folder via single JXA call
+    debug(`Batch deleting all notes in folder: "${folder}"`);
+
+    const escapedFolder = JSON.stringify(folder);
+    const jxaResult = await runJxa(`
+      const app = Application('Notes');
+      const folderName = ${escapedFolder};
+
+      const folders = app.folders.whose({name: folderName})();
+      if (folders.length === 0) {
+        throw new Error("Folder not found: " + folderName);
+      }
+
+      const folder = folders[0];
+      const notes = folder.notes();
+      let deletedCount = 0;
+
+      // Delete in reverse order to avoid index shifting
+      for (let i = notes.length - 1; i >= 0; i--) {
+        try {
+          notes[i].delete();
+          deletedCount++;
+        } catch (e) {
+          // Continue on individual failures
+        }
+      }
+
+      return JSON.stringify({ deletedCount });
+    `);
+
+    const { deletedCount } = JSON.parse(jxaResult as string);
+    result.deleted = deletedCount;
+  } else if (titles) {
+    // Delete individual notes
+    debug(`Batch deleting ${titles.length} notes by title`);
+
+    for (const title of titles) {
+      try {
+        await deleteNote(title);
+        result.deleted++;
+      } catch (error) {
+        result.failed.push(title);
+        debug(`Failed to delete "${title}":`, error);
+      }
+    }
+  }
+
+  debug(`Batch delete complete: ${result.deleted} deleted, ${result.failed.length} failed`);
+  return result;
+}
+
+/**
+ * Result of a batch move operation.
+ */
+export interface BatchMoveResult {
+  /** Number of notes successfully moved */
+  moved: number;
+  /** Notes that failed to move */
+  failed: string[];
+}
+
+/**
+ * Options for batch move.
+ */
+export interface BatchMoveOptions {
+  /** List of note titles (supports folder/title and id:xxx formats) */
+  titles?: string[];
+  /** Move all notes from this folder */
+  sourceFolder?: string;
+  /** Target folder (required) */
+  targetFolder: string;
+}
+
+/**
+ * Move multiple notes to a target folder.
+ *
+ * @param options - Either titles array OR sourceFolder (not both) + targetFolder
+ * @returns BatchMoveResult with moved count and failed notes
+ * @throws Error if READONLY_MODE is enabled
+ * @throws Error if both titles and sourceFolder provided
+ * @throws Error if neither titles nor sourceFolder provided
+ * @throws Error if targetFolder is empty
+ */
+export async function batchMove(options: BatchMoveOptions): Promise<BatchMoveResult> {
+  checkReadOnly();
+
+  const { titles, sourceFolder, targetFolder } = options;
+
+  if (!targetFolder) {
+    throw new Error("targetFolder is required");
+  }
+
+  if (titles && sourceFolder) {
+    throw new Error("Specify either titles or sourceFolder, not both");
+  }
+
+  if (!titles && !sourceFolder) {
+    throw new Error("Specify either titles or sourceFolder");
+  }
+
+  const result: BatchMoveResult = { moved: 0, failed: [] };
+
+  if (sourceFolder) {
+    // Move all notes from source folder via single JXA call
+    debug(`Batch moving all notes from "${sourceFolder}" to "${targetFolder}"`);
+
+    const escapedSource = JSON.stringify(sourceFolder);
+    const escapedTarget = JSON.stringify(targetFolder);
+    const jxaResult = await runJxa(`
+      const app = Application('Notes');
+      const sourceName = ${escapedSource};
+      const targetName = ${escapedTarget};
+
+      const sourceFolders = app.folders.whose({name: sourceName})();
+      if (sourceFolders.length === 0) {
+        throw new Error("Source folder not found: " + sourceName);
+      }
+
+      const targetFolders = app.folders.whose({name: targetName})();
+      if (targetFolders.length === 0) {
+        throw new Error("Target folder not found: " + targetName);
+      }
+
+      const source = sourceFolders[0];
+      const target = targetFolders[0];
+      const notes = source.notes();
+      let movedCount = 0;
+
+      // Move in reverse order to avoid index shifting
+      for (let i = notes.length - 1; i >= 0; i--) {
+        try {
+          notes[i].move({to: target});
+          movedCount++;
+        } catch (e) {
+          // Continue on individual failures
+        }
+      }
+
+      return JSON.stringify({ movedCount });
+    `);
+
+    const { movedCount } = JSON.parse(jxaResult as string);
+    result.moved = movedCount;
+  } else if (titles) {
+    // Move individual notes
+    debug(`Batch moving ${titles.length} notes to "${targetFolder}"`);
+
+    for (const title of titles) {
+      try {
+        await moveNote(title, targetFolder);
+        result.moved++;
+      } catch (error) {
+        result.failed.push(title);
+        debug(`Failed to move "${title}":`, error);
+      }
+    }
+  }
+
+  debug(`Batch move complete: ${result.moved} moved, ${result.failed.length} failed`);
+  return result;
+}
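The batch helpers never throw on a per-note failure; they collect the failing titles instead. A short usage sketch, assuming they are imported from `notes/crud.js` the same way `src/index.ts` does (the folder and note names are placeholders):

```ts
import { batchDelete, batchMove } from "./notes/crud.js";

// Folder mode: one JXA call moves everything out of "Temp".
const moveResult = await batchMove({ sourceFolder: "Temp", targetFolder: "Archive" });
console.log(`moved ${moveResult.moved}, failed: ${moveResult.failed.join(", ") || "none"}`);

// Title mode: notes are resolved one by one; folder/title form is supported.
const deleteResult = await batchDelete({ titles: ["Scratch", "Meetings/Standup notes"] });
if (deleteResult.failed.length > 0) {
  console.warn("could not delete:", deleteResult.failed);
}
```

Note that the `confirm: true` safety flag lives in the MCP tool schemas, not here; callers of these functions directly are expected to have done their own confirmation.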
package/src/search/chunk-indexer.ts
CHANGED

@@ -205,3 +205,50 @@ export async function hasChunkIndex(): Promise<boolean> {
     return false;
   }
 }
+
+/**
+ * Update chunks for specific notes (used by smart refresh).
+ * Deletes old chunks for these notes and creates new ones.
+ *
+ * @param notes - Notes to update chunks for
+ * @returns Number of chunks created
+ */
+export async function updateChunksForNotes(notes: NoteDetails[]): Promise<number> {
+  if (notes.length === 0) return 0;
+
+  debug(`Updating chunks for ${notes.length} notes...`);
+
+  // Chunk all notes
+  const allChunks: InternalChunkRecord[] = [];
+  for (const note of notes) {
+    const noteChunks = chunkNote(note);
+    allChunks.push(...noteChunks);
+  }
+
+  if (allChunks.length === 0) {
+    debug("No chunks to update");
+    return 0;
+  }
+
+  // Generate embeddings
+  debug(`Generating embeddings for ${allChunks.length} chunks...`);
+  const chunkTexts = allChunks.map((chunk) => chunk.content);
+  const vectors = await getEmbeddingBatch(chunkTexts);
+
+  // Combine with vectors
+  const indexedAt = new Date().toISOString();
+  const completeChunks: ChunkRecord[] = allChunks.map((chunk, i) => ({
+    ...chunk,
+    vector: vectors[i],
+    indexed_at: indexedAt,
+  }));
+
+  // Delete old chunks for these notes and add new ones
+  const chunkStore = getChunkStore();
+  const noteIds = notes.map((n) => n.id);
+  await chunkStore.deleteChunksByNoteIds(noteIds);
+  await chunkStore.addChunks(completeChunks);
+
+  debug(`Updated ${completeChunks.length} chunks for ${notes.length} notes`);
+  return completeChunks.length;
+}
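`updateChunksForNotes` delegates the actual splitting to `chunkNote`, whose implementation is not part of this diff. For intuition only, here is a sliding-window sketch matching the 500-character chunks with 100-character overlap described in the README; the package's real `chunkNote()` may differ (and certainly attaches note_id/title metadata to each chunk):

```ts
// Illustration only: fixed-size chunking with overlap, per the README's 500/100 figures.
function splitIntoChunks(text: string, size = 500, overlap = 100): string[] {
  const chunks: string[] = [];
  const step = size - overlap; // advance 400 chars per window
  for (let start = 0; start < text.length; start += step) {
    chunks.push(text.slice(start, start + size));
    if (start + size >= text.length) break; // last window already covers the end
  }
  return chunks;
}
```

The overlap means a sentence cut at a chunk boundary still appears intact in the next chunk, which is what makes chunk-level retrieval robust for long notes.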
package/src/search/refresh.test.ts
ADDED

@@ -0,0 +1,173 @@
+import { describe, it, expect, vi, beforeEach } from "vitest";
+
+vi.mock("../notes/read.js", () => ({
+  getAllNotes: vi.fn(),
+}));
+
+vi.mock("../db/lancedb.js", () => ({
+  getVectorStore: vi.fn(),
+}));
+
+vi.mock("./indexer.js", () => ({
+  incrementalIndex: vi.fn(),
+}));
+
+describe("checkForChanges", () => {
+  beforeEach(() => {
+    vi.resetModules();
+    vi.clearAllMocks();
+  });
+
+  it("should return true if notes were modified after indexing", async () => {
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([
+      { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-10T12:00:00Z" },
+    ]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockResolvedValue([
+        { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
+      ]),
+    } as any);
+
+    const { checkForChanges } = await import("./refresh.js");
+    const hasChanges = await checkForChanges();
+
+    expect(hasChanges).toBe(true);
+  });
+
+  it("should return false if no changes", async () => {
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([
+      { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
+    ]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockResolvedValue([
+        { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
+      ]),
+    } as any);
+
+    const { checkForChanges } = await import("./refresh.js");
+    const hasChanges = await checkForChanges();
+
+    expect(hasChanges).toBe(false);
+  });
+
+  it("should return true if new note added", async () => {
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([
+      { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
+      { title: "New Note", folder: "Work", created: "2026-01-10", modified: "2026-01-10T12:00:00Z" },
+    ]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockResolvedValue([
+        { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
+      ]),
+    } as any);
+
+    const { checkForChanges } = await import("./refresh.js");
+    const hasChanges = await checkForChanges();
+
+    expect(hasChanges).toBe(true);
+  });
+
+  it("should return true if note deleted", async () => {
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockResolvedValue([
+        { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
+      ]),
+    } as any);
+
+    const { checkForChanges } = await import("./refresh.js");
+    const hasChanges = await checkForChanges();
+
+    expect(hasChanges).toBe(true);
+  });
+
+  it("should return true if no index exists and notes exist", async () => {
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([
+      { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
+    ]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockRejectedValue(new Error("Table not found")),
+    } as any);
+
+    const { checkForChanges } = await import("./refresh.js");
+    const hasChanges = await checkForChanges();
+
+    expect(hasChanges).toBe(true);
+  });
+});
+
+describe("refreshIfNeeded", () => {
+  beforeEach(() => {
+    vi.resetModules();
+    vi.clearAllMocks();
+  });
+
+  it("should trigger incremental index if changes detected", async () => {
+    const { incrementalIndex } = await import("./indexer.js");
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([
+      { title: "New Note", folder: "Work", created: "2026-01-10", modified: "2026-01-10T12:00:00Z" },
+    ]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockResolvedValue([]),
+    } as any);
+
+    vi.mocked(incrementalIndex).mockResolvedValue({
+      total: 1,
+      indexed: 1,
+      errors: 0,
+      timeMs: 100,
+    });
+
+    const { refreshIfNeeded } = await import("./refresh.js");
+    const refreshed = await refreshIfNeeded();
+
+    expect(refreshed).toBe(true);
+    expect(incrementalIndex).toHaveBeenCalled();
+  });
+
+  it("should not trigger index if no changes", async () => {
+    const { incrementalIndex } = await import("./indexer.js");
+    const { getAllNotes } = await import("../notes/read.js");
+    const { getVectorStore } = await import("../db/lancedb.js");
+
+    vi.mocked(getAllNotes).mockResolvedValue([
+      { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
+    ]);
+
+    vi.mocked(getVectorStore).mockReturnValue({
+      getAll: vi.fn().mockResolvedValue([
+        { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
+      ]),
+    } as any);
+
+    const { refreshIfNeeded } = await import("./refresh.js");
+    const refreshed = await refreshIfNeeded();
+
+    expect(refreshed).toBe(false);
+    expect(incrementalIndex).not.toHaveBeenCalled();
+  });
+});
package/src/search/refresh.ts
ADDED

@@ -0,0 +1,151 @@
+/**
+ * Smart refresh: check for note changes before search.
+ * Triggers incremental index if notes have been modified.
+ * Also updates chunk index for changed notes.
+ */
+
+import { getAllNotes, getNoteByFolderAndTitle, type NoteInfo } from "../notes/read.js";
+import { getVectorStore, getChunkStore } from "../db/lancedb.js";
+import { incrementalIndex } from "./indexer.js";
+import { updateChunksForNotes, hasChunkIndex } from "./chunk-indexer.js";
+import { createDebugLogger } from "../utils/debug.js";
+
+const debug = createDebugLogger("REFRESH");
+
+/**
+ * Detected changes in notes.
+ */
+interface DetectedChanges {
+  hasChanges: boolean;
+  added: NoteInfo[];
+  modified: NoteInfo[];
+  deleted: string[]; // note IDs
+}
+
+/**
+ * Check for note changes and return details about what changed.
+ */
+export async function detectChanges(): Promise<DetectedChanges> {
+  debug("Checking for changes...");
+
+  const currentNotes = await getAllNotes();
+  const store = getVectorStore();
+
+  let existingRecords;
+  try {
+    existingRecords = await store.getAll();
+  } catch {
+    // No index exists yet
+    debug("No existing index found");
+    return {
+      hasChanges: currentNotes.length > 0,
+      added: currentNotes,
+      modified: [],
+      deleted: [],
+    };
+  }
+
+  // Build lookup maps
+  const existingByKey = new Map<string, { indexed_at: string; id: string }>();
+  for (const record of existingRecords) {
+    const key = `${record.folder}/${record.title}`;
+    existingByKey.set(key, { indexed_at: record.indexed_at, id: record.id });
+  }
+
+  const added: NoteInfo[] = [];
+  const modified: NoteInfo[] = [];
+  const deleted: string[] = [];
+
+  // Check for new or modified notes
+  for (const note of currentNotes) {
+    const key = `${note.folder}/${note.title}`;
+    const existing = existingByKey.get(key);
+
+    if (!existing) {
+      debug(`New note detected: ${key}`);
+      added.push(note);
+    } else {
+      const noteModified = new Date(note.modified).getTime();
+      const recordIndexed = new Date(existing.indexed_at).getTime();
+
+      if (noteModified > recordIndexed) {
+        debug(`Modified note detected: ${key}`);
+        modified.push(note);
+      }
+    }
+  }
+
+  // Check for deleted notes
+  const currentKeys = new Set(currentNotes.map((n) => `${n.folder}/${n.title}`));
+  for (const [key, { id }] of existingByKey) {
+    if (!currentKeys.has(key)) {
+      debug(`Deleted note detected: ${key}`);
+      deleted.push(id);
+    }
+  }
+
+  const hasChanges = added.length > 0 || modified.length > 0 || deleted.length > 0;
+  debug(`Changes: ${added.length} added, ${modified.length} modified, ${deleted.length} deleted`);
+
+  return { hasChanges, added, modified, deleted };
+}
+
+/**
+ * Check if any notes have been modified since last index.
+ * @returns true if changes detected, false otherwise
+ */
+export async function checkForChanges(): Promise<boolean> {
+  const changes = await detectChanges();
+  return changes.hasChanges;
+}
+
+/**
+ * Refresh index if changes are detected.
+ * Updates both main index AND chunk index.
+ *
+ * @returns true if index was refreshed, false if no changes
+ */
+export async function refreshIfNeeded(): Promise<boolean> {
+  const changes = await detectChanges();
+
+  if (!changes.hasChanges) {
+    return false;
+  }
+
+  // Update main index
+  debug("Changes detected, running incremental index...");
+  const result = await incrementalIndex();
+  debug(`Main index refresh: ${result.indexed} notes updated in ${result.timeMs}ms`);
+
+  // Update chunk index if it exists and there are changes
+  const hasChunks = await hasChunkIndex();
+  if (hasChunks && (changes.added.length > 0 || changes.modified.length > 0)) {
+    debug("Updating chunk index for changed notes...");
+
+    // Fetch full content for changed notes
+    const changedNotes = [...changes.added, ...changes.modified];
+    const notesWithContent = await Promise.all(
+      changedNotes.map(async (n) => {
+        const note = await getNoteByFolderAndTitle(n.folder, n.title);
+        return note;
+      })
+    );
+
+    // Filter out nulls (notes that couldn't be fetched)
+    const validNotes = notesWithContent.filter((n) => n !== null);
+
+    if (validNotes.length > 0) {
+      const chunksCreated = await updateChunksForNotes(validNotes);
+      debug(`Chunk index refresh: ${chunksCreated} chunks for ${validNotes.length} notes`);
+    }
+
+    // Delete chunks for deleted notes
+    if (changes.deleted.length > 0) {
+      const chunkStore = getChunkStore();
+      await chunkStore.deleteChunksByNoteIds(changes.deleted);
+      debug(`Deleted chunks for ${changes.deleted.length} notes`);
+    }
+  }
+
+  return true;
+}