@disco_trooper/apple-notes-mcp 1.3.0 → 1.5.0

This diff shows the published contents of these two package versions as they appear in the public registry, and is provided for informational purposes only.
package/README.md CHANGED
@@ -19,12 +19,14 @@ MCP server for Apple Notes with semantic search and CRUD operations. Claude sear
19
19
  - **Incremental Indexing** - Re-embed only changed notes
20
20
  - **Dual Embedding** - Local HuggingFace or OpenRouter API
21
21
 
22
- ## What's New in 1.3
22
+ ## What's New in 1.4
23
23
 
24
- - **Parent Document Retriever** - Splits long notes into 500-char chunks with 100-char overlap. Searches match specific sections, returns full notes.
24
+ - **Smart Refresh** - Search auto-reindexes changed notes. No manual `index-notes` needed.
25
+ - **Batch Operations** - Delete or move multiple notes by title or folder.
26
+ - **Purge Index** - Clear all indexed data when switching models or fixing corruption.
27
+ - **Parent Document Retriever** - Splits long notes into 500-char chunks with 100-char overlap.
25
28
  - **60x faster cached queries** - Query embedding cache eliminates redundant API calls.
26
- - **Auto-filters Base64/encoded content** - Skips images and attachments during indexing.
27
- - **4-6x faster indexing** - Parallel processing and optimized chunk generation.
29
+ - **4-6x faster indexing** - Parallel processing and batch embeddings.
28
30
 
29
31
  ## Installation
30
32
 
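The 500/100 numbers above come from the Parent Document Retriever bullet in the What's New list. As a point of reference, a minimal fixed-size chunker with those parameters could look like the sketch below; this is illustrative only, since the package's actual chunking is done by `chunkNote()` (called in the chunk-indexer hunk further down) and may differ.

```ts
// Illustrative sketch: 500-character chunks with 100-character overlap.
// Not the package's real chunkNote() implementation.
function chunkText(text: string, chunkSize = 500, overlap = 100): string[] {
  const chunks: string[] = [];
  const step = chunkSize - overlap; // advance 400 chars per chunk
  for (let start = 0; start < text.length; start += step) {
    chunks.push(text.slice(start, start + chunkSize));
    if (start + chunkSize >= text.length) break; // last chunk reached
  }
  return chunks;
}
```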
@@ -111,7 +113,19 @@ include_content: false # include full content vs preview
111
113
  ```
112
114
 
113
115
  #### `list-notes`
114
- Count indexed notes.
116
+ List notes with sorting and filtering. Without parameters, shows index statistics.
117
+
118
+ ```
119
+ sort_by: "modified" # created, modified, or title (default: modified)
120
+ order: "desc" # asc or desc (default: desc)
121
+ limit: 10 # max notes to return (1-100)
122
+ folder: "Work" # filter by folder (case-insensitive)
123
+ ```
124
+
125
+ **Examples:**
126
+ - Get 5 newest notes: `{ sort_by: "created", order: "desc", limit: 5 }`
127
+ - Recently modified: `{ sort_by: "modified", limit: 10 }`
128
+ - Alphabetical in folder: `{ sort_by: "title", order: "asc", folder: "Projects" }`
115
129
 
116
130
  #### `list-folders`
117
131
  List all Apple Notes folders.
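For anyone exercising the expanded `list-notes` tool, a hypothetical MCP `tools/call` payload using the parameters documented above might look like the following; the JSON-RPC framing follows the standard MCP convention, and the client setup is outside this diff.

```ts
// Hypothetical JSON-RPC request body for the new list-notes parameters.
const listNotesRequest = {
  jsonrpc: "2.0",
  id: 1,
  method: "tools/call",
  params: {
    name: "list-notes",
    arguments: { sort_by: "created", order: "desc", limit: 5, folder: "Work" },
  },
};
```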
@@ -178,6 +192,33 @@ title: "My Note"
178
192
  folder: "Archive"
179
193
  ```
180
194
 
195
+ #### `batch-delete`
196
+ Delete multiple notes at once.
197
+
198
+ ```
199
+ titles: ["Note 1", "Note 2"] # OR folder: "Old Project"
200
+ confirm: true # required for safety
201
+ ```
202
+
203
+ #### `batch-move`
204
+ Move multiple notes to a target folder.
205
+
206
+ ```
207
+ titles: ["Note 1", "Note 2"] # OR sourceFolder: "Old"
208
+ targetFolder: "Archive" # required
209
+ ```
210
+
211
+ ### Index Management
212
+
213
+ #### `purge-index`
214
+ Clear all indexed data. Use when switching embedding models or to fix corrupted index.
215
+
216
+ ```
217
+ confirm: true # required for safety
218
+ ```
219
+
220
+ After purging, run `index-notes` to rebuild.
221
+
181
222
  ### Knowledge Graph
182
223
 
183
224
  #### `list-tags`
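Returning to the batch and purge tools documented above, the argument objects below are a hedged sketch of valid inputs. Note that `confirm` must be the literal `true`; the Zod schemas added in `src/index.ts` further down in this diff enforce this with `z.literal(true)`.

```ts
// Sketch of valid tool arguments for the new write operations.
const batchDeleteArgs = { folder: "Old Project", confirm: true as const }; // OR titles: [...], never both
const batchMoveArgs = { sourceFolder: "Old", targetFolder: "Archive" };    // OR titles: [...], never both
const purgeIndexArgs = { confirm: true as const };                         // anything other than true is rejected
```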
@@ -285,6 +326,9 @@ bun run check
285
326
  # Run tests
286
327
  bun run test
287
328
 
329
+ # Run with coverage
330
+ bun run test:coverage
331
+
288
332
  # Run with debug logging
289
333
  DEBUG=true bun run start
290
334
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@disco_trooper/apple-notes-mcp",
3
- "version": "1.3.0",
3
+ "version": "1.5.0",
4
4
  "description": "MCP server for Apple Notes with semantic search and CRUD operations",
5
5
  "type": "module",
6
6
  "main": "src/index.ts",
@@ -10,7 +10,8 @@
10
10
  "dev": "bun --watch run src/index.ts",
11
11
  "check": "bun run tsc --noEmit",
12
12
  "test": "vitest run",
13
- "test:watch": "vitest"
13
+ "test:watch": "vitest",
14
+ "test:coverage": "vitest run --coverage"
14
15
  },
15
16
  "dependencies": {
16
17
  "@clack/prompts": "^0.8.0",
@@ -27,6 +28,7 @@
27
28
  "devDependencies": {
28
29
  "@types/bun": "^1.1.0",
29
30
  "@types/turndown": "^5.0.0",
31
+ "@vitest/coverage-v8": "^4.0.16",
30
32
  "typescript": "^5.7.0",
31
33
  "vitest": "^4.0.16"
32
34
  },
package/src/db/lancedb.ts CHANGED
@@ -597,6 +597,34 @@ export class ChunkStore {
597
597
  });
598
598
  debug("ChunkStore: FTS index rebuilt");
599
599
  }
600
+
601
+ /**
602
+ * Delete chunks for multiple notes at once.
603
+ */
604
+ async deleteChunksByNoteIds(noteIds: string[]): Promise<void> {
605
+ if (noteIds.length === 0) return;
606
+
607
+ const table = await this.ensureTable();
608
+ for (const noteId of noteIds) {
609
+ const escapedNoteId = escapeForFilter(noteId);
610
+ await table.delete(`note_id = '${escapedNoteId}'`);
611
+ }
612
+ debug(`ChunkStore: Deleted chunks for ${noteIds.length} notes`);
613
+ }
614
+
615
+ /**
616
+ * Add chunks to existing table (for incremental updates).
617
+ */
618
+ async addChunks(chunks: ChunkRecord[]): Promise<void> {
619
+ if (chunks.length === 0) return;
620
+
621
+ const table = await this.ensureTable();
622
+ await table.add(chunks);
623
+ debug(`ChunkStore: Added ${chunks.length} chunks`);
624
+
625
+ // Rebuild FTS index after adding
626
+ await this.rebuildFtsIndex();
627
+ }
600
628
  }
601
629
 
602
630
  // Singleton instance for ChunkStore
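As a usage sketch of the two new `ChunkStore` methods: replacing the chunks of a changed note is a delete-then-add pair, which is exactly what the `updateChunksForNotes()` helper added later in this diff does. The import path and the `ChunkRecord` export below are assumptions.

```ts
// Sketch only: swap out the chunks of one changed note.
// Assumes ChunkRecord and getChunkStore are exported from src/db/lancedb.ts.
import { getChunkStore, type ChunkRecord } from "./db/lancedb.js";

async function replaceChunksForNote(noteId: string, freshChunks: ChunkRecord[]): Promise<void> {
  const chunkStore = getChunkStore();
  await chunkStore.deleteChunksByNoteIds([noteId]); // remove stale chunks for this note
  await chunkStore.addChunks(freshChunks);          // insert re-embedded chunks; also rebuilds the FTS index
}
```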
package/src/index.ts CHANGED
@@ -25,12 +25,13 @@ import { validateEnv } from "./config/env.js";
25
25
 
26
26
  // Import implementations
27
27
  import { getVectorStore, getChunkStore } from "./db/lancedb.js";
28
- import { getNoteByTitle, getAllFolders } from "./notes/read.js";
29
- import { createNote, updateNote, deleteNote, moveNote, editTable } from "./notes/crud.js";
28
+ import { getNoteByTitle, getAllFolders, listNotes } from "./notes/read.js";
29
+ import { createNote, updateNote, deleteNote, moveNote, editTable, batchDelete, batchMove } from "./notes/crud.js";
30
30
  import { searchNotes } from "./search/index.js";
31
31
  import { indexNotes, reindexNote } from "./search/indexer.js";
32
32
  import { fullChunkIndex, hasChunkIndex } from "./search/chunk-indexer.js";
33
33
  import { searchChunks } from "./search/chunk-search.js";
34
+ import { refreshIfNeeded } from "./search/refresh.js";
34
35
  import { listTags, searchByTag, findRelatedNotes } from "./graph/queries.js";
35
36
  import { exportGraph } from "./graph/export.js";
36
37
 
@@ -125,6 +126,39 @@ const EditTableSchema = z.object({
125
126
  })).min(1).max(100),
126
127
  });
127
128
 
129
+ const BatchDeleteSchema = z.object({
130
+ titles: z.array(z.string().max(MAX_TITLE_LENGTH)).optional(),
131
+ folder: z.string().max(200).optional(),
132
+ confirm: z.literal(true),
133
+ }).refine(
134
+ (data) => (data.titles && !data.folder) || (!data.titles && data.folder),
135
+ { message: "Specify either titles or folder, not both" }
136
+ );
137
+
138
+ const BatchMoveSchema = z.object({
139
+ titles: z.array(z.string().max(MAX_TITLE_LENGTH)).optional(),
140
+ sourceFolder: z.string().max(200).optional(),
141
+ targetFolder: z.string().min(1).max(200),
142
+ }).refine(
143
+ (data) => (data.titles && !data.sourceFolder) || (!data.titles && data.sourceFolder),
144
+ { message: "Specify either titles or sourceFolder, not both" }
145
+ );
146
+
147
+ const PurgeIndexSchema = z.object({
148
+ confirm: z.literal(true),
149
+ });
150
+
151
+ const ListNotesSchema = z.object({
152
+ sort_by: z.enum(["created", "modified", "title"]).default("modified"),
153
+ order: z.enum(["asc", "desc"]).default("desc"),
154
+ limit: z.number().min(1).max(100).optional(),
155
+ folder: z.string().max(200).optional(),
156
+ });
157
+
158
+ /** Exported type for listNotes options - derived from Zod schema (single source of truth)
159
+ * Using z.input to get the input type (with optionals) rather than z.infer (output with defaults applied) */
160
+ export type ListNotesOptions = z.input<typeof ListNotesSchema>;
161
+
128
162
  // Knowledge Graph tool schemas
129
163
  const ListTagsSchema = z.object({});
130
164
 
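To make the either/or refinement above concrete, here is how `BatchDeleteSchema` accepts and rejects inputs (a sketch assuming the schema is in scope; `safeParse` is standard Zod):

```ts
// Accepted: exactly one of titles/folder, and confirm is the literal true.
BatchDeleteSchema.parse({ titles: ["Note 1"], confirm: true });
BatchDeleteSchema.parse({ folder: "Old Project", confirm: true });

// Rejected by the .refine() guard: both (or neither) of titles/folder supplied.
console.log(
  BatchDeleteSchema.safeParse({ titles: ["Note 1"], folder: "Old Project", confirm: true }).success
); // false -> "Specify either titles or folder, not both"

// Rejected by z.literal(true): confirm must be exactly true.
console.log(BatchDeleteSchema.safeParse({ folder: "Old Project", confirm: false }).success); // false
```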
@@ -229,12 +263,42 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
229
263
  required: ["title"],
230
264
  },
231
265
  },
266
+ {
267
+ name: "purge-index",
268
+ description: "Delete all indexed data (notes and chunks). Use when switching embedding models or to fix corrupted index.",
269
+ inputSchema: {
270
+ type: "object",
271
+ properties: {
272
+ confirm: { type: "boolean", description: "Must be true to confirm deletion" },
273
+ },
274
+ required: ["confirm"],
275
+ },
276
+ },
232
277
  {
233
278
  name: "list-notes",
234
- description: "Count how many notes are indexed",
279
+ description: "List notes from Apple Notes with sorting and filtering. Without parameters, shows index statistics.",
235
280
  inputSchema: {
236
281
  type: "object",
237
- properties: {},
282
+ properties: {
283
+ sort_by: {
284
+ type: "string",
285
+ enum: ["created", "modified", "title"],
286
+ description: "Sort by date or title (default: modified)"
287
+ },
288
+ order: {
289
+ type: "string",
290
+ enum: ["asc", "desc"],
291
+ description: "Sort order (default: desc)"
292
+ },
293
+ limit: {
294
+ type: "number",
295
+ description: "Max notes to return (1-100)"
296
+ },
297
+ folder: {
298
+ type: "string",
299
+ description: "Filter by folder"
300
+ },
301
+ },
238
302
  required: [],
239
303
  },
240
304
  },
@@ -334,6 +398,52 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
334
398
  required: ["title", "edits"],
335
399
  },
336
400
  },
401
+ {
402
+ name: "batch-delete",
403
+ description: "Delete multiple notes at once. Requires confirm: true for safety.",
404
+ inputSchema: {
405
+ type: "object",
406
+ properties: {
407
+ titles: {
408
+ type: "array",
409
+ items: { type: "string" },
410
+ description: "Note titles to delete (supports folder/title and id:xxx formats)",
411
+ },
412
+ folder: {
413
+ type: "string",
414
+ description: "Delete ALL notes in this folder",
415
+ },
416
+ confirm: {
417
+ type: "boolean",
418
+ description: "Must be true to confirm deletion",
419
+ },
420
+ },
421
+ required: ["confirm"],
422
+ },
423
+ },
424
+ {
425
+ name: "batch-move",
426
+ description: "Move multiple notes to a target folder at once.",
427
+ inputSchema: {
428
+ type: "object",
429
+ properties: {
430
+ titles: {
431
+ type: "array",
432
+ items: { type: "string" },
433
+ description: "Note titles to move (supports folder/title and id:xxx formats)",
434
+ },
435
+ sourceFolder: {
436
+ type: "string",
437
+ description: "Move ALL notes from this folder",
438
+ },
439
+ targetFolder: {
440
+ type: "string",
441
+ description: "Target folder to move notes to",
442
+ },
443
+ },
444
+ required: ["targetFolder"],
445
+ },
446
+ },
337
447
  // Knowledge Graph tools
338
448
  {
339
449
  name: "list-tags",
@@ -405,6 +515,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
405
515
  case "search-notes": {
406
516
  const params = SearchNotesSchema.parse(args);
407
517
 
518
+ // Smart refresh: check for changes before search
519
+ const refreshed = await refreshIfNeeded();
520
+ if (refreshed) {
521
+ debug("Index refreshed before search");
522
+ }
523
+
408
524
  // Use chunk-based search if chunk index exists (better for long notes)
409
525
  const useChunkSearch = await hasChunkIndex();
410
526
 
@@ -483,23 +599,46 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
483
599
  return textResponse(`Reindexed note: "${params.title}"`);
484
600
  }
485
601
 
486
- case "list-notes": {
602
+ case "purge-index": {
603
+ PurgeIndexSchema.parse(args);
487
604
  const store = getVectorStore();
488
- const noteCount = await store.count();
489
-
490
- let message = `${noteCount} notes indexed.`;
491
-
492
- // Show chunk statistics if chunk index exists
493
- const hasChunks = await hasChunkIndex();
494
- if (hasChunks) {
495
- const chunkStore = getChunkStore();
496
- const chunkCount = await chunkStore.count();
497
- message += ` ${chunkCount} chunks indexed for semantic search.`;
498
- } else {
499
- message += " Run index-notes with mode='full' to enable chunk-based search.";
605
+ const chunkStore = getChunkStore();
606
+
607
+ await store.clear();
608
+ await chunkStore.clear();
609
+
610
+ return textResponse("Index purged. Run index-notes to rebuild.");
611
+ }
612
+
613
+ case "list-notes": {
614
+ const params = ListNotesSchema.parse(args);
615
+
616
+ // No parameters provided: show index statistics (backwards compatible)
617
+ const hasFilteringParams =
618
+ params.limit !== undefined ||
619
+ params.folder !== undefined ||
620
+ (args && ("sort_by" in args || "order" in args));
621
+
622
+ if (!hasFilteringParams) {
623
+ const store = getVectorStore();
624
+ const noteCount = await store.count();
625
+ const hasChunks = await hasChunkIndex();
626
+
627
+ if (hasChunks) {
628
+ const chunkStore = getChunkStore();
629
+ const chunkCount = await chunkStore.count();
630
+ return textResponse(
631
+ `${noteCount} notes indexed. ${chunkCount} chunks indexed for semantic search.`
632
+ );
633
+ }
634
+
635
+ return textResponse(
636
+ `${noteCount} notes indexed. Run index-notes with mode='full' to enable chunk-based search.`
637
+ );
500
638
  }
501
639
 
502
- return textResponse(message);
640
+ const notes = await listNotes(params);
641
+ return textResponse(JSON.stringify(notes, null, 2));
503
642
  }
504
643
 
505
644
  case "get-note": {
@@ -578,6 +717,35 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
578
717
  return textResponse(`Updated ${params.edits.length} cell(s) in table ${params.table_index}`);
579
718
  }
580
719
 
720
+ case "batch-delete": {
721
+ const params = BatchDeleteSchema.parse(args);
722
+ const result = await batchDelete({
723
+ titles: params.titles,
724
+ folder: params.folder,
725
+ });
726
+
727
+ let message = `Deleted ${result.deleted} notes.`;
728
+ if (result.failed.length > 0) {
729
+ message += `\nFailed to delete: ${result.failed.join(", ")}`;
730
+ }
731
+ return textResponse(message);
732
+ }
733
+
734
+ case "batch-move": {
735
+ const params = BatchMoveSchema.parse(args);
736
+ const result = await batchMove({
737
+ titles: params.titles,
738
+ sourceFolder: params.sourceFolder,
739
+ targetFolder: params.targetFolder,
740
+ });
741
+
742
+ let message = `Moved ${result.moved} notes to "${params.targetFolder}".`;
743
+ if (result.failed.length > 0) {
744
+ message += `\nFailed to move: ${result.failed.join(", ")}`;
745
+ }
746
+ return textResponse(message);
747
+ }
748
+
581
749
  // Knowledge Graph tools
582
750
  case "list-tags": {
583
751
  ListTagsSchema.parse(args);
package/src/notes/crud.test.ts CHANGED
@@ -28,7 +28,7 @@ vi.mock("./tables.js", () => ({
28
28
  }));
29
29
 
30
30
  import { runJxa } from "run-jxa";
31
- import { checkReadOnly, createNote, updateNote, deleteNote, moveNote, editTable } from "./crud.js";
31
+ import { checkReadOnly, createNote, updateNote, deleteNote, moveNote, editTable, batchDelete, batchMove } from "./crud.js";
32
32
  import { resolveNoteTitle } from "./read.js";
33
33
  import { findTables, updateTableCell } from "./tables.js";
34
34
 
@@ -290,3 +290,124 @@ describe("editTable", () => {
290
290
  );
291
291
  });
292
292
  });
293
+
294
+ describe("batchDelete", () => {
295
+ beforeEach(() => {
296
+ vi.clearAllMocks();
297
+ delete process.env.READONLY_MODE;
298
+ });
299
+
300
+ it("should throw if both titles and folder provided", async () => {
301
+ await expect(
302
+ batchDelete({ titles: ["Note"], folder: "Folder" })
303
+ ).rejects.toThrow("Specify either titles or folder, not both");
304
+ });
305
+
306
+ it("should throw if neither titles nor folder provided", async () => {
307
+ await expect(batchDelete({})).rejects.toThrow("Specify either titles or folder");
308
+ });
309
+
310
+ it("should throw in readonly mode", async () => {
311
+ process.env.READONLY_MODE = "true";
312
+ await expect(batchDelete({ titles: ["Note"] })).rejects.toThrow("read-only mode");
313
+ });
314
+
315
+ it("should delete all notes in a folder", async () => {
316
+ vi.mocked(runJxa).mockResolvedValue(JSON.stringify({ deletedCount: 5 }));
317
+
318
+ const result = await batchDelete({ folder: "Old Project" });
319
+
320
+ expect(result.deleted).toBe(5);
321
+ expect(result.failed).toEqual([]);
322
+ });
323
+
324
+ it("should delete individual notes by title", async () => {
325
+ vi.mocked(resolveNoteTitle)
326
+ .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
327
+ .mockResolvedValueOnce({ success: true, note: { id: "2", title: "Note 2", folder: "Work" } });
328
+ vi.mocked(runJxa).mockResolvedValue("ok");
329
+
330
+ const result = await batchDelete({ titles: ["Note 1", "Note 2"] });
331
+
332
+ expect(result.deleted).toBe(2);
333
+ expect(result.failed).toEqual([]);
334
+ });
335
+
336
+ it("should track failed deletions", async () => {
337
+ vi.mocked(resolveNoteTitle)
338
+ .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
339
+ .mockResolvedValueOnce({ success: false, error: "Note not found" });
340
+ vi.mocked(runJxa).mockResolvedValue("ok");
341
+
342
+ const result = await batchDelete({ titles: ["Note 1", "Missing Note"] });
343
+
344
+ expect(result.deleted).toBe(1);
345
+ expect(result.failed).toEqual(["Missing Note"]);
346
+ });
347
+ });
348
+
349
+ describe("batchMove", () => {
350
+ beforeEach(() => {
351
+ vi.clearAllMocks();
352
+ delete process.env.READONLY_MODE;
353
+ });
354
+
355
+ it("should throw if targetFolder missing", async () => {
356
+ await expect(
357
+ batchMove({ titles: ["Note"], targetFolder: "" })
358
+ ).rejects.toThrow("targetFolder is required");
359
+ });
360
+
361
+ it("should throw if both titles and sourceFolder provided", async () => {
362
+ await expect(
363
+ batchMove({ titles: ["Note"], sourceFolder: "Folder", targetFolder: "Archive" })
364
+ ).rejects.toThrow("Specify either titles or sourceFolder, not both");
365
+ });
366
+
367
+ it("should throw if neither titles nor sourceFolder provided", async () => {
368
+ await expect(
369
+ batchMove({ targetFolder: "Archive" })
370
+ ).rejects.toThrow("Specify either titles or sourceFolder");
371
+ });
372
+
373
+ it("should throw in readonly mode", async () => {
374
+ process.env.READONLY_MODE = "true";
375
+ await expect(batchMove({ sourceFolder: "Temp", targetFolder: "Archive" })).rejects.toThrow("read-only mode");
376
+ });
377
+
378
+ it("should move all notes from source folder", async () => {
379
+ vi.mocked(runJxa).mockResolvedValue(JSON.stringify({ movedCount: 3 }));
380
+
381
+ const result = await batchMove({
382
+ sourceFolder: "Temp",
383
+ targetFolder: "Archive",
384
+ });
385
+
386
+ expect(result.moved).toBe(3);
387
+ expect(result.failed).toEqual([]);
388
+ });
389
+
390
+ it("should move individual notes by title", async () => {
391
+ vi.mocked(resolveNoteTitle)
392
+ .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
393
+ .mockResolvedValueOnce({ success: true, note: { id: "2", title: "Note 2", folder: "Work" } });
394
+ vi.mocked(runJxa).mockResolvedValue("ok");
395
+
396
+ const result = await batchMove({ titles: ["Note 1", "Note 2"], targetFolder: "Archive" });
397
+
398
+ expect(result.moved).toBe(2);
399
+ expect(result.failed).toEqual([]);
400
+ });
401
+
402
+ it("should track failed moves", async () => {
403
+ vi.mocked(resolveNoteTitle)
404
+ .mockResolvedValueOnce({ success: true, note: { id: "1", title: "Note 1", folder: "Work" } })
405
+ .mockResolvedValueOnce({ success: false, error: "Note not found" });
406
+ vi.mocked(runJxa).mockResolvedValue("ok");
407
+
408
+ const result = await batchMove({ titles: ["Note 1", "Missing Note"], targetFolder: "Archive" });
409
+
410
+ expect(result.moved).toBe(1);
411
+ expect(result.failed).toEqual(["Missing Note"]);
412
+ });
413
+ });
package/src/notes/crud.ts CHANGED
@@ -342,3 +342,210 @@ export async function editTable(
342
342
 
343
343
  debug(`Table ${tableIndex} updated in note: "${title}"`);
344
344
  }
345
+
346
+ /**
347
+ * Result of a batch operation.
348
+ */
349
+ export interface BatchResult {
350
+ /** Number of notes successfully processed */
351
+ deleted: number;
352
+ /** Notes that failed to process */
353
+ failed: string[];
354
+ }
355
+
356
+ /**
357
+ * Options for batch delete.
358
+ */
359
+ export interface BatchDeleteOptions {
360
+ /** List of note titles (supports folder/title and id:xxx formats) */
361
+ titles?: string[];
362
+ /** Delete all notes in this folder */
363
+ folder?: string;
364
+ }
365
+
366
+ /**
367
+ * Delete multiple notes at once.
368
+ *
369
+ * @param options - Either titles array OR folder name (not both)
370
+ * @returns BatchResult with deleted count and failed notes
371
+ * @throws Error if READONLY_MODE is enabled
372
+ * @throws Error if both titles and folder provided
373
+ * @throws Error if neither titles nor folder provided
374
+ */
375
+ export async function batchDelete(options: BatchDeleteOptions): Promise<BatchResult> {
376
+ checkReadOnly();
377
+
378
+ const { titles, folder } = options;
379
+
380
+ if (titles && folder) {
381
+ throw new Error("Specify either titles or folder, not both");
382
+ }
383
+
384
+ if (!titles && !folder) {
385
+ throw new Error("Specify either titles or folder");
386
+ }
387
+
388
+ const result: BatchResult = { deleted: 0, failed: [] };
389
+
390
+ if (folder) {
391
+ // Delete all notes in folder via single JXA call
392
+ debug(`Batch deleting all notes in folder: "${folder}"`);
393
+
394
+ const escapedFolder = JSON.stringify(folder);
395
+ const jxaResult = await runJxa(`
396
+ const app = Application('Notes');
397
+ const folderName = ${escapedFolder};
398
+
399
+ const folders = app.folders.whose({name: folderName})();
400
+ if (folders.length === 0) {
401
+ throw new Error("Folder not found: " + folderName);
402
+ }
403
+
404
+ const folder = folders[0];
405
+ const notes = folder.notes();
406
+ let deletedCount = 0;
407
+
408
+ // Delete in reverse order to avoid index shifting
409
+ for (let i = notes.length - 1; i >= 0; i--) {
410
+ try {
411
+ notes[i].delete();
412
+ deletedCount++;
413
+ } catch (e) {
414
+ // Continue on individual failures
415
+ }
416
+ }
417
+
418
+ return JSON.stringify({ deletedCount });
419
+ `);
420
+
421
+ const { deletedCount } = JSON.parse(jxaResult as string);
422
+ result.deleted = deletedCount;
423
+ } else if (titles) {
424
+ // Delete individual notes
425
+ debug(`Batch deleting ${titles.length} notes by title`);
426
+
427
+ for (const title of titles) {
428
+ try {
429
+ await deleteNote(title);
430
+ result.deleted++;
431
+ } catch (error) {
432
+ result.failed.push(title);
433
+ debug(`Failed to delete "${title}":`, error);
434
+ }
435
+ }
436
+ }
437
+
438
+ debug(`Batch delete complete: ${result.deleted} deleted, ${result.failed.length} failed`);
439
+ return result;
440
+ }
441
+
442
+ /**
443
+ * Result of a batch move operation.
444
+ */
445
+ export interface BatchMoveResult {
446
+ /** Number of notes successfully moved */
447
+ moved: number;
448
+ /** Notes that failed to move */
449
+ failed: string[];
450
+ }
451
+
452
+ /**
453
+ * Options for batch move.
454
+ */
455
+ export interface BatchMoveOptions {
456
+ /** List of note titles (supports folder/title and id:xxx formats) */
457
+ titles?: string[];
458
+ /** Move all notes from this folder */
459
+ sourceFolder?: string;
460
+ /** Target folder (required) */
461
+ targetFolder: string;
462
+ }
463
+
464
+ /**
465
+ * Move multiple notes to a target folder.
466
+ *
467
+ * @param options - Either titles array OR sourceFolder (not both) + targetFolder
468
+ * @returns BatchMoveResult with moved count and failed notes
469
+ * @throws Error if READONLY_MODE is enabled
470
+ * @throws Error if both titles and sourceFolder provided
471
+ * @throws Error if neither titles nor sourceFolder provided
472
+ * @throws Error if targetFolder is empty
473
+ */
474
+ export async function batchMove(options: BatchMoveOptions): Promise<BatchMoveResult> {
475
+ checkReadOnly();
476
+
477
+ const { titles, sourceFolder, targetFolder } = options;
478
+
479
+ if (!targetFolder) {
480
+ throw new Error("targetFolder is required");
481
+ }
482
+
483
+ if (titles && sourceFolder) {
484
+ throw new Error("Specify either titles or sourceFolder, not both");
485
+ }
486
+
487
+ if (!titles && !sourceFolder) {
488
+ throw new Error("Specify either titles or sourceFolder");
489
+ }
490
+
491
+ const result: BatchMoveResult = { moved: 0, failed: [] };
492
+
493
+ if (sourceFolder) {
494
+ // Move all notes from source folder via single JXA call
495
+ debug(`Batch moving all notes from "${sourceFolder}" to "${targetFolder}"`);
496
+
497
+ const escapedSource = JSON.stringify(sourceFolder);
498
+ const escapedTarget = JSON.stringify(targetFolder);
499
+ const jxaResult = await runJxa(`
500
+ const app = Application('Notes');
501
+ const sourceName = ${escapedSource};
502
+ const targetName = ${escapedTarget};
503
+
504
+ const sourceFolders = app.folders.whose({name: sourceName})();
505
+ if (sourceFolders.length === 0) {
506
+ throw new Error("Source folder not found: " + sourceName);
507
+ }
508
+
509
+ const targetFolders = app.folders.whose({name: targetName})();
510
+ if (targetFolders.length === 0) {
511
+ throw new Error("Target folder not found: " + targetName);
512
+ }
513
+
514
+ const source = sourceFolders[0];
515
+ const target = targetFolders[0];
516
+ const notes = source.notes();
517
+ let movedCount = 0;
518
+
519
+ // Move in reverse order to avoid index shifting
520
+ for (let i = notes.length - 1; i >= 0; i--) {
521
+ try {
522
+ notes[i].move({to: target});
523
+ movedCount++;
524
+ } catch (e) {
525
+ // Continue on individual failures
526
+ }
527
+ }
528
+
529
+ return JSON.stringify({ movedCount });
530
+ `);
531
+
532
+ const { movedCount } = JSON.parse(jxaResult as string);
533
+ result.moved = movedCount;
534
+ } else if (titles) {
535
+ // Move individual notes
536
+ debug(`Batch moving ${titles.length} notes to "${targetFolder}"`);
537
+
538
+ for (const title of titles) {
539
+ try {
540
+ await moveNote(title, targetFolder);
541
+ result.moved++;
542
+ } catch (error) {
543
+ result.failed.push(title);
544
+ debug(`Failed to move "${title}":`, error);
545
+ }
546
+ }
547
+ }
548
+
549
+ debug(`Batch move complete: ${result.moved} moved, ${result.failed.length} failed`);
550
+ return result;
551
+ }
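A short usage sketch of the two new helpers; the relative import path assumes a caller inside `src/notes/`, and the result fields match the `BatchResult`/`BatchMoveResult` interfaces above.

```ts
import { batchDelete, batchMove } from "./crud.js"; // assumed relative path

// Delete every note in a folder (handled in a single JXA call under the hood).
const del = await batchDelete({ folder: "Old Project" });
console.log(`deleted ${del.deleted}, failed: ${del.failed.join(", ") || "none"}`);

// Move two specific notes; individual failures are collected rather than thrown.
const mv = await batchMove({ titles: ["Note 1", "Note 2"], targetFolder: "Archive" });
console.log(`moved ${mv.moved}, failed: ${mv.failed.join(", ") || "none"}`);
```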
package/src/notes/read.test.ts CHANGED
@@ -6,7 +6,7 @@ vi.mock("run-jxa", () => ({
6
6
  }));
7
7
 
8
8
  import { runJxa } from "run-jxa";
9
- import { getAllNotes, getNoteByTitle, getAllFolders, resolveNoteTitle } from "./read.js";
9
+ import { getAllNotes, getNoteByTitle, getAllFolders, resolveNoteTitle, listNotes } from "./read.js";
10
10
 
11
11
  describe("getAllNotes", () => {
12
12
  beforeEach(() => {
@@ -184,3 +184,164 @@ describe("getNoteByTitle with ID prefix", () => {
184
184
  expect(note).toBeNull();
185
185
  });
186
186
  });
187
+
188
+ describe("listNotes", () => {
189
+ beforeEach(() => {
190
+ vi.clearAllMocks();
191
+ });
192
+
193
+ const mockNotes = [
194
+ { title: "Alpha", folder: "Work", created: "2024-01-01T00:00:00Z", modified: "2024-01-10T00:00:00Z" },
195
+ { title: "Beta", folder: "Personal", created: "2024-01-03T00:00:00Z", modified: "2024-01-05T00:00:00Z" },
196
+ { title: "Gamma", folder: "Work", created: "2024-01-02T00:00:00Z", modified: "2024-01-15T00:00:00Z" },
197
+ ];
198
+
199
+ it("should return all notes with default sorting (modified desc)", async () => {
200
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
201
+
202
+ const notes = await listNotes();
203
+ expect(notes).toHaveLength(3);
204
+ // Most recently modified first
205
+ expect(notes[0].title).toBe("Gamma");
206
+ expect(notes[1].title).toBe("Alpha");
207
+ expect(notes[2].title).toBe("Beta");
208
+ });
209
+
210
+ it("should sort by created date ascending", async () => {
211
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
212
+
213
+ const notes = await listNotes({ sort_by: "created", order: "asc" });
214
+ expect(notes[0].title).toBe("Alpha");
215
+ expect(notes[1].title).toBe("Gamma");
216
+ expect(notes[2].title).toBe("Beta");
217
+ });
218
+
219
+ it("should sort by created date descending", async () => {
220
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
221
+
222
+ const notes = await listNotes({ sort_by: "created", order: "desc" });
223
+ expect(notes[0].title).toBe("Beta");
224
+ expect(notes[1].title).toBe("Gamma");
225
+ expect(notes[2].title).toBe("Alpha");
226
+ });
227
+
228
+ it("should sort by title alphabetically", async () => {
229
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
230
+
231
+ const notes = await listNotes({ sort_by: "title", order: "asc" });
232
+ expect(notes[0].title).toBe("Alpha");
233
+ expect(notes[1].title).toBe("Beta");
234
+ expect(notes[2].title).toBe("Gamma");
235
+ });
236
+
237
+ it("should sort by title descending", async () => {
238
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
239
+
240
+ const notes = await listNotes({ sort_by: "title", order: "desc" });
241
+ expect(notes[0].title).toBe("Gamma");
242
+ expect(notes[1].title).toBe("Beta");
243
+ expect(notes[2].title).toBe("Alpha");
244
+ });
245
+
246
+ it("should filter by folder", async () => {
247
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
248
+
249
+ const notes = await listNotes({ folder: "Work" });
250
+ expect(notes).toHaveLength(2);
251
+ expect(notes.every(n => n.folder === "Work")).toBe(true);
252
+ });
253
+
254
+ it("should apply limit", async () => {
255
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
256
+
257
+ const notes = await listNotes({ limit: 2 });
258
+ expect(notes).toHaveLength(2);
259
+ });
260
+
261
+ it("should combine folder filter and limit", async () => {
262
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
263
+
264
+ const notes = await listNotes({ folder: "Work", limit: 1 });
265
+ expect(notes).toHaveLength(1);
266
+ expect(notes[0].folder).toBe("Work");
267
+ });
268
+
269
+ it("should return empty array when folder has no notes", async () => {
270
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mockNotes));
271
+
272
+ const notes = await listNotes({ folder: "NonExistent" });
273
+ expect(notes).toHaveLength(0);
274
+ });
275
+
276
+ it("should handle empty notes array", async () => {
277
+ vi.mocked(runJxa).mockResolvedValueOnce("[]");
278
+
279
+ const notes = await listNotes();
280
+ expect(notes).toHaveLength(0);
281
+ });
282
+
283
+ it("should handle notes with empty date strings without crashing", async () => {
284
+ const notesWithEmptyDates = [
285
+ { title: "Valid", folder: "Work", created: "2024-01-01T00:00:00Z", modified: "2024-01-10T00:00:00Z" },
286
+ { title: "Empty", folder: "Work", created: "", modified: "" },
287
+ ];
288
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(notesWithEmptyDates));
289
+
290
+ const notes = await listNotes();
291
+ expect(notes).toHaveLength(2);
292
+ expect(notes[0].title).toBe("Valid");
293
+ expect(notes[1].title).toBe("Empty");
294
+ });
295
+
296
+ it("should sort notes with empty dates to the end (oldest)", async () => {
297
+ const notesWithEmptyDates = [
298
+ { title: "Empty", folder: "Work", created: "", modified: "" },
299
+ { title: "Valid", folder: "Work", created: "2024-01-01T00:00:00Z", modified: "2024-01-10T00:00:00Z" },
300
+ ];
301
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(notesWithEmptyDates));
302
+
303
+ const notes = await listNotes({ sort_by: "modified", order: "desc" });
304
+ expect(notes).toHaveLength(2);
305
+ // Valid note should come first (most recent)
306
+ expect(notes[0].title).toBe("Valid");
307
+ // Empty date should be last (treated as oldest)
308
+ expect(notes[1].title).toBe("Empty");
309
+ });
310
+
311
+ it("should handle mixing valid and empty dates correctly", async () => {
312
+ const mixedDates = [
313
+ { title: "Recent", folder: "Work", created: "2024-03-01T00:00:00Z", modified: "2024-03-15T00:00:00Z" },
314
+ { title: "Empty1", folder: "Work", created: "", modified: "" },
315
+ { title: "Old", folder: "Work", created: "2024-01-01T00:00:00Z", modified: "2024-01-10T00:00:00Z" },
316
+ { title: "Empty2", folder: "Work", created: "", modified: "" },
317
+ { title: "Middle", folder: "Work", created: "2024-02-01T00:00:00Z", modified: "2024-02-15T00:00:00Z" },
318
+ ];
319
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mixedDates));
320
+
321
+ const notes = await listNotes({ sort_by: "modified", order: "desc" });
322
+ expect(notes).toHaveLength(5);
323
+ // Order should be: Recent, Middle, Old, Empty1, Empty2
324
+ expect(notes[0].title).toBe("Recent");
325
+ expect(notes[1].title).toBe("Middle");
326
+ expect(notes[2].title).toBe("Old");
327
+ // Empty dates at the end (treated as epoch)
328
+ expect(notes[3].title).toBe("Empty1");
329
+ expect(notes[4].title).toBe("Empty2");
330
+ });
331
+
332
+ it("should sort empty dates to the beginning when sorting ascending", async () => {
333
+ const mixedDates = [
334
+ { title: "Recent", folder: "Work", created: "2024-03-01T00:00:00Z", modified: "2024-03-15T00:00:00Z" },
335
+ { title: "Empty", folder: "Work", created: "", modified: "" },
336
+ { title: "Old", folder: "Work", created: "2024-01-01T00:00:00Z", modified: "2024-01-10T00:00:00Z" },
337
+ ];
338
+ vi.mocked(runJxa).mockResolvedValueOnce(JSON.stringify(mixedDates));
339
+
340
+ const notes = await listNotes({ sort_by: "modified", order: "asc" });
341
+ expect(notes).toHaveLength(3);
342
+ // Empty date should be first (oldest when ascending)
343
+ expect(notes[0].title).toBe("Empty");
344
+ expect(notes[1].title).toBe("Old");
345
+ expect(notes[2].title).toBe("Recent");
346
+ });
347
+ });
package/src/notes/read.ts CHANGED
@@ -434,3 +434,52 @@ export async function getAllFolders(): Promise<string[]> {
434
434
  debug(`Found ${folders.length} folders`);
435
435
  return folders;
436
436
  }
437
+
438
+ // -----------------------------------------------------------------------------
439
+ // List Notes with Sorting and Filtering
440
+ // -----------------------------------------------------------------------------
441
+
442
+ // Re-export ListNotesOptions from index.ts (derived from Zod schema - single source of truth)
443
+ export type { ListNotesOptions } from "../index.js";
444
+
445
+ // Import the type for internal use
446
+ import type { ListNotesOptions } from "../index.js";
447
+
448
+ /**
449
+ * List notes with sorting and filtering.
450
+ *
451
+ * @param options - Sorting and filtering options
452
+ * @returns Array of note metadata sorted and filtered as specified
453
+ */
454
+ export async function listNotes(options: ListNotesOptions = {}): Promise<NoteInfo[]> {
455
+ const { sort_by = "modified", order = "desc", limit, folder } = options;
456
+
457
+ debug(`Listing notes: sort_by=${sort_by}, order=${order}, limit=${limit}, folder=${folder}`);
458
+
459
+ const allNotes = await getAllNotes();
460
+
461
+ // Filter by folder (case-insensitive for better UX)
462
+ const filtered = folder
463
+ ? allNotes.filter((n) => n.folder.toLowerCase() === folder.toLowerCase())
464
+ : allNotes;
465
+
466
+ filtered.sort((a, b) => {
467
+ let comparison: number;
468
+
469
+ if (sort_by === "title") {
470
+ comparison = a.title.localeCompare(b.title);
471
+ } else {
472
+ // Handle empty dates by treating them as epoch (0)
473
+ const aTime = a[sort_by] ? new Date(a[sort_by]).getTime() : 0;
474
+ const bTime = b[sort_by] ? new Date(b[sort_by]).getTime() : 0;
475
+ comparison = aTime - bTime;
476
+ }
477
+
478
+ return order === "desc" ? -comparison : comparison;
479
+ });
480
+
481
+ const result = limit ? filtered.slice(0, limit) : filtered;
482
+
483
+ debug(`Returning ${result.length} notes`);
484
+ return result;
485
+ }
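And a usage sketch of `listNotes()`, mirroring the README examples earlier in this diff (the import path assumes a caller inside `src/notes/`):

```ts
import { listNotes } from "./read.js"; // assumed relative path

// Five most recently created notes.
const newest = await listNotes({ sort_by: "created", order: "desc", limit: 5 });

// Alphabetical listing of the "Projects" folder (folder match is case-insensitive).
const projects = await listNotes({ sort_by: "title", order: "asc", folder: "Projects" });
console.log(newest.map((n) => n.title), projects.length);
```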
package/src/search/chunk-indexer.ts CHANGED
@@ -205,3 +205,50 @@ export async function hasChunkIndex(): Promise<boolean> {
205
205
  return false;
206
206
  }
207
207
  }
208
+
209
+ /**
210
+ * Update chunks for specific notes (used by smart refresh).
211
+ * Deletes old chunks for these notes and creates new ones.
212
+ *
213
+ * @param notes - Notes to update chunks for
214
+ * @returns Number of chunks created
215
+ */
216
+ export async function updateChunksForNotes(notes: NoteDetails[]): Promise<number> {
217
+ if (notes.length === 0) return 0;
218
+
219
+ debug(`Updating chunks for ${notes.length} notes...`);
220
+
221
+ // Chunk all notes
222
+ const allChunks: InternalChunkRecord[] = [];
223
+ for (const note of notes) {
224
+ const noteChunks = chunkNote(note);
225
+ allChunks.push(...noteChunks);
226
+ }
227
+
228
+ if (allChunks.length === 0) {
229
+ debug("No chunks to update");
230
+ return 0;
231
+ }
232
+
233
+ // Generate embeddings
234
+ debug(`Generating embeddings for ${allChunks.length} chunks...`);
235
+ const chunkTexts = allChunks.map((chunk) => chunk.content);
236
+ const vectors = await getEmbeddingBatch(chunkTexts);
237
+
238
+ // Combine with vectors
239
+ const indexedAt = new Date().toISOString();
240
+ const completeChunks: ChunkRecord[] = allChunks.map((chunk, i) => ({
241
+ ...chunk,
242
+ vector: vectors[i],
243
+ indexed_at: indexedAt,
244
+ }));
245
+
246
+ // Delete old chunks for these notes and add new ones
247
+ const chunkStore = getChunkStore();
248
+ const noteIds = notes.map((n) => n.id);
249
+ await chunkStore.deleteChunksByNoteIds(noteIds);
250
+ await chunkStore.addChunks(completeChunks);
251
+
252
+ debug(`Updated ${completeChunks.length} chunks for ${notes.length} notes`);
253
+ return completeChunks.length;
254
+ }
package/src/search/refresh.test.ts ADDED
@@ -0,0 +1,173 @@
1
+ import { describe, it, expect, vi, beforeEach } from "vitest";
2
+
3
+ vi.mock("../notes/read.js", () => ({
4
+ getAllNotes: vi.fn(),
5
+ }));
6
+
7
+ vi.mock("../db/lancedb.js", () => ({
8
+ getVectorStore: vi.fn(),
9
+ }));
10
+
11
+ vi.mock("./indexer.js", () => ({
12
+ incrementalIndex: vi.fn(),
13
+ }));
14
+
15
+ describe("checkForChanges", () => {
16
+ beforeEach(() => {
17
+ vi.resetModules();
18
+ vi.clearAllMocks();
19
+ });
20
+
21
+ it("should return true if notes were modified after indexing", async () => {
22
+ const { getAllNotes } = await import("../notes/read.js");
23
+ const { getVectorStore } = await import("../db/lancedb.js");
24
+
25
+ vi.mocked(getAllNotes).mockResolvedValue([
26
+ { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-10T12:00:00Z" },
27
+ ]);
28
+
29
+ vi.mocked(getVectorStore).mockReturnValue({
30
+ getAll: vi.fn().mockResolvedValue([
31
+ { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
32
+ ]),
33
+ } as any);
34
+
35
+ const { checkForChanges } = await import("./refresh.js");
36
+ const hasChanges = await checkForChanges();
37
+
38
+ expect(hasChanges).toBe(true);
39
+ });
40
+
41
+ it("should return false if no changes", async () => {
42
+ const { getAllNotes } = await import("../notes/read.js");
43
+ const { getVectorStore } = await import("../db/lancedb.js");
44
+
45
+ vi.mocked(getAllNotes).mockResolvedValue([
46
+ { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
47
+ ]);
48
+
49
+ vi.mocked(getVectorStore).mockReturnValue({
50
+ getAll: vi.fn().mockResolvedValue([
51
+ { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
52
+ ]),
53
+ } as any);
54
+
55
+ const { checkForChanges } = await import("./refresh.js");
56
+ const hasChanges = await checkForChanges();
57
+
58
+ expect(hasChanges).toBe(false);
59
+ });
60
+
61
+ it("should return true if new note added", async () => {
62
+ const { getAllNotes } = await import("../notes/read.js");
63
+ const { getVectorStore } = await import("../db/lancedb.js");
64
+
65
+ vi.mocked(getAllNotes).mockResolvedValue([
66
+ { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
67
+ { title: "New Note", folder: "Work", created: "2026-01-10", modified: "2026-01-10T12:00:00Z" },
68
+ ]);
69
+
70
+ vi.mocked(getVectorStore).mockReturnValue({
71
+ getAll: vi.fn().mockResolvedValue([
72
+ { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
73
+ ]),
74
+ } as any);
75
+
76
+ const { checkForChanges } = await import("./refresh.js");
77
+ const hasChanges = await checkForChanges();
78
+
79
+ expect(hasChanges).toBe(true);
80
+ });
81
+
82
+ it("should return true if note deleted", async () => {
83
+ const { getAllNotes } = await import("../notes/read.js");
84
+ const { getVectorStore } = await import("../db/lancedb.js");
85
+
86
+ vi.mocked(getAllNotes).mockResolvedValue([]);
87
+
88
+ vi.mocked(getVectorStore).mockReturnValue({
89
+ getAll: vi.fn().mockResolvedValue([
90
+ { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
91
+ ]),
92
+ } as any);
93
+
94
+ const { checkForChanges } = await import("./refresh.js");
95
+ const hasChanges = await checkForChanges();
96
+
97
+ expect(hasChanges).toBe(true);
98
+ });
99
+
100
+ it("should return true if no index exists and notes exist", async () => {
101
+ const { getAllNotes } = await import("../notes/read.js");
102
+ const { getVectorStore } = await import("../db/lancedb.js");
103
+
104
+ vi.mocked(getAllNotes).mockResolvedValue([
105
+ { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
106
+ ]);
107
+
108
+ vi.mocked(getVectorStore).mockReturnValue({
109
+ getAll: vi.fn().mockRejectedValue(new Error("Table not found")),
110
+ } as any);
111
+
112
+ const { checkForChanges } = await import("./refresh.js");
113
+ const hasChanges = await checkForChanges();
114
+
115
+ expect(hasChanges).toBe(true);
116
+ });
117
+ });
118
+
119
+ describe("refreshIfNeeded", () => {
120
+ beforeEach(() => {
121
+ vi.resetModules();
122
+ vi.clearAllMocks();
123
+ });
124
+
125
+ it("should trigger incremental index if changes detected", async () => {
126
+ const { incrementalIndex } = await import("./indexer.js");
127
+ const { getAllNotes } = await import("../notes/read.js");
128
+ const { getVectorStore } = await import("../db/lancedb.js");
129
+
130
+ vi.mocked(getAllNotes).mockResolvedValue([
131
+ { title: "New Note", folder: "Work", created: "2026-01-10", modified: "2026-01-10T12:00:00Z" },
132
+ ]);
133
+
134
+ vi.mocked(getVectorStore).mockReturnValue({
135
+ getAll: vi.fn().mockResolvedValue([]),
136
+ } as any);
137
+
138
+ vi.mocked(incrementalIndex).mockResolvedValue({
139
+ total: 1,
140
+ indexed: 1,
141
+ errors: 0,
142
+ timeMs: 100,
143
+ });
144
+
145
+ const { refreshIfNeeded } = await import("./refresh.js");
146
+ const refreshed = await refreshIfNeeded();
147
+
148
+ expect(refreshed).toBe(true);
149
+ expect(incrementalIndex).toHaveBeenCalled();
150
+ });
151
+
152
+ it("should not trigger index if no changes", async () => {
153
+ const { incrementalIndex } = await import("./indexer.js");
154
+ const { getAllNotes } = await import("../notes/read.js");
155
+ const { getVectorStore } = await import("../db/lancedb.js");
156
+
157
+ vi.mocked(getAllNotes).mockResolvedValue([
158
+ { title: "Note 1", folder: "Work", created: "2026-01-01", modified: "2026-01-08T12:00:00Z" },
159
+ ]);
160
+
161
+ vi.mocked(getVectorStore).mockReturnValue({
162
+ getAll: vi.fn().mockResolvedValue([
163
+ { title: "Note 1", folder: "Work", indexed_at: "2026-01-09T12:00:00Z" },
164
+ ]),
165
+ } as any);
166
+
167
+ const { refreshIfNeeded } = await import("./refresh.js");
168
+ const refreshed = await refreshIfNeeded();
169
+
170
+ expect(refreshed).toBe(false);
171
+ expect(incrementalIndex).not.toHaveBeenCalled();
172
+ });
173
+ });
package/src/search/refresh.ts ADDED
@@ -0,0 +1,151 @@
1
+ /**
2
+ * Smart refresh: check for note changes before search.
3
+ * Triggers incremental index if notes have been modified.
4
+ * Also updates chunk index for changed notes.
5
+ */
6
+
7
+ import { getAllNotes, getNoteByFolderAndTitle, type NoteInfo } from "../notes/read.js";
8
+ import { getVectorStore, getChunkStore } from "../db/lancedb.js";
9
+ import { incrementalIndex } from "./indexer.js";
10
+ import { updateChunksForNotes, hasChunkIndex } from "./chunk-indexer.js";
11
+ import { createDebugLogger } from "../utils/debug.js";
12
+
13
+ const debug = createDebugLogger("REFRESH");
14
+
15
+ /**
16
+ * Detected changes in notes.
17
+ */
18
+ interface DetectedChanges {
19
+ hasChanges: boolean;
20
+ added: NoteInfo[];
21
+ modified: NoteInfo[];
22
+ deleted: string[]; // note IDs
23
+ }
24
+
25
+ /**
26
+ * Check for note changes and return details about what changed.
27
+ */
28
+ export async function detectChanges(): Promise<DetectedChanges> {
29
+ debug("Checking for changes...");
30
+
31
+ const currentNotes = await getAllNotes();
32
+ const store = getVectorStore();
33
+
34
+ let existingRecords;
35
+ try {
36
+ existingRecords = await store.getAll();
37
+ } catch {
38
+ // No index exists yet
39
+ debug("No existing index found");
40
+ return {
41
+ hasChanges: currentNotes.length > 0,
42
+ added: currentNotes,
43
+ modified: [],
44
+ deleted: [],
45
+ };
46
+ }
47
+
48
+ // Build lookup maps
49
+ const existingByKey = new Map<string, { indexed_at: string; id: string }>();
50
+ for (const record of existingRecords) {
51
+ const key = `${record.folder}/${record.title}`;
52
+ existingByKey.set(key, { indexed_at: record.indexed_at, id: record.id });
53
+ }
54
+
55
+ const added: NoteInfo[] = [];
56
+ const modified: NoteInfo[] = [];
57
+ const deleted: string[] = [];
58
+
59
+ // Check for new or modified notes
60
+ for (const note of currentNotes) {
61
+ const key = `${note.folder}/${note.title}`;
62
+ const existing = existingByKey.get(key);
63
+
64
+ if (!existing) {
65
+ debug(`New note detected: ${key}`);
66
+ added.push(note);
67
+ } else {
68
+ const noteModified = new Date(note.modified).getTime();
69
+ const recordIndexed = new Date(existing.indexed_at).getTime();
70
+
71
+ if (noteModified > recordIndexed) {
72
+ debug(`Modified note detected: ${key}`);
73
+ modified.push(note);
74
+ }
75
+ }
76
+ }
77
+
78
+ // Check for deleted notes
79
+ const currentKeys = new Set(currentNotes.map((n) => `${n.folder}/${n.title}`));
80
+ for (const [key, { id }] of existingByKey) {
81
+ if (!currentKeys.has(key)) {
82
+ debug(`Deleted note detected: ${key}`);
83
+ deleted.push(id);
84
+ }
85
+ }
86
+
87
+ const hasChanges = added.length > 0 || modified.length > 0 || deleted.length > 0;
88
+ debug(`Changes: ${added.length} added, ${modified.length} modified, ${deleted.length} deleted`);
89
+
90
+ return { hasChanges, added, modified, deleted };
91
+ }
92
+
93
+ /**
94
+ * Check if any notes have been modified since last index.
95
+ * @returns true if changes detected, false otherwise
96
+ */
97
+ export async function checkForChanges(): Promise<boolean> {
98
+ const changes = await detectChanges();
99
+ return changes.hasChanges;
100
+ }
101
+
102
+ /**
103
+ * Refresh index if changes are detected.
104
+ * Updates both main index AND chunk index.
105
+ *
106
+ * @returns true if index was refreshed, false if no changes
107
+ */
108
+ export async function refreshIfNeeded(): Promise<boolean> {
109
+ const changes = await detectChanges();
110
+
111
+ if (!changes.hasChanges) {
112
+ return false;
113
+ }
114
+
115
+ // Update main index
116
+ debug("Changes detected, running incremental index...");
117
+ const result = await incrementalIndex();
118
+ debug(`Main index refresh: ${result.indexed} notes updated in ${result.timeMs}ms`);
119
+
120
+ // Update chunk index if it exists and there are changes
121
+ const hasChunks = await hasChunkIndex();
122
+ if (hasChunks && (changes.added.length > 0 || changes.modified.length > 0)) {
123
+ debug("Updating chunk index for changed notes...");
124
+
125
+ // Fetch full content for changed notes
126
+ const changedNotes = [...changes.added, ...changes.modified];
127
+ const notesWithContent = await Promise.all(
128
+ changedNotes.map(async (n) => {
129
+ const note = await getNoteByFolderAndTitle(n.folder, n.title);
130
+ return note;
131
+ })
132
+ );
133
+
134
+ // Filter out nulls (notes that couldn't be fetched)
135
+ const validNotes = notesWithContent.filter((n) => n !== null);
136
+
137
+ if (validNotes.length > 0) {
138
+ const chunksCreated = await updateChunksForNotes(validNotes);
139
+ debug(`Chunk index refresh: ${chunksCreated} chunks for ${validNotes.length} notes`);
140
+ }
141
+
142
+ // Delete chunks for deleted notes
143
+ if (changes.deleted.length > 0) {
144
+ const chunkStore = getChunkStore();
145
+ await chunkStore.deleteChunksByNoteIds(changes.deleted);
146
+ debug(`Deleted chunks for ${changes.deleted.length} notes`);
147
+ }
148
+ }
149
+
150
+ return true;
151
+ }
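Finally, a hedged sketch of consuming the exported `detectChanges()`/`refreshIfNeeded()` pair outside the `search-notes` handler; the only caller added by this diff is in `src/index.ts`, so the import path here is an assumption.

```ts
import { detectChanges, refreshIfNeeded } from "./search/refresh.js"; // assumed caller next to src/index.ts

const changes = await detectChanges();
if (changes.hasChanges) {
  console.log(
    `${changes.added.length} added, ${changes.modified.length} modified, ${changes.deleted.length} deleted`
  );
  await refreshIfNeeded(); // re-runs detection, then updates the main and chunk indexes
}
```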