@disco_trooper/apple-notes-mcp 1.2.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/README.md +136 -24
  2. package/package.json +13 -9
  3. package/src/config/claude.test.ts +47 -0
  4. package/src/config/claude.ts +106 -0
  5. package/src/config/constants.ts +11 -2
  6. package/src/config/paths.test.ts +40 -0
  7. package/src/config/paths.ts +86 -0
  8. package/src/db/arrow-fix.test.ts +101 -0
  9. package/src/db/lancedb.test.ts +209 -2
  10. package/src/db/lancedb.ts +373 -7
  11. package/src/embeddings/cache.test.ts +150 -0
  12. package/src/embeddings/cache.ts +204 -0
  13. package/src/embeddings/index.ts +21 -2
  14. package/src/embeddings/local.ts +61 -10
  15. package/src/embeddings/openrouter.ts +233 -11
  16. package/src/graph/export.test.ts +81 -0
  17. package/src/graph/export.ts +163 -0
  18. package/src/graph/extract.test.ts +90 -0
  19. package/src/graph/extract.ts +52 -0
  20. package/src/graph/queries.test.ts +156 -0
  21. package/src/graph/queries.ts +224 -0
  22. package/src/index.ts +376 -10
  23. package/src/notes/crud.test.ts +148 -3
  24. package/src/notes/crud.ts +250 -5
  25. package/src/notes/read.ts +83 -68
  26. package/src/search/chunk-indexer.test.ts +353 -0
  27. package/src/search/chunk-indexer.ts +254 -0
  28. package/src/search/chunk-search.test.ts +327 -0
  29. package/src/search/chunk-search.ts +298 -0
  30. package/src/search/indexer.ts +151 -109
  31. package/src/search/refresh.test.ts +173 -0
  32. package/src/search/refresh.ts +151 -0
  33. package/src/setup.ts +46 -67
  34. package/src/utils/chunker.test.ts +182 -0
  35. package/src/utils/chunker.ts +170 -0
  36. package/src/utils/content-filter.test.ts +225 -0
  37. package/src/utils/content-filter.ts +275 -0
  38. package/src/utils/runtime.test.ts +70 -0
  39. package/src/utils/runtime.ts +40 -0
package/src/db/arrow-fix.test.ts
@@ -0,0 +1,101 @@
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
+ import * as fs from "fs";
+ import * as os from "os";
+ import * as path from "path";
+ import { LanceDBStore } from "./lancedb.js";
+
+ describe("Arrow type inference fix", () => {
+   let tempDir: string;
+   let store: LanceDBStore;
+
+   beforeEach(() => {
+     tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "arrow-test-"));
+     store = new LanceDBStore(tempDir);
+   });
+
+   afterEach(() => {
+     fs.rmSync(tempDir, { recursive: true, force: true });
+   });
+
+   it("handles records where ALL tags and outlinks are empty", async () => {
+     // This is the exact scenario that triggers the Arrow bug
+     const records = [
+       {
+         id: "1",
+         title: "Note 1",
+         content: "Hello world",
+         vector: new Array(384).fill(0.1),
+         folder: "Notes",
+         created: new Date().toISOString(),
+         modified: new Date().toISOString(),
+         indexed_at: new Date().toISOString(),
+         tags: [], // Empty!
+         outlinks: [], // Empty!
+       },
+       {
+         id: "2",
+         title: "Note 2",
+         content: "Test content",
+         vector: new Array(384).fill(0.2),
+         folder: "Notes",
+         created: new Date().toISOString(),
+         modified: new Date().toISOString(),
+         indexed_at: new Date().toISOString(),
+         tags: [], // Empty!
+         outlinks: [], // Empty!
+       },
+     ];
+
+     // This should NOT throw "Cannot infer list vector from empty array"
+     await store.index(records);
+
+     // Verify data was stored
+     const count = await store.count();
+     expect(count).toBe(2);
+
+     // Verify tags are empty (placeholders removed)
+     const all = await store.getAll();
+     expect(all[0].tags).toEqual([]);
+     expect(all[0].outlinks).toEqual([]);
+   });
+
+   it("handles mixed empty and non-empty arrays", async () => {
+     const records = [
+       {
+         id: "1",
+         title: "Note with tags",
+         content: "Hello",
+         vector: new Array(384).fill(0.1),
+         folder: "Notes",
+         created: new Date().toISOString(),
+         modified: new Date().toISOString(),
+         indexed_at: new Date().toISOString(),
+         tags: ["tag1", "tag2"],
+         outlinks: [],
+       },
+       {
+         id: "2",
+         title: "Note without tags",
+         content: "World",
+         vector: new Array(384).fill(0.2),
+         folder: "Notes",
+         created: new Date().toISOString(),
+         modified: new Date().toISOString(),
+         indexed_at: new Date().toISOString(),
+         tags: [],
+         outlinks: ["Note 1"],
+       },
+     ];
+
+     await store.index(records);
+
+     const all = await store.getAll();
+     const note1 = all.find(n => n.id === "1");
+     const note2 = all.find(n => n.id === "2");
+
+     expect(note1?.tags).toEqual(["tag1", "tag2"]);
+     expect(note1?.outlinks).toEqual([]);
+     expect(note2?.tags).toEqual([]);
+     expect(note2?.outlinks).toEqual(["Note 1"]);
+   });
+ });
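The comments in this test ("placeholders removed", "Cannot infer list vector from empty array") suggest that the fix in lancedb.ts seeds empty list columns with a placeholder element before the Arrow table is created and strips it again on read. Below is a minimal sketch of that idea only; the helper names `addListPlaceholders`, `stripListPlaceholders`, and the `__EMPTY__` sentinel are hypothetical and not taken from the package.

```ts
// Sketch of the workaround these tests describe; the real lancedb.ts may differ.
// Arrow cannot infer a List column's element type when every row's value is [],
// so one approach is to seed a sentinel element before writing and remove it
// again when rows are read back.

const LIST_PLACEHOLDER = "__EMPTY__"; // hypothetical sentinel, not from the package

type ListRow = { tags: string[]; outlinks: string[]; [key: string]: unknown };

// Before writing: ensure each list column has at least one non-empty value,
// so Arrow can infer List<Utf8> instead of throwing.
function addListPlaceholders(records: ListRow[]): ListRow[] {
  if (records.length === 0) return records;
  const out = records.map((r) => ({ ...r }));
  if (out.every((r) => r.tags.length === 0)) {
    out[0].tags = [LIST_PLACEHOLDER];
  }
  if (out.every((r) => r.outlinks.length === 0)) {
    out[0].outlinks = [LIST_PLACEHOLDER];
  }
  return out;
}

// After reading: drop the sentinel so callers still see [] for empty lists.
function stripListPlaceholders(rows: ListRow[]): ListRow[] {
  return rows.map((r) => ({
    ...r,
    tags: r.tags.filter((t) => t !== LIST_PLACEHOLDER),
    outlinks: r.outlinks.filter((o) => o !== LIST_PLACEHOLDER),
  }));
}
```

An alternative would be to define the table's schema explicitly rather than relying on inference; the lancedb.test.ts change below sidesteps the problem in its test helper by always supplying non-empty tags and outlinks.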
package/src/db/lancedb.test.ts
@@ -1,6 +1,6 @@
  import { describe, it, expect, beforeEach, afterEach } from "vitest";
- import { LanceDBStore } from "./lancedb.js";
- import type { NoteRecord } from "./lancedb.js";
+ import { LanceDBStore, ChunkStore } from "./lancedb.js";
+ import type { NoteRecord, ChunkRecord } from "./lancedb.js";
  import * as fs from "node:fs";
  import * as path from "node:path";

@@ -28,6 +28,9 @@ describe("LanceDBStore", () => {
      created: new Date().toISOString(),
      indexed_at: new Date().toISOString(),
      vector: Array(384).fill(0.1),
+     // LanceDB requires at least one element to infer the list type
+     tags: ["test-tag"],
+     outlinks: ["test-link"],
    });

    describe("index and getByTitle", () => {
@@ -184,3 +187,207 @@
      });
    });
  });
+
+ describe("ChunkStore", () => {
+   let chunkStore: ChunkStore;
+   let testDbPath: string;
+
+   beforeEach(() => {
+     testDbPath = path.join("/tmp", `lancedb-chunk-test-${Date.now()}`);
+     chunkStore = new ChunkStore(testDbPath);
+   });
+
+   afterEach(() => {
+     if (fs.existsSync(testDbPath)) {
+       fs.rmSync(testDbPath, { recursive: true, force: true });
+     }
+   });
+
+   const createTestChunk = (
+     noteId: string,
+     chunkIndex: number,
+     totalChunks: number,
+     content?: string
+   ): ChunkRecord => ({
+     chunk_id: `${noteId}_chunk_${chunkIndex}`,
+     note_id: noteId,
+     note_title: `Note ${noteId}`,
+     folder: "Test",
+     chunk_index: chunkIndex,
+     total_chunks: totalChunks,
+     content: content ?? `Chunk ${chunkIndex} content for note ${noteId}`,
+     vector: Array(384).fill(0.1),
+     created: new Date().toISOString(),
+     modified: new Date().toISOString(),
+     indexed_at: new Date().toISOString(),
+     tags: ["test-tag"],
+     outlinks: ["test-link"],
+   });
+
+   describe("indexChunks", () => {
+     it("indexes chunks and allows retrieval", async () => {
+       const chunks = [
+         createTestChunk("note-1", 0, 2),
+         createTestChunk("note-1", 1, 2),
+         createTestChunk("note-2", 0, 1),
+       ];
+
+       await chunkStore.indexChunks(chunks);
+       const count = await chunkStore.count();
+
+       expect(count).toBe(3);
+     });
+
+     it("handles empty chunks array", async () => {
+       await chunkStore.indexChunks([]);
+       const count = await chunkStore.count();
+       expect(count).toBe(0);
+     });
+
+     it("handles chunks with empty tags and outlinks", async () => {
+       const chunks = [
+         { ...createTestChunk("note-1", 0, 1), tags: [], outlinks: [] },
+       ];
+
+       await chunkStore.indexChunks(chunks);
+       const count = await chunkStore.count();
+       expect(count).toBe(1);
+     });
+   });
+
+   describe("searchChunks", () => {
+     it("returns results based on vector similarity", async () => {
+       const chunks = [
+         createTestChunk("note-1", 0, 2),
+         createTestChunk("note-1", 1, 2),
+       ];
+       await chunkStore.indexChunks(chunks);
+
+       const queryVector = Array(384).fill(0.1);
+       const results = await chunkStore.searchChunks(queryVector, 2);
+
+       expect(results).toHaveLength(2);
+       expect(results[0]).toHaveProperty("chunk_id");
+       expect(results[0]).toHaveProperty("note_id");
+       expect(results[0]).toHaveProperty("score");
+     });
+   });
+
+   describe("searchChunksFTS", () => {
+     it("returns results matching query text", async () => {
+       const chunks = [
+         createTestChunk("note-1", 0, 1, "Meeting notes about project planning"),
+         createTestChunk("note-2", 0, 1, "Shopping list for groceries"),
+       ];
+       await chunkStore.indexChunks(chunks);
+       await chunkStore.rebuildFtsIndex();
+
+       const results = await chunkStore.searchChunksFTS("Meeting", 10);
+       expect(results.length).toBeGreaterThanOrEqual(1);
+       expect(results[0].content).toContain("Meeting");
+     });
+
+     it("returns empty array for no matches", async () => {
+       const chunks = [createTestChunk("note-1", 0, 1)];
+       await chunkStore.indexChunks(chunks);
+       await chunkStore.rebuildFtsIndex();
+
+       const results = await chunkStore.searchChunksFTS("nonexistentquery12345", 10);
+       expect(results).toHaveLength(0);
+     });
+   });
+
+   describe("getChunksByNoteId", () => {
+     it("returns all chunks for a note sorted by chunk_index", async () => {
+       const chunks = [
+         createTestChunk("note-1", 2, 3),
+         createTestChunk("note-1", 0, 3),
+         createTestChunk("note-1", 1, 3),
+         createTestChunk("note-2", 0, 1),
+       ];
+       await chunkStore.indexChunks(chunks);
+
+       const noteChunks = await chunkStore.getChunksByNoteId("note-1");
+
+       expect(noteChunks).toHaveLength(3);
+       expect(noteChunks[0].chunk_index).toBe(0);
+       expect(noteChunks[1].chunk_index).toBe(1);
+       expect(noteChunks[2].chunk_index).toBe(2);
+     });
+
+     it("returns empty array for non-existent note", async () => {
+       await chunkStore.indexChunks([createTestChunk("note-1", 0, 1)]);
+
+       const chunks = await chunkStore.getChunksByNoteId("non-existent");
+       expect(chunks).toHaveLength(0);
+     });
+   });
+
+   describe("deleteNoteChunks", () => {
+     it("deletes all chunks for a note", async () => {
+       const chunks = [
+         createTestChunk("note-1", 0, 2),
+         createTestChunk("note-1", 1, 2),
+         createTestChunk("note-2", 0, 1),
+       ];
+       await chunkStore.indexChunks(chunks);
+
+       await chunkStore.deleteNoteChunks("note-1");
+
+       const remaining = await chunkStore.count();
+       expect(remaining).toBe(1);
+
+       const note1Chunks = await chunkStore.getChunksByNoteId("note-1");
+       expect(note1Chunks).toHaveLength(0);
+
+       const note2Chunks = await chunkStore.getChunksByNoteId("note-2");
+       expect(note2Chunks).toHaveLength(1);
+     });
+
+     it("does not throw when deleting non-existent note chunks", async () => {
+       await chunkStore.indexChunks([createTestChunk("note-1", 0, 1)]);
+       await expect(chunkStore.deleteNoteChunks("non-existent")).resolves.not.toThrow();
+     });
+   });
+
+   describe("count", () => {
+     it("returns correct count", async () => {
+       const chunks = [
+         createTestChunk("note-1", 0, 2),
+         createTestChunk("note-1", 1, 2),
+         createTestChunk("note-2", 0, 1),
+       ];
+       await chunkStore.indexChunks(chunks);
+
+       expect(await chunkStore.count()).toBe(3);
+     });
+
+     it("returns 0 for empty store", async () => {
+       expect(await chunkStore.count()).toBe(0);
+     });
+   });
+
+   describe("clear", () => {
+     it("removes all chunks", async () => {
+       await chunkStore.indexChunks([
+         createTestChunk("note-1", 0, 1),
+         createTestChunk("note-2", 0, 1),
+       ]);
+       expect(await chunkStore.count()).toBe(2);
+
+       await chunkStore.clear();
+       expect(await chunkStore.count()).toBe(0);
+     });
+   });
+
+   describe("rebuildFtsIndex", () => {
+     it("rebuilds FTS index without error", async () => {
+       await chunkStore.indexChunks([
+         createTestChunk("note-1", 0, 1),
+         createTestChunk("note-2", 0, 1),
+       ]);
+
+       await expect(chunkStore.rebuildFtsIndex()).resolves.not.toThrow();
+     });
+   });
+ });
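Taken together, these tests outline the ChunkStore surface added in this release: indexChunks, searchChunks, searchChunksFTS, getChunksByNoteId, deleteNoteChunks, count, clear, and rebuildFtsIndex. The sketch below is a rough caller's view inferred only from the calls above; the signatures, the 384-dimension vectors, and all field values are assumptions for illustration, not read from lancedb.ts.

```ts
import { ChunkStore, type ChunkRecord } from "./lancedb.js";

// Illustrative driver based solely on the methods the tests above exercise.
async function demo(dbPath: string): Promise<void> {
  const store = new ChunkStore(dbPath);

  const chunk: ChunkRecord = {
    chunk_id: "note-1_chunk_0",
    note_id: "note-1",
    note_title: "Note note-1",
    folder: "Test",
    chunk_index: 0,
    total_chunks: 1,
    content: "Meeting notes about project planning",
    vector: Array(384).fill(0.1), // embedding dimension used throughout the tests
    created: new Date().toISOString(),
    modified: new Date().toISOString(),
    indexed_at: new Date().toISOString(),
    tags: ["meetings"],
    outlinks: [],
  };

  await store.indexChunks([chunk]); // write chunks
  await store.rebuildFtsIndex();    // the tests call this before searchChunksFTS

  const byVector = await store.searchChunks(Array(384).fill(0.1), 5); // vector similarity, top 5
  const byText = await store.searchChunksFTS("Meeting", 10);          // full-text search
  const forNote = await store.getChunksByNoteId("note-1");            // ordered by chunk_index

  console.log(byVector.length, byText.length, forNote.length, await store.count());

  await store.deleteNoteChunks("note-1"); // remove one note's chunks
  await store.clear();                    // remove everything
}
```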