context-mode 0.9.17 → 0.9.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/build/store.js CHANGED
@@ -87,6 +91,10 @@ function maxEditDistance(wordLength) {
         return 2;
     return 3;
 }
+// Oversized chunks (e.g., a 50KB section between two headings) hurt BM25
+// length normalization and produce unwieldy search results. Split at paragraph
+// boundaries when a chunk exceeds this cap.
+const MAX_CHUNK_BYTES = 4096;
 // ─────────────────────────────────────────────────────────
 // ContentStore
 // ─────────────────────────────────────────────────────────
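
Note that the cap is measured in UTF-8 bytes via Buffer.byteLength, not in characters, so multi-byte text hits it sooner than String.prototype.length suggests. A minimal sketch (illustrative values, not code from this package):

const s = "é".repeat(3000);        // 3000 characters, 2 bytes each in UTF-8
console.log(s.length);             // 3000
console.log(Buffer.byteLength(s)); // 6000, already over the 4096-byte cap
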
@@ -126,6 +130,27 @@ export function cleanupStaleDBs() {
 export class ContentStore {
     #db;
     #dbPath;
+    // ── Cached Prepared Statements ──
+    // Prepared once at construction, reused on every call to avoid
+    // re-compiling SQL on each invocation.
+    // Write path
+    #stmtInsertSourceEmpty;
+    #stmtInsertSource;
+    #stmtInsertChunk;
+    #stmtInsertChunkTrigram;
+    #stmtInsertVocab;
+    // Search path (hot)
+    #stmtSearchPorter;
+    #stmtSearchPorterFiltered;
+    #stmtSearchTrigram;
+    #stmtSearchTrigramFiltered;
+    #stmtFuzzyVocab;
+    // Read path
+    #stmtListSources;
+    #stmtChunksBySource;
+    #stmtSourceChunkCount;
+    #stmtChunkContent;
+    #stmtStats;
     constructor(dbPath) {
         const Database = loadDatabase();
         this.#dbPath =
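
These fields support better-sqlite3's prepare-once, reuse-everywhere idiom (the pragma/prepare/transaction calls in this file match that API). A minimal sketch of the pattern, assuming better-sqlite3 and not taken from this package:

import Database from "better-sqlite3";

const db = new Database(":memory:");
db.exec("CREATE TABLE words (word TEXT PRIMARY KEY)");
// Compiled once at startup...
const insert = db.prepare("INSERT OR IGNORE INTO words (word) VALUES (?)");
// ...then reused on every call, with no SQL parsing on this hot path.
for (const w of ["alpha", "beta", "gamma"]) insert.run(w);
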
@@ -134,6 +159,7 @@ export class ContentStore {
         this.#db.pragma("journal_mode = WAL");
         this.#db.pragma("synchronous = NORMAL");
         this.#initSchema();
+        this.#prepareStatements();
     }
     /** Delete this session's DB files. Call on process exit. */
     cleanup() {
@@ -180,6 +206,88 @@ export class ContentStore {
         );
         `);
     }
+    #prepareStatements() {
+        // Write path
+        this.#stmtInsertSourceEmpty = this.#db.prepare("INSERT INTO sources (label, chunk_count, code_chunk_count) VALUES (?, 0, 0)");
+        this.#stmtInsertSource = this.#db.prepare("INSERT INTO sources (label, chunk_count, code_chunk_count) VALUES (?, ?, ?)");
+        this.#stmtInsertChunk = this.#db.prepare("INSERT INTO chunks (title, content, source_id, content_type) VALUES (?, ?, ?, ?)");
+        this.#stmtInsertChunkTrigram = this.#db.prepare("INSERT INTO chunks_trigram (title, content, source_id, content_type) VALUES (?, ?, ?, ?)");
+        this.#stmtInsertVocab = this.#db.prepare("INSERT OR IGNORE INTO vocabulary (word) VALUES (?)");
+        // Search path (hot)
+        this.#stmtSearchPorter = this.#db.prepare(`
+            SELECT
+                chunks.title,
+                chunks.content,
+                chunks.content_type,
+                sources.label,
+                bm25(chunks, 2.0, 1.0) AS rank,
+                highlight(chunks, 1, char(2), char(3)) AS highlighted
+            FROM chunks
+            JOIN sources ON sources.id = chunks.source_id
+            WHERE chunks MATCH ?
+            ORDER BY rank
+            LIMIT ?
+        `);
+        this.#stmtSearchPorterFiltered = this.#db.prepare(`
+            SELECT
+                chunks.title,
+                chunks.content,
+                chunks.content_type,
+                sources.label,
+                bm25(chunks, 2.0, 1.0) AS rank,
+                highlight(chunks, 1, char(2), char(3)) AS highlighted
+            FROM chunks
+            JOIN sources ON sources.id = chunks.source_id
+            WHERE chunks MATCH ? AND sources.label LIKE ?
+            ORDER BY rank
+            LIMIT ?
+        `);
+        this.#stmtSearchTrigram = this.#db.prepare(`
+            SELECT
+                chunks_trigram.title,
+                chunks_trigram.content,
+                chunks_trigram.content_type,
+                sources.label,
+                bm25(chunks_trigram, 2.0, 1.0) AS rank,
+                highlight(chunks_trigram, 1, char(2), char(3)) AS highlighted
+            FROM chunks_trigram
+            JOIN sources ON sources.id = chunks_trigram.source_id
+            WHERE chunks_trigram MATCH ?
+            ORDER BY rank
+            LIMIT ?
+        `);
+        this.#stmtSearchTrigramFiltered = this.#db.prepare(`
+            SELECT
+                chunks_trigram.title,
+                chunks_trigram.content,
+                chunks_trigram.content_type,
+                sources.label,
+                bm25(chunks_trigram, 2.0, 1.0) AS rank,
+                highlight(chunks_trigram, 1, char(2), char(3)) AS highlighted
+            FROM chunks_trigram
+            JOIN sources ON sources.id = chunks_trigram.source_id
+            WHERE chunks_trigram MATCH ? AND sources.label LIKE ?
+            ORDER BY rank
+            LIMIT ?
+        `);
+        // Fuzzy path
+        this.#stmtFuzzyVocab = this.#db.prepare("SELECT word FROM vocabulary WHERE length(word) BETWEEN ? AND ?");
+        // Read path
+        this.#stmtListSources = this.#db.prepare("SELECT label, chunk_count as chunkCount FROM sources ORDER BY id DESC");
+        this.#stmtChunksBySource = this.#db.prepare(`SELECT c.title, c.content, c.content_type, s.label
+            FROM chunks c
+            JOIN sources s ON s.id = c.source_id
+            WHERE c.source_id = ?
+            ORDER BY c.rowid`);
+        this.#stmtSourceChunkCount = this.#db.prepare("SELECT chunk_count FROM sources WHERE id = ?");
+        this.#stmtChunkContent = this.#db.prepare("SELECT content FROM chunks WHERE source_id = ?");
+        this.#stmtStats = this.#db.prepare(`
+            SELECT
+                (SELECT COUNT(*) FROM sources) AS sources,
+                (SELECT COUNT(*) FROM chunks) AS chunks,
+                (SELECT COUNT(*) FROM chunks WHERE content_type = 'code') AS codeChunks
+        `);
+    }
     // ── Index ──
     index(options) {
         const { content, path, source } = options;
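
Two FTS5 details in the search statements above: bm25(chunks, 2.0, 1.0) assigns per-column weights in declaration order, so a title match counts double relative to content, and FTS5's bm25() returns lower values for better matches, which is why ORDER BY rank ascends. highlight(chunks, 1, char(2), char(3)) wraps matches in column 1 (content) with the control characters U+0002/U+0003 so a caller can restyle them. A hypothetical consumer, assuming the search results expose rank, label, and highlighted as selected:

const rows = store.search("chunk splitting", 3); // assumed result shape
for (const r of rows) {
    const snippet = r.highlighted.replaceAll("\u0002", "[").replaceAll("\u0003", "]");
    console.log(r.rank.toFixed(2), r.label, snippet);
}
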
@@ -189,38 +297,7 @@ export class ContentStore {
         const text = content ?? readFileSync(path, "utf-8");
         const label = source ?? path ?? "untitled";
         const chunks = this.#chunkMarkdown(text);
-        if (chunks.length === 0) {
-            const insertSource = this.#db.prepare("INSERT INTO sources (label, chunk_count, code_chunk_count) VALUES (?, 0, 0)");
-            const info = insertSource.run(label);
-            return {
-                sourceId: Number(info.lastInsertRowid),
-                label,
-                totalChunks: 0,
-                codeChunks: 0,
-            };
-        }
-        const codeChunks = chunks.filter((c) => c.hasCode).length;
-        const insertSource = this.#db.prepare("INSERT INTO sources (label, chunk_count, code_chunk_count) VALUES (?, ?, ?)");
-        const insertChunk = this.#db.prepare("INSERT INTO chunks (title, content, source_id, content_type) VALUES (?, ?, ?, ?)");
-        const insertChunkTrigram = this.#db.prepare("INSERT INTO chunks_trigram (title, content, source_id, content_type) VALUES (?, ?, ?, ?)");
-        const transaction = this.#db.transaction(() => {
-            const info = insertSource.run(label, chunks.length, codeChunks);
-            const sourceId = Number(info.lastInsertRowid);
-            for (const chunk of chunks) {
-                const ct = chunk.hasCode ? "code" : "prose";
-                insertChunk.run(chunk.title, chunk.content, sourceId, ct);
-                insertChunkTrigram.run(chunk.title, chunk.content, sourceId, ct);
-            }
-            return sourceId;
-        });
-        const sourceId = transaction();
-        this.#extractAndStoreVocabulary(text);
-        return {
-            sourceId,
-            label,
-            totalChunks: chunks.length,
-            codeChunks,
-        };
+        return this.#insertChunks(chunks, label, text);
     }
     // ── Index Plain Text ──
     /**
@@ -230,55 +307,79 @@ export class ContentStore {
      */
     indexPlainText(content, source, linesPerChunk = 20) {
         if (!content || content.trim().length === 0) {
-            const insertSource = this.#db.prepare("INSERT INTO sources (label, chunk_count, code_chunk_count) VALUES (?, 0, 0)");
-            const info = insertSource.run(source);
+            return this.#insertChunks([], source, "");
+        }
+        const chunks = this.#chunkPlainText(content, linesPerChunk);
+        return this.#insertChunks(chunks.map((c) => ({ ...c, hasCode: false })), source, content);
+    }
+    // ── Index JSON ──
+    /**
+     * Index JSON content by walking the object tree and using key paths
+     * as chunk titles (analogous to heading hierarchy in markdown). Objects
+     * recurse by key; arrays batch items by size.
+     *
+     * Falls back to `indexPlainText` if the content is not valid JSON.
+     */
+    indexJSON(content, source, maxChunkBytes = MAX_CHUNK_BYTES) {
+        if (!content || content.trim().length === 0) {
+            return this.indexPlainText("", source);
+        }
+        let parsed;
+        try {
+            parsed = JSON.parse(content);
+        }
+        catch {
+            return this.indexPlainText(content, source);
+        }
+        const chunks = [];
+        this.#walkJSON(parsed, [], chunks, maxChunkBytes);
+        if (chunks.length === 0) {
+            return this.indexPlainText(content, source);
+        }
+        return this.#insertChunks(chunks, source, content);
+    }
+    // ── Shared DB Insertion ──
+    /**
+     * Shared DB insertion logic for all index methods. Inserts chunks
+     * into both FTS5 tables within a transaction and extracts vocabulary.
+     * Uses cached prepared statements from #prepareStatements().
+     */
+    #insertChunks(chunks, label, text) {
+        if (chunks.length === 0) {
+            const info = this.#stmtInsertSourceEmpty.run(label);
             return {
                 sourceId: Number(info.lastInsertRowid),
-                label: source,
+                label,
                 totalChunks: 0,
                 codeChunks: 0,
             };
         }
-        const chunks = this.#chunkPlainText(content, linesPerChunk);
-        const insertSource = this.#db.prepare("INSERT INTO sources (label, chunk_count, code_chunk_count) VALUES (?, ?, ?)");
-        const insertChunk = this.#db.prepare("INSERT INTO chunks (title, content, source_id, content_type) VALUES (?, ?, ?, ?)");
-        const insertChunkTrigram = this.#db.prepare("INSERT INTO chunks_trigram (title, content, source_id, content_type) VALUES (?, ?, ?, ?)");
+        const codeChunks = chunks.filter((c) => c.hasCode).length;
         const transaction = this.#db.transaction(() => {
-            const info = insertSource.run(source, chunks.length, 0);
+            const info = this.#stmtInsertSource.run(label, chunks.length, codeChunks);
             const sourceId = Number(info.lastInsertRowid);
             for (const chunk of chunks) {
-                insertChunk.run(chunk.title, chunk.content, sourceId, "prose");
-                insertChunkTrigram.run(chunk.title, chunk.content, sourceId, "prose");
+                const ct = chunk.hasCode ? "code" : "prose";
+                this.#stmtInsertChunk.run(chunk.title, chunk.content, sourceId, ct);
+                this.#stmtInsertChunkTrigram.run(chunk.title, chunk.content, sourceId, ct);
             }
             return sourceId;
         });
         const sourceId = transaction();
-        this.#extractAndStoreVocabulary(content);
+        this.#extractAndStoreVocabulary(text);
         return {
             sourceId,
-            label: source,
+            label,
             totalChunks: chunks.length,
-            codeChunks: 0,
+            codeChunks,
         };
     }
     // ── Search ──
     search(query, limit = 3, source) {
         const sanitized = sanitizeQuery(query);
-        const sourceFilter = source ? "AND sources.label LIKE ?" : "";
-        const stmt = this.#db.prepare(`
-            SELECT
-                chunks.title,
-                chunks.content,
-                chunks.content_type,
-                sources.label,
-                bm25(chunks, 2.0, 1.0) AS rank,
-                highlight(chunks, 1, char(2), char(3)) AS highlighted
-            FROM chunks
-            JOIN sources ON sources.id = chunks.source_id
-            WHERE chunks MATCH ? ${sourceFilter}
-            ORDER BY rank
-            LIMIT ?
-        `);
+        const stmt = source
+            ? this.#stmtSearchPorterFiltered
+            : this.#stmtSearchPorter;
         const params = source
             ? [sanitized, `%${source}%`, limit]
             : [sanitized, limit];
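
The transactional insert relies on a better-sqlite3 guarantee worth spelling out: db.transaction(fn) returns a wrapper that begins a transaction, commits when fn returns, and rolls back if fn throws, so the porter and trigram tables cannot drift apart on a failed insert. Schematically (a sketch, not this package's code):

const insertPair = db.transaction((chunk, sourceId) => {
    insertChunk.run(chunk.title, chunk.content, sourceId, "prose");
    insertChunkTrigram.run(chunk.title, chunk.content, sourceId, "prose");
});
insertPair(someChunk, 1); // both rows are committed, or neither is
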
@@ -297,21 +398,9 @@ export class ContentStore {
         const sanitized = sanitizeTrigramQuery(query);
         if (!sanitized)
             return [];
-        const sourceFilter = source ? "AND sources.label LIKE ?" : "";
-        const stmt = this.#db.prepare(`
-            SELECT
-                chunks_trigram.title,
-                chunks_trigram.content,
-                chunks_trigram.content_type,
-                sources.label,
-                bm25(chunks_trigram, 2.0, 1.0) AS rank,
-                highlight(chunks_trigram, 1, char(2), char(3)) AS highlighted
-            FROM chunks_trigram
-            JOIN sources ON sources.id = chunks_trigram.source_id
-            WHERE chunks_trigram MATCH ? ${sourceFilter}
-            ORDER BY rank
-            LIMIT ?
-        `);
+        const stmt = source
+            ? this.#stmtSearchTrigramFiltered
+            : this.#stmtSearchTrigram;
         const params = source
             ? [sanitized, `%${source}%`, limit]
             : [sanitized, limit];
@@ -331,9 +420,7 @@ export class ContentStore {
         if (word.length < 3)
             return null;
         const maxDist = maxEditDistance(word.length);
-        const candidates = this.#db
-            .prepare("SELECT word FROM vocabulary WHERE length(word) BETWEEN ? AND ?")
-            .all(word.length - maxDist, word.length + maxDist);
+        const candidates = this.#stmtFuzzyVocab.all(word.length - maxDist, word.length + maxDist);
         let bestWord = null;
         let bestDist = maxDist + 1;
         for (const { word: candidate } of candidates) {
@@ -393,22 +480,14 @@ export class ContentStore {
     }
     // ── Sources ──
     listSources() {
-        return this.#db
-            .prepare("SELECT label, chunk_count as chunkCount FROM sources ORDER BY id DESC")
-            .all();
+        return this.#stmtListSources.all();
     }
     /**
      * Get all chunks for a given source by ID — bypasses FTS5 MATCH entirely.
      * Use this for inventory/listing where you need all sections, not search.
      */
     getChunksBySource(sourceId) {
-        const rows = this.#db
-            .prepare(`SELECT c.title, c.content, c.content_type, s.label
-            FROM chunks c
-            JOIN sources s ON s.id = c.source_id
-            WHERE c.source_id = ?
-            ORDER BY c.rowid`)
-            .all(sourceId);
+        const rows = this.#stmtChunksBySource.all(sourceId);
         return rows.map((r) => ({
             title: r.title,
             content: r.content,
@@ -419,19 +498,16 @@ export class ContentStore {
     }
     // ── Vocabulary ──
     getDistinctiveTerms(sourceId, maxTerms = 40) {
-        const stats = this.#db
-            .prepare("SELECT chunk_count FROM sources WHERE id = ?")
-            .get(sourceId);
+        const stats = this.#stmtSourceChunkCount.get(sourceId);
         if (!stats || stats.chunk_count < 3)
             return [];
         const totalChunks = stats.chunk_count;
         const minAppearances = 2;
         const maxAppearances = Math.max(3, Math.ceil(totalChunks * 0.4));
         // Stream chunks one at a time to avoid loading all content into memory
-        const stmt = this.#db.prepare("SELECT content FROM chunks WHERE source_id = ?");
         // Count document frequency (how many sections contain each word)
         const docFreq = new Map();
-        for (const row of stmt.iterate(sourceId)) {
+        for (const row of this.#stmtChunkContent.iterate(sourceId)) {
            const words = new Set(row.content
                .toLowerCase()
                .split(/[^\p{L}\p{N}_-]+/u)
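
The .iterate() call above is better-sqlite3's streaming read: it yields rows lazily from the underlying cursor, whereas .all() would materialize every chunk's content at once. Sketch:

// .all() returns the full result set as an array; .iterate() yields rows
// one at a time, so only a single chunk's content is held in memory.
for (const row of db.prepare("SELECT content FROM chunks").iterate()) {
    tally(row.content); // hypothetical per-row handler
}
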
@@ -458,14 +534,12 @@ export class ContentStore {
     }
     // ── Stats ──
     getStats() {
-        const sources = this.#db.prepare("SELECT COUNT(*) as c FROM sources").get()?.c ?? 0;
-        const chunks = this.#db
-            .prepare("SELECT COUNT(*) as c FROM chunks")
-            .get()?.c ?? 0;
-        const codeChunks = this.#db
-            .prepare("SELECT COUNT(*) as c FROM chunks WHERE content_type = 'code'")
-            .get()?.c ?? 0;
-        return { sources, chunks, codeChunks };
+        const row = this.#stmtStats.get();
+        return {
+            sources: row?.sources ?? 0,
+            chunks: row?.chunks ?? 0,
+            codeChunks: row?.codeChunks ?? 0,
+        };
     }
     // ── Cleanup ──
     close() {
@@ -478,15 +552,14 @@ export class ContentStore {
             .split(/[^\p{L}\p{N}_-]+/u)
             .filter((w) => w.length >= 3 && !STOPWORDS.has(w));
         const unique = [...new Set(words)];
-        const insert = this.#db.prepare("INSERT OR IGNORE INTO vocabulary (word) VALUES (?)");
         this.#db.transaction(() => {
             for (const word of unique) {
-                insert.run(word);
+                this.#stmtInsertVocab.run(word);
             }
         })();
     }
     // ── Chunking ──
-    #chunkMarkdown(text) {
+    #chunkMarkdown(text, maxChunkBytes = MAX_CHUNK_BYTES) {
         const chunks = [];
         const lines = text.split("\n");
         const headingStack = [];
@@ -496,11 +569,43 @@ export class ContentStore {
             const joined = currentContent.join("\n").trim();
             if (joined.length === 0)
                 return;
-            chunks.push({
-                title: this.#buildTitle(headingStack, currentHeading),
-                content: joined,
-                hasCode: currentContent.some((l) => /^`{3,}/.test(l)),
-            });
+            const title = this.#buildTitle(headingStack, currentHeading);
+            const hasCode = currentContent.some((l) => /^`{3,}/.test(l));
+            // If under the cap, emit as-is (fast path — most chunks hit this)
+            if (Buffer.byteLength(joined) <= maxChunkBytes) {
+                chunks.push({ title, content: joined, hasCode });
+                currentContent = [];
+                return;
+            }
+            // Split oversized chunk at paragraph boundaries (double newlines)
+            const paragraphs = joined.split(/\n\n+/);
+            let accumulator = [];
+            let partIndex = 1;
+            const flushAccumulator = () => {
+                if (accumulator.length === 0)
+                    return;
+                const part = accumulator.join("\n\n").trim();
+                if (part.length === 0)
+                    return;
+                const partTitle = paragraphs.length > 1 ? `${title} (${partIndex})` : title;
+                partIndex++;
+                chunks.push({
+                    title: partTitle,
+                    content: part,
+                    hasCode: part.includes("```"),
+                });
+                accumulator = [];
+            };
+            for (const para of paragraphs) {
+                accumulator.push(para);
+                const candidate = accumulator.join("\n\n");
+                if (Buffer.byteLength(candidate) > maxChunkBytes && accumulator.length > 1) {
+                    accumulator.pop();
+                    flushAccumulator();
+                    accumulator = [para];
+                }
+            }
+            flushAccumulator();
             currentContent = [];
         };
         let i = 0;
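
Expected behavior of the splitter, as a hypothetical example with sizes chosen for illustration: two ~3 KB paragraphs under one heading jointly exceed the 4096-byte cap, so they come back as two part-numbered chunks, split only at the blank line between them.

const para = (n) => "x".repeat(n); // hypothetical filler helper
const doc = ["# Setup", "", para(3000), "", para(3000)].join("\n");
// #chunkMarkdown(doc) would emit, roughly:
//   { title: "Setup (1)", content: para(3000), hasCode: false }
//   { title: "Setup (2)", content: para(3000), hasCode: false }
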
@@ -594,6 +699,108 @@ export class ContentStore {
         }
         return chunks;
     }
+    #walkJSON(value, path, chunks, maxChunkBytes) {
+        const title = path.length > 0 ? path.join(" > ") : "(root)";
+        const serialized = JSON.stringify(value, null, 2);
+        // Small enough — emit as a single chunk
+        if (Buffer.byteLength(serialized) <= maxChunkBytes) {
+            // Exception: objects with nested structure (object/array values) always
+            // recurse so that key paths become chunk titles for searchability —
+            // even when the subtree fits in one chunk. Flat objects (all primitive
+            // values) stay as a single chunk since there's no hierarchy to expose.
+            const shouldRecurse = typeof value === "object" &&
+                value !== null &&
+                !Array.isArray(value) &&
+                Object.values(value).some((v) => typeof v === "object" && v !== null);
+            if (!shouldRecurse) {
+                chunks.push({ title, content: serialized, hasCode: true });
+                return;
+            }
+        }
+        // Object — recurse into each key
+        if (typeof value === "object" && value !== null && !Array.isArray(value)) {
+            const entries = Object.entries(value);
+            if (entries.length > 0) {
+                for (const [key, val] of entries) {
+                    this.#walkJSON(val, [...path, key], chunks, maxChunkBytes);
+                }
+                return;
+            }
+            // Empty object — emit as-is
+            chunks.push({ title, content: serialized, hasCode: true });
+            return;
+        }
+        // Array — batch by size with identity-field-aware titles
+        if (Array.isArray(value)) {
+            this.#chunkJSONArray(value, path, chunks, maxChunkBytes);
+            return;
+        }
+        // Primitive that exceeds maxChunkBytes (e.g., very long string)
+        chunks.push({ title, content: serialized, hasCode: false });
+    }
+    /**
+     * Scan the first element of an array of objects for a recognizable
+     * identity field. Returns the field name or null.
+     */
+    #findIdentityField(arr) {
+        if (arr.length === 0)
+            return null;
+        const first = arr[0];
+        if (typeof first !== "object" || first === null || Array.isArray(first))
+            return null;
+        const candidates = ["id", "name", "title", "path", "slug", "key", "label"];
+        const obj = first;
+        for (const field of candidates) {
+            if (field in obj && (typeof obj[field] === "string" || typeof obj[field] === "number")) {
+                return field;
+            }
+        }
+        return null;
+    }
+    #jsonBatchTitle(prefix, startIdx, endIdx, batch, identityField) {
+        const sep = prefix ? `${prefix} > ` : "";
+        if (!identityField) {
+            return startIdx === endIdx
+                ? `${sep}[${startIdx}]`
+                : `${sep}[${startIdx}-${endIdx}]`;
+        }
+        const getId = (item) => String(item[identityField]);
+        if (batch.length === 1) {
+            return `${sep}${getId(batch[0])}`;
+        }
+        if (batch.length <= 3) {
+            return sep + batch.map(getId).join(", ");
+        }
+        return `${sep}${getId(batch[0])}\u2026${getId(batch[batch.length - 1])}`;
+    }
+    #chunkJSONArray(arr, path, chunks, maxChunkBytes) {
+        const prefix = path.length > 0 ? path.join(" > ") : "(root)";
+        const identityField = this.#findIdentityField(arr);
+        let batch = [];
+        let batchStart = 0;
+        const flushBatch = (batchEnd) => {
+            if (batch.length === 0)
+                return;
+            const title = this.#jsonBatchTitle(prefix, batchStart, batchEnd, batch, identityField);
+            chunks.push({
+                title,
+                content: JSON.stringify(batch, null, 2),
+                hasCode: true,
+            });
+        };
+        for (let i = 0; i < arr.length; i++) {
+            batch.push(arr[i]);
+            const candidate = JSON.stringify(batch, null, 2);
+            if (Buffer.byteLength(candidate) > maxChunkBytes && batch.length > 1) {
+                batch.pop();
+                flushBatch(i - 1);
+                batch = [arr[i]];
+                batchStart = i;
+            }
+        }
+        // Flush remaining
+        flushBatch(batchStart + batch.length - 1);
+    }
     #buildTitle(headingStack, currentHeading) {
         if (headingStack.length === 0) {
             return currentHeading || "Untitled";
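
How the walk plays out on a small document, as a hypothetical trace: a flat object that fits under the cap is emitted whole, while an oversized array falls through to #chunkJSONArray, whose batches are titled by the identity field ("name" here) rather than by bare indices.

const doc = {
    server: { host: "localhost", port: 8080 },              // flat, small
    routes: [ /* many objects, > 4096 bytes serialized */ ],
};
// #walkJSON(doc, [], chunks, 4096) would emit, roughly:
//   { title: "server", content: "{ ... }", hasCode: true }
//   { title: "routes > home…search", ... }   // first…last "name" per batch
//   { title: "routes > settings…about", ... }
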
package/hooks/hooks.json CHANGED
@@ -46,6 +46,44 @@
             "command": "node ${CLAUDE_PLUGIN_ROOT}/hooks/pretooluse.mjs"
           }
         ]
+      },
+      {
+        "matcher": "mcp__plugin_context-mode_context-mode__execute",
+        "hooks": [
+          {
+            "type": "command",
+            "command": "node ${CLAUDE_PLUGIN_ROOT}/hooks/pretooluse.mjs"
+          }
+        ]
+      },
+      {
+        "matcher": "mcp__plugin_context-mode_context-mode__execute_file",
+        "hooks": [
+          {
+            "type": "command",
+            "command": "node ${CLAUDE_PLUGIN_ROOT}/hooks/pretooluse.mjs"
+          }
+        ]
+      },
+      {
+        "matcher": "mcp__plugin_context-mode_context-mode__batch_execute",
+        "hooks": [
+          {
+            "type": "command",
+            "command": "node ${CLAUDE_PLUGIN_ROOT}/hooks/pretooluse.mjs"
+          }
+        ]
+      }
+    ],
+    "SessionStart": [
+      {
+        "matcher": "",
+        "hooks": [
+          {
+            "type": "command",
+            "command": "node ${CLAUDE_PLUGIN_ROOT}/hooks/sessionstart.mjs"
+          }
+        ]
       }
     ]
   }
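
The new SessionStart entry runs sessionstart.mjs (not included in this diff) once per session, alongside the PreToolUse matchers that now also cover the execute, execute_file, and batch_execute tools. A hypothetical skeleton of such a hook script, assuming Claude Code's documented contract of a JSON event delivered on stdin:

import { readFileSync } from "node:fs";

const event = JSON.parse(readFileSync(0, "utf-8")); // hook payload on stdin
// event.hook_event_name is "SessionStart" here; stdout from a SessionStart
// hook is surfaced back into the session.
console.log(`context-mode ready (session ${event.session_id})`);
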