@fs/mycroft 0.1.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/README.md +28 -2
  2. package/completions/mycroft.bash +22 -1
  3. package/completions/mycroft.fish +12 -0
  4. package/completions/mycroft.zsh +25 -1
  5. package/dist/batch-embedder-6IIWAZPW.js +14 -0
  6. package/dist/batch-embedder-6IIWAZPW.js.map +1 -0
  7. package/dist/batch-embedder-7DGZAQKL.js +14 -0
  8. package/dist/batch-embedder-7DGZAQKL.js.map +1 -0
  9. package/dist/batch-embedder-IZDBS3IL.js +13 -0
  10. package/dist/batch-embedder-IZDBS3IL.js.map +1 -0
  11. package/dist/batch-embedder-LYCZDYI4.js +15 -0
  12. package/dist/batch-embedder-LYCZDYI4.js.map +1 -0
  13. package/dist/batch-embedder-RHKD2OJD.js +14 -0
  14. package/dist/batch-embedder-RHKD2OJD.js.map +1 -0
  15. package/dist/batch-embedder-VQZUI7R6.js +14 -0
  16. package/dist/batch-embedder-VQZUI7R6.js.map +1 -0
  17. package/dist/batch-embedder-ZJZLNLOK.js +14 -0
  18. package/dist/batch-embedder-ZJZLNLOK.js.map +1 -0
  19. package/dist/batch-summarizer-7MCT4HJB.js +14 -0
  20. package/dist/batch-summarizer-7MCT4HJB.js.map +1 -0
  21. package/dist/batch-summarizer-BMIBVFAE.js +14 -0
  22. package/dist/batch-summarizer-BMIBVFAE.js.map +1 -0
  23. package/dist/chunk-35EO53CC.js +8058 -0
  24. package/dist/chunk-35EO53CC.js.map +1 -0
  25. package/dist/chunk-57ZGGKEF.js +8060 -0
  26. package/dist/chunk-57ZGGKEF.js.map +1 -0
  27. package/dist/chunk-6DLQHHCC.js +249 -0
  28. package/dist/chunk-6DLQHHCC.js.map +1 -0
  29. package/dist/chunk-7CO4PMU5.js +92 -0
  30. package/dist/chunk-7CO4PMU5.js.map +1 -0
  31. package/dist/chunk-7DUQNGEK.js +253 -0
  32. package/dist/chunk-7DUQNGEK.js.map +1 -0
  33. package/dist/chunk-7IPX4MKA.js +4637 -0
  34. package/dist/chunk-7IPX4MKA.js.map +1 -0
  35. package/dist/chunk-7NLMBXXY.js +6438 -0
  36. package/dist/chunk-7NLMBXXY.js.map +1 -0
  37. package/dist/chunk-BR2PM6D3.js +11047 -0
  38. package/dist/chunk-BR2PM6D3.js.map +1 -0
  39. package/dist/chunk-KGG7WEYE.js +162 -0
  40. package/dist/chunk-KGG7WEYE.js.map +1 -0
  41. package/dist/chunk-QRDUQX63.js +256 -0
  42. package/dist/chunk-QRDUQX63.js.map +1 -0
  43. package/dist/chunk-R3FOJK5A.js +2088 -0
  44. package/dist/chunk-R3FOJK5A.js.map +1 -0
  45. package/dist/chunk-XXO66RCF.js +94 -0
  46. package/dist/chunk-XXO66RCF.js.map +1 -0
  47. package/dist/cli.js +1050 -201
  48. package/dist/cli.js.map +1 -1
  49. package/dist/fileFromPath-FLANAQWT.js +128 -0
  50. package/dist/fileFromPath-FLANAQWT.js.map +1 -0
  51. package/dist/main-36PRDAPE.js +1857 -0
  52. package/dist/main-36PRDAPE.js.map +1 -0
  53. package/dist/main-B3QJZGLU.js +1859 -0
  54. package/dist/main-B3QJZGLU.js.map +1 -0
  55. package/package.json +7 -1
package/dist/cli.js CHANGED
@@ -1,154 +1,43 @@
  #!/usr/bin/env node
+ import {
+   submitBatchEmbeddings
+ } from "./chunk-XXO66RCF.js";
+ import {
+   submitBatchSummaries
+ } from "./chunk-7DUQNGEK.js";
+ import {
+   CHUNK_OVERLAP,
+   CHUNK_SIZE,
+   SEPARATORS,
+   SUMMARY_CONCURRENCY,
+   SUMMARY_MAX_TOKENS,
+   SUMMARY_TARGET_WORDS,
+   configPath,
+   ensureConfigDirs,
+   ensureDataDirs,
+   getModels,
+   handleSigint,
+   isAskEnabled,
+   isInteractive,
+   loadConfig,
+   logInfo,
+   logWarn,
+   printError,
+   requireOpenAIKey,
+   resolvePaths,
+   setConfigOverrides,
+   stdout
+ } from "./chunk-KGG7WEYE.js";

  // src/cli.ts
  import { Command } from "commander";
-
- // src/config.ts
- import { mkdir, readFile } from "fs/promises";
- import { homedir } from "os";
- import { dirname, join, resolve } from "path";
- var DEFAULT_CONFIG = {
-   dataDir: "~/.local/share/mycroft",
-   askEnabled: true,
-   models: {
-     embedding: "text-embedding-3-small",
-     summary: "gpt-5-nano",
-     chat: "gpt-5.1"
-   }
- };
- var expandHome = (input) => {
-   if (!input.startsWith("~")) return input;
-   return join(homedir(), input.slice(1));
- };
- var resolvePath = (input) => resolve(expandHome(input));
- var getConfigPath = () => {
-   const override = process.env.MYCROFT_CONFIG;
-   if (override) return resolvePath(override);
-   return resolvePath("~/.config/mycroft/config.json");
- };
- var normalizeModels = (models) => ({
-   embedding: models?.embedding || DEFAULT_CONFIG.models.embedding,
-   summary: models?.summary || DEFAULT_CONFIG.models.summary,
-   chat: models?.chat || DEFAULT_CONFIG.models.chat
- });
- var overrides = {};
- var setConfigOverrides = (next) => {
-   overrides = { ...overrides, ...next };
- };
- var normalizeConfig = (input) => {
-   const dataDirEnv = process.env.MYCROFT_DATA_DIR;
-   const dataDir = overrides.dataDir || dataDirEnv || input?.dataDir || DEFAULT_CONFIG.dataDir;
-   return {
-     dataDir,
-     askEnabled: input?.askEnabled ?? DEFAULT_CONFIG.askEnabled,
-     models: normalizeModels(input?.models)
-   };
- };
- var readConfigFile = async (path) => {
-   try {
-     const contents = await readFile(path, "utf-8");
-     return JSON.parse(contents);
-   } catch {
-     return null;
-   }
- };
- var loadConfig = async () => {
-   const configPath2 = getConfigPath();
-   const data = await readConfigFile(configPath2);
-   const normalized = normalizeConfig(data);
-   return {
-     ...normalized,
-     dataDir: resolvePath(normalized.dataDir)
-   };
- };
- var ensureConfigDirs = async (configPath2) => {
-   const path = configPath2 || getConfigPath();
-   await mkdir(dirname(path), { recursive: true });
- };
- var configPath = () => getConfigPath();
-
- // src/commands/io.ts
- import chalk from "chalk";
- var isTTY = () => Boolean(process.stdout.isTTY);
- var isInteractive = () => Boolean(process.stdin.isTTY && process.stdout.isTTY);
- var formatError = (text) => isTTY() ? chalk.red(text) : text;
- var formatWarn = (text) => isTTY() ? chalk.yellow(text) : text;
- var stdout = (message) => {
-   process.stdout.write(message.endsWith("\n") ? message : `${message}
- `);
- };
- var stderr = (message) => {
-   process.stderr.write(message.endsWith("\n") ? message : `${message}
- `);
- };
- var printError = (message) => {
-   stderr(formatError(`Error: ${message}`));
- };
- var logInfo = (message) => {
-   stderr(message);
- };
- var logWarn = (message) => {
-   stderr(formatWarn(message));
- };
- var handleSigint = (onCancel) => {
-   const handler = () => {
-     if (onCancel) onCancel();
-     stderr("\nCancelled.");
-     process.exit(130);
-   };
-   process.once("SIGINT", handler);
-   return () => process.off("SIGINT", handler);
- };
-
- // src/cli.ts
  import { readFile as readFile2 } from "fs/promises";
- import { dirname as dirname2, resolve as resolve2 } from "path";
+ import { dirname, resolve } from "path";
  import { fileURLToPath } from "url";

  // src/services/epub-parser.ts
  import { initEpubFile } from "@lingo-reader/epub-parser";
  import { basename } from "path";
-
- // src/services/constants.ts
- import { mkdir as mkdir2 } from "fs/promises";
- var CHUNK_SIZE = 1e3;
- var CHUNK_OVERLAP = 100;
- var SEPARATORS = ["\n\n", "\n", ". ", " ", ""];
- var SUMMARY_MAX_TOKENS = 3e4;
- var SUMMARY_CONCURRENCY = 3;
- var SUMMARY_TARGET_WORDS = 250;
- var resolvePaths = async () => {
-   const config = await loadConfig();
-   const dataDir = config.dataDir;
-   return {
-     dataDir,
-     booksDir: `${dataDir}/books`,
-     vectorsDir: `${dataDir}/vectors`,
-     dbPath: `${dataDir}/metadata.db`
-   };
- };
- var ensureDataDirs = async () => {
-   const paths = await resolvePaths();
-   await mkdir2(paths.dataDir, { recursive: true });
-   await mkdir2(paths.booksDir, { recursive: true });
-   await mkdir2(paths.vectorsDir, { recursive: true });
-   return paths;
- };
- var getModels = async () => {
-   const config = await loadConfig();
-   return config.models;
- };
- var isAskEnabled = async () => {
-   const config = await loadConfig();
-   return config.askEnabled;
- };
- var requireOpenAIKey = () => {
-   if (!process.env.OPENAI_API_KEY) {
-     throw new Error("OPENAI_API_KEY is not set. Export it to use embeddings and chat.");
-   }
- };
-
- // src/services/epub-parser.ts
  var detectNarrativeBoundaries = (chapterTitles) => {
    const frontMatterPattern = /^(about|contents|table of contents|dedication|preface|foreword|title|half.?title|copyright|epigraph|frontispiece|map)/i;
    const backMatterPattern = /^(acknowledgment|afterword|appendix|glossary|index|bibliography|about the author|also by|praise|copyright page|notes|bonus|preview|excerpt|major characters|locations)/i;
@@ -264,7 +153,7 @@ var parseEpub = async (epubPath, resourceSaveDir) => {

  // src/services/ingest.ts
  import { randomUUID } from "crypto";
- import { mkdir as mkdir3, unlink, copyFile } from "fs/promises";
+ import { mkdir, unlink, copyFile, readFile, writeFile } from "fs/promises";

  // src/services/chunker.ts
  var splitRecursive = (text, separators) => {
@@ -338,7 +227,7 @@ import { embedMany } from "ai";
  import { openai } from "@ai-sdk/openai";
  var MAX_TOKENS_PER_BATCH = 25e4;
  var CHARS_PER_TOKEN = 4;
- var embedChunks = async (chunks) => {
+ var embedChunks = async (chunks, options) => {
    if (chunks.length === 0) return [];
    const batches = [];
    let currentBatch = [];
@@ -367,10 +256,23 @@ var embedChunks = async (chunks) => {
      model: openai.embeddingModel(models.embedding),
      values: batch.map((chunk) => chunk.content)
    });
+   const embeddedBatch = [];
    for (let j = 0; j < batch.length; j++) {
-     allEmbedded.push({
+     const embeddedChunk = {
        ...batch[j],
        vector: embeddings[j] ?? []
+     };
+     embeddedBatch.push(embeddedChunk);
+     allEmbedded.push({
+       ...embeddedChunk
+     });
+   }
+   if (options?.onBatch) {
+     await options.onBatch(embeddedBatch, {
+       batchIndex: i + 1,
+       batchCount: batches.length,
+       completed: allEmbedded.length,
+       total: chunks.length
      });
    }
  }
@@ -485,8 +387,7 @@ var summarizeSection = async (text, title, sectionNum) => {
      model: openai2(models.summary),
      prompt: `Summarize this section from chapter "${title}" (Part ${sectionNum}). Focus on key events, characters, and revelations. Keep it concise (100-150 words):

- ${text}`,
-     temperature: 0.3
+ ${text}`
    });
    return summary;
  };
@@ -495,8 +396,7 @@ var generateStructuredSummary = async (content, title, chapterIndex) => {
    const models = await getModels();
    const { text } = await generateText({
      model: openai2(models.summary),
-     prompt: SUMMARY_PROMPT(title, chapterIndex + 1, content),
-     temperature: 0.3
+     prompt: SUMMARY_PROMPT(title, chapterIndex + 1, content)
    });
    let jsonText = text.trim();
    if (jsonText.startsWith("```json")) {
@@ -550,7 +450,9 @@ var summarizeChapter = async (chapter, chapterIndex) => {
  };
  var summarizeAllChapters = async (chapters) => {
    const summaries = [];
-   logInfo(`[Summarizer] Starting summarization of ${chapters.length} chapters (concurrency: ${SUMMARY_CONCURRENCY})`);
+   logInfo(
+     `[Summarizer] Starting summarization of ${chapters.length} chapters (concurrency: ${SUMMARY_CONCURRENCY})`
+   );
    for (let i = 0; i < chapters.length; i += SUMMARY_CONCURRENCY) {
      const batch = chapters.slice(i, i + SUMMARY_CONCURRENCY);
      const batchPromises = batch.map((chapter, batchIndex) => summarizeChapter(chapter, i + batchIndex));
@@ -566,6 +468,9 @@ var summarizeAllChapters = async (chapters) => {
    return summaries;
  };

+ // src/db/queries.ts
+ import "better-sqlite3";
+
  // src/db/schema.ts
  import Database from "better-sqlite3";
  var resolveDbPath = async () => {
@@ -588,6 +493,27 @@ var createDb = async () => {
      progress_chapter INTEGER
    );
  `);
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS chat_sessions (
+       id TEXT PRIMARY KEY,
+       book_id TEXT NOT NULL,
+       title TEXT,
+       summary TEXT,
+       created_at INTEGER DEFAULT (strftime('%s','now')),
+       updated_at INTEGER DEFAULT (strftime('%s','now'))
+     );
+   `);
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS chat_messages (
+       id TEXT PRIMARY KEY,
+       session_id TEXT NOT NULL,
+       role TEXT NOT NULL,
+       content TEXT NOT NULL,
+       token_count INTEGER,
+       created_at INTEGER DEFAULT (strftime('%s','now'))
+     );
+   `);
+   db.exec("CREATE INDEX IF NOT EXISTS chat_messages_session_idx ON chat_messages(session_id, created_at)");
    const columns = db.prepare("PRAGMA table_info(books)").all().map((col) => col.name);
    const ensureColumn = (name, definition) => {
      if (!columns.includes(name)) {
@@ -599,6 +525,14 @@ var createDb = async () => {
    ensureColumn("summaries", "summaries TEXT");
    ensureColumn("narrative_start_index", "narrative_start_index INTEGER DEFAULT 0");
    ensureColumn("narrative_end_index", "narrative_end_index INTEGER");
+   ensureColumn("batch_id", "batch_id TEXT");
+   ensureColumn("batch_file_id", "batch_file_id TEXT");
+   ensureColumn("batch_chunks", "batch_chunks TEXT");
+   ensureColumn("ingest_state", "ingest_state TEXT");
+   ensureColumn("ingest_resume_path", "ingest_resume_path TEXT");
+   ensureColumn("summary_batch_id", "summary_batch_id TEXT");
+   ensureColumn("summary_batch_file_id", "summary_batch_file_id TEXT");
+   ensureColumn("summary_batch_chapters", "summary_batch_chapters TEXT");
    return db;
  };

@@ -615,7 +549,13 @@ var mapRow = (row) => ({
    chapters: row.chapters ? JSON.parse(row.chapters) : [],
    progressChapter: row.progress_chapter ?? null,
    narrativeStartIndex: row.narrative_start_index ?? null,
-   narrativeEndIndex: row.narrative_end_index ?? null
+   narrativeEndIndex: row.narrative_end_index ?? null,
+   batchId: row.batch_id ?? null,
+   batchFileId: row.batch_file_id ?? null,
+   ingestState: row.ingest_state ?? null,
+   ingestResumePath: row.ingest_resume_path ?? null,
+   summaryBatchId: row.summary_batch_id ?? null,
+   summaryBatchFileId: row.summary_batch_file_id ?? null
  });
  var dbPromise = null;
  var getDb = async () => {
@@ -691,6 +631,38 @@ var updateBook = async (id, updates) => {
      fields.push("narrative_end_index = @narrativeEndIndex");
      params.narrativeEndIndex = updates.narrativeEndIndex;
    }
+   if (updates.batchId !== void 0) {
+     fields.push("batch_id = @batchId");
+     params.batchId = updates.batchId;
+   }
+   if (updates.batchFileId !== void 0) {
+     fields.push("batch_file_id = @batchFileId");
+     params.batchFileId = updates.batchFileId;
+   }
+   if (updates.batchChunks !== void 0) {
+     fields.push("batch_chunks = @batchChunks");
+     params.batchChunks = updates.batchChunks;
+   }
+   if (updates.ingestState !== void 0) {
+     fields.push("ingest_state = @ingestState");
+     params.ingestState = updates.ingestState;
+   }
+   if (updates.ingestResumePath !== void 0) {
+     fields.push("ingest_resume_path = @ingestResumePath");
+     params.ingestResumePath = updates.ingestResumePath;
+   }
+   if (updates.summaryBatchId !== void 0) {
+     fields.push("summary_batch_id = @summaryBatchId");
+     params.summaryBatchId = updates.summaryBatchId;
+   }
+   if (updates.summaryBatchFileId !== void 0) {
+     fields.push("summary_batch_file_id = @summaryBatchFileId");
+     params.summaryBatchFileId = updates.summaryBatchFileId;
+   }
+   if (updates.summaryBatchChapters !== void 0) {
+     fields.push("summary_batch_chapters = @summaryBatchChapters");
+     params.summaryBatchChapters = updates.summaryBatchChapters;
+   }
    if (fields.length === 0) return;
    const db = await getDb();
    db.prepare(`UPDATE books SET ${fields.join(", ")} WHERE id = @id`).run(params);
@@ -705,12 +677,135 @@ var getBook = async (id) => {
    const row = db.prepare("SELECT * FROM books WHERE id = ?").get(id);
    return row ? mapRow(row) : null;
  };
+ var getBookBatchChunks = async (id) => {
+   const db = await getDb();
+   const row = db.prepare("SELECT batch_chunks FROM books WHERE id = ?").get(id);
+   return row?.batch_chunks ?? null;
+ };
+ var getBookSummaryBatchChapters = async (id) => {
+   const db = await getDb();
+   const row = db.prepare("SELECT summary_batch_chapters FROM books WHERE id = ?").get(id);
+   return row?.summary_batch_chapters ?? null;
+ };
  var deleteBook = async (id) => {
    const db = await getDb();
+   db.prepare("DELETE FROM chat_messages WHERE session_id IN (SELECT id FROM chat_sessions WHERE book_id = ?)").run(id);
+   db.prepare("DELETE FROM chat_sessions WHERE book_id = ?").run(id);
    db.prepare("DELETE FROM books WHERE id = ?").run(id);
  };
+ var mapSession = (row) => ({
+   id: row.id,
+   bookId: row.book_id,
+   title: row.title ?? null,
+   summary: row.summary ?? null,
+   createdAt: row.created_at ?? 0,
+   updatedAt: row.updated_at ?? 0
+ });
+ var mapSessionSummary = (row) => ({
+   ...mapSession(row),
+   bookTitle: row.book_title ?? null
+ });
+ var mapMessage = (row) => ({
+   id: row.id,
+   sessionId: row.session_id,
+   role: row.role,
+   content: row.content,
+   tokenCount: row.token_count ?? null,
+   createdAt: row.created_at ?? 0
+ });
+ var insertChatSession = async (session) => {
+   const db = await getDb();
+   db.prepare(
+     "INSERT INTO chat_sessions (id, book_id, title, summary, created_at, updated_at) VALUES (@id, @bookId, @title, @summary, @createdAt, @updatedAt)"
+   ).run({
+     id: session.id,
+     bookId: session.bookId,
+     title: session.title ?? null,
+     summary: session.summary ?? null,
+     createdAt: session.createdAt ?? Date.now(),
+     updatedAt: session.updatedAt ?? Date.now()
+   });
+   return session.id;
+ };
+ var updateChatSession = async (id, updates) => {
+   const fields = [];
+   const params = { id };
+   if (updates.title !== void 0) {
+     fields.push("title = @title");
+     params.title = updates.title;
+   }
+   if (updates.summary !== void 0) {
+     fields.push("summary = @summary");
+     params.summary = updates.summary;
+   }
+   if (updates.updatedAt !== void 0) {
+     fields.push("updated_at = @updatedAt");
+     params.updatedAt = updates.updatedAt;
+   }
+   if (fields.length === 0) return;
+   const db = await getDb();
+   db.prepare(`UPDATE chat_sessions SET ${fields.join(", ")} WHERE id = @id`).run(params);
+ };
+ var getChatSession = async (id) => {
+   const db = await getDb();
+   const row = db.prepare("SELECT * FROM chat_sessions WHERE id = ?").get(id);
+   return row ? mapSession(row) : null;
+ };
+ var listChatSessions = async () => {
+   const db = await getDb();
+   const rows = db.prepare(
+     "SELECT chat_sessions.*, books.title as book_title FROM chat_sessions LEFT JOIN books ON books.id = chat_sessions.book_id ORDER BY chat_sessions.updated_at DESC"
+   ).all();
+   return rows.map(mapSessionSummary);
+ };
+ var insertChatMessage = async (message) => {
+   const db = await getDb();
+   db.prepare(
+     "INSERT INTO chat_messages (id, session_id, role, content, token_count, created_at) VALUES (@id, @sessionId, @role, @content, @tokenCount, @createdAt)"
+   ).run({
+     id: message.id,
+     sessionId: message.sessionId,
+     role: message.role,
+     content: message.content,
+     tokenCount: message.tokenCount ?? null,
+     createdAt: message.createdAt ?? Date.now()
+   });
+   return message.id;
+ };
+ var getChatMessages = async (sessionId, limit) => {
+   const db = await getDb();
+   const rows = limit !== void 0 ? db.prepare("SELECT * FROM chat_messages WHERE session_id = ? ORDER BY created_at DESC LIMIT ?").all(sessionId, limit) : db.prepare("SELECT * FROM chat_messages WHERE session_id = ? ORDER BY created_at ASC").all(sessionId);
+   const mapped = rows.map(mapMessage);
+   return limit !== void 0 ? mapped.reverse() : mapped;
+ };

  // src/services/ingest.ts
+ var resumePathForBook = async (bookId) => {
+   const paths = await ensureDataDirs();
+   return `${paths.ingestDir}/${bookId}.json`;
+ };
+ var loadResumeState = async (bookId, resumePath) => {
+   const raw = await readFile(resumePath, "utf-8");
+   const parsed = JSON.parse(raw);
+   if (!Array.isArray(parsed.chunks) || typeof parsed.resumeIndex !== "number") {
+     throw new Error(`Invalid resume state for book ${bookId}. Re-ingest to start over.`);
+   }
+   return parsed;
+ };
+ var persistResumeState = async (bookId, state) => {
+   const resumePath = await resumePathForBook(bookId);
+   await writeFile(resumePath, JSON.stringify(state));
+   await updateBook(bookId, {
+     ingestState: "pending",
+     ingestResumePath: resumePath
+   });
+   return resumePath;
+ };
+ var finalizeResumeState = async (bookId, resumePath) => {
+   const path = resumePath || await resumePathForBook(bookId);
+   await unlink(path).catch(() => void 0);
+   await updateBook(bookId, { ingestState: null, ingestResumePath: null });
+ };
  var formatDuration = (ms) => {
    const seconds = Math.round(ms / 100) / 10;
    return `${seconds}s`;
@@ -720,8 +815,9 @@ var ingestEpub = async (filePath, selectedChapterIndices, options) => {
    const paths = await ensureDataDirs();
    const fileName = `${bookId}.epub`;
    const bookPath = `${paths.booksDir}/${fileName}`;
+   let resumePath = null;
    logInfo(`[Ingest] Starting ingestion for book ${bookId}`);
-   await mkdir3(paths.booksDir, { recursive: true });
+   await mkdir(paths.booksDir, { recursive: true });
    await copyFile(filePath, bookPath);
    logInfo(`[Ingest] EPUB file saved to ${bookPath}`);
    const parseStart = Date.now();
@@ -747,7 +843,7 @@ var ingestEpub = async (filePath, selectedChapterIndices, options) => {
    );
    logInfo(`[Ingest] Processing ${chaptersToProcess.length} selected chapters (indices: ${selectedIndices.join(", ")})`);
    let adjustedSummaries = [];
-   if (options?.summarize !== false) {
+   if (options?.summarize !== false && !options?.batch) {
      logInfo(`[Ingest] Generating summaries for ${chaptersToProcess.length} chapters...`);
      const summarizeStart = Date.now();
      const summaries = await summarizeAllChapters(chaptersToProcess);
@@ -775,23 +871,251 @@ var ingestEpub = async (filePath, selectedChapterIndices, options) => {
      );
      const chunks = chunkChapters(bookId, chunksToProcess).filter((chunk) => chunk.content.length > 0);
      logInfo(`[Ingest] Created ${chunks.length} chunks from selected chapters`);
-     const allChunks = [...chunks, ...adjustedSummaries];
-     const embedStart = Date.now();
-     const embedded = await embedChunks(allChunks);
-     logInfo(`[Ingest] Embedded ${embedded.length} total chunks (${formatDuration(Date.now() - embedStart)})`);
-     await addChunksToIndex(bookId, embedded);
-     logInfo(`[Ingest] Added chunks to vector index`);
-     await updateBook(bookId, { chunkCount: embedded.length, indexedAt: Date.now() });
-     logInfo(`[Ingest] Updated book record with chunk count: ${embedded.length}`);
+     if (options?.batch) {
+       if (options?.summarize !== false) {
+         logInfo(`[Ingest] Submitting ${chaptersToProcess.length} chapters for batch summarization`);
+         const { batchId: summaryBatchId, inputFileId: summaryFileId, metadata } = await submitBatchSummaries(chaptersToProcess);
+         await updateBook(bookId, {
+           summaryBatchId,
+           summaryBatchFileId: summaryFileId,
+           summaryBatchChapters: JSON.stringify({ chapters: chaptersToProcess, metadata, selectedIndices, textChunks: chunks })
+         });
+         logInfo(`[Ingest] Summary batch submitted (${summaryBatchId}). Use "mycroft book ingest status ${bookId.slice(0, 8)}" or "mycroft book ingest resume ${bookId.slice(0, 8)}".`);
+       } else {
+         logInfo(`[Ingest] Submitting ${chunks.length} chunks to OpenAI Batch API`);
+         const { batchId, inputFileId } = await submitBatchEmbeddings(chunks);
+         await updateBook(bookId, {
+           batchId,
+           batchFileId: inputFileId,
+           batchChunks: JSON.stringify(chunks)
+         });
+         logInfo(`[Ingest] Batch submitted (${batchId}). Use "mycroft book ingest status ${bookId.slice(0, 8)}" or "mycroft book ingest resume ${bookId.slice(0, 8)}".`);
+       }
+     } else {
+       const allChunks = [...chunks, ...adjustedSummaries];
+       const embedStart = Date.now();
+       resumePath = await persistResumeState(bookId, { chunks: allChunks, resumeIndex: 0 });
+       const embedded = await embedChunks(allChunks, {
+         onBatch: async (embeddedBatch, progress) => {
+           await addChunksToIndex(bookId, embeddedBatch);
+           await updateBook(bookId, { chunkCount: progress.completed });
+           if (!resumePath) return;
+           await writeFile(
+             resumePath,
+             JSON.stringify({ chunks: allChunks, resumeIndex: progress.completed })
+           );
+         }
+       });
+       logInfo(`[Ingest] Embedded ${embedded.length} total chunks (${formatDuration(Date.now() - embedStart)})`);
+       await updateBook(bookId, { chunkCount: embedded.length, indexedAt: Date.now() });
+       logInfo(`[Ingest] Updated book record with chunk count: ${embedded.length}`);
+       await finalizeResumeState(bookId, resumePath);
+     }
    } catch (error) {
      logWarn(`[Ingest] Error during chunking/embedding: ${error instanceof Error ? error.message : String(error)}`);
-     await deleteBookIndex(bookId);
-     await unlink(bookPath).catch(() => void 0);
-     await deleteBook(bookId).catch(() => void 0);
+     if (resumePath) {
+       logWarn(`[Ingest] Partial progress saved. Use "mycroft book ingest status ${bookId.slice(0, 8)}" or "mycroft book ingest resume ${bookId.slice(0, 8)}".`);
+       return { id: bookId, status: "interrupted" };
+     } else {
+       await deleteBookIndex(bookId);
+       await unlink(bookPath).catch(() => void 0);
+       await deleteBook(bookId).catch(() => void 0);
+     }
      throw error;
    }
    logInfo(`[Ingest] Ingestion complete for ${bookId}`);
-   return { id: bookId };
+   return { id: bookId, status: "completed" };
+ };
+ var resumeIngest = async (bookId, storedChunks, batchId, batchFileId) => {
+   const { checkBatchStatus, downloadBatchResults, cleanupBatchFiles } = await import("./batch-embedder-ZJZLNLOK.js");
+   logInfo(`[Resume] Checking embedding batch ${batchId} for book ${bookId}`);
+   const status = await checkBatchStatus(batchId);
+   logInfo(`[Resume] Batch status: ${status.status} (completed: ${status.completed}/${status.total})`);
+   if (["validating", "in_progress", "finalizing"].includes(status.status)) {
+     return { status: status.status, completed: status.completed, total: status.total };
+   }
+   if (status.status === "failed" || status.status === "expired" || status.status === "cancelled") {
+     logWarn(`[Resume] Batch ${batchId} ended with status "${status.status}". Re-submitting...`);
+     await cleanupBatchFiles(batchFileId, status.outputFileId);
+     const { submitBatchEmbeddings: submitBatchEmbeddings2 } = await import("./batch-embedder-ZJZLNLOK.js");
+     const { batchId: newBatchId, inputFileId: newFileId } = await submitBatchEmbeddings2(storedChunks);
+     await updateBook(bookId, { batchId: newBatchId, batchFileId: newFileId });
+     logInfo(`[Resume] New batch submitted (${newBatchId}). Run resume again later.`);
+     return { status: "resubmitted", batchId: newBatchId };
+   }
+   if (status.status !== "completed") {
+     throw new Error(`Unexpected batch status: ${status.status}`);
+   }
+   if (!status.outputFileId) {
+     logWarn(`[Resume] Batch ${batchId} completed but produced no output (${status.failed}/${status.total} failed). Re-submitting...`);
+     await cleanupBatchFiles(batchFileId, null);
+     const { submitBatchEmbeddings: submitBatchEmbeddings2 } = await import("./batch-embedder-ZJZLNLOK.js");
+     const { batchId: newBatchId, inputFileId: newFileId } = await submitBatchEmbeddings2(storedChunks);
+     await updateBook(bookId, { batchId: newBatchId, batchFileId: newFileId });
+     logInfo(`[Resume] New batch submitted (${newBatchId}). Run resume again later.`);
+     return { status: "resubmitted", batchId: newBatchId };
+   }
+   const embedded = await downloadBatchResults(status.outputFileId, storedChunks);
+   await addChunksToIndex(bookId, embedded);
+   logInfo(`[Resume] Added ${embedded.length} chunks to vector index`);
+   await updateBook(bookId, {
+     chunkCount: embedded.length,
+     indexedAt: Date.now(),
+     batchId: null,
+     batchFileId: null,
+     batchChunks: null
+   });
+   logInfo(`[Resume] Book ${bookId} indexing complete`);
+   await cleanupBatchFiles(batchFileId, status.outputFileId);
+   return { status: "completed" };
+ };
+ var resumeSummaryBatch = async (bookId, summaryBatchId, summaryBatchFileId, storedData) => {
+   const { checkBatchStatus, cleanupBatchFiles } = await import("./batch-embedder-ZJZLNLOK.js");
+   const { downloadBatchSummaryResults, submitMergePass, downloadMergeResults } = await import("./batch-summarizer-BMIBVFAE.js");
+   logInfo(`[Resume] Checking summary batch ${summaryBatchId} for book ${bookId}`);
+   const status = await checkBatchStatus(summaryBatchId);
+   logInfo(`[Resume] Summary batch status: ${status.status} (completed: ${status.completed}/${status.total})`);
+   if (["validating", "in_progress", "finalizing"].includes(status.status)) {
+     return { status: status.status, completed: status.completed, total: status.total, phase: "summary" };
+   }
+   if (status.status === "failed" || status.status === "expired" || status.status === "cancelled") {
+     logWarn(`[Resume] Summary batch ${summaryBatchId} ended with status "${status.status}". Re-submitting...`);
+     await cleanupBatchFiles(summaryBatchFileId, status.outputFileId);
+     const { submitBatchSummaries: submitBatchSummaries2 } = await import("./batch-summarizer-BMIBVFAE.js");
+     const { batchId: newBatchId, inputFileId: newFileId, metadata: newMetadata } = await submitBatchSummaries2(storedData.chapters);
+     await updateBook(bookId, {
+       summaryBatchId: newBatchId,
+       summaryBatchFileId: newFileId,
+       summaryBatchChapters: JSON.stringify({ ...storedData, metadata: newMetadata })
+     });
+     logInfo(`[Resume] New summary batch submitted (${newBatchId}).`);
+     return { status: "resubmitted", batchId: newBatchId, phase: "summary" };
+   }
+   if (status.status !== "completed") {
+     throw new Error(`Unexpected summary batch status: ${status.status}`);
+   }
+   if (!status.outputFileId) {
+     logWarn(`[Resume] Summary batch ${summaryBatchId} completed but produced no output (${status.failed}/${status.total} failed). Re-submitting...`);
+     await cleanupBatchFiles(summaryBatchFileId, null);
+     const { submitBatchSummaries: submitBatchSummaries2 } = await import("./batch-summarizer-BMIBVFAE.js");
+     const { batchId: newBatchId, inputFileId: newFileId, metadata: newMetadata } = await submitBatchSummaries2(storedData.chapters);
+     await updateBook(bookId, {
+       summaryBatchId: newBatchId,
+       summaryBatchFileId: newFileId,
+       summaryBatchChapters: JSON.stringify({ ...storedData, metadata: newMetadata })
+     });
+     logInfo(`[Resume] New summary batch submitted (${newBatchId}).`);
+     return { status: "resubmitted", batchId: newBatchId, phase: "summary" };
+   }
+   let { summaries, needsMergePass } = await downloadBatchSummaryResults(
+     status.outputFileId,
+     storedData.chapters,
+     storedData.metadata
+   );
+   await cleanupBatchFiles(summaryBatchFileId, status.outputFileId);
+   if (needsMergePass.length > 0) {
+     logInfo(`[Resume] ${needsMergePass.length} chapters need merge pass, submitting merge batch...`);
+     const mergeResult = await submitMergePass(needsMergePass);
+     await updateBook(bookId, {
+       summaryBatchId: mergeResult.batchId,
+       summaryBatchFileId: mergeResult.inputFileId,
+       summaryBatchChapters: JSON.stringify({
+         ...storedData,
+         metadata: mergeResult.metadata,
+         completedSummaries: summaries,
+         isMergePass: true
+       })
+     });
+     return { status: "merge_submitted", batchId: mergeResult.batchId, phase: "summary" };
+   }
+   return await finalizeSummariesAndSubmitEmbeddings(bookId, summaries, storedData);
+ };
+ var resumeMergeBatch = async (bookId, summaryBatchId, summaryBatchFileId, storedData) => {
+   const { checkBatchStatus, cleanupBatchFiles } = await import("./batch-embedder-ZJZLNLOK.js");
+   const { downloadMergeResults } = await import("./batch-summarizer-BMIBVFAE.js");
+   logInfo(`[Resume] Checking merge batch ${summaryBatchId} for book ${bookId}`);
+   const status = await checkBatchStatus(summaryBatchId);
+   logInfo(`[Resume] Merge batch status: ${status.status} (completed: ${status.completed}/${status.total})`);
+   if (["validating", "in_progress", "finalizing"].includes(status.status)) {
+     return { status: status.status, completed: status.completed, total: status.total, phase: "summary" };
+   }
+   if (status.status !== "completed") {
+     throw new Error(`Unexpected merge batch status: ${status.status}`);
+   }
+   if (!status.outputFileId) {
+     throw new Error(`Merge batch completed but produced no output (${status.failed}/${status.total} failed). Re-ingest to start over.`);
+   }
+   const mergedSummaries = await downloadMergeResults(
+     status.outputFileId,
+     storedData.metadata.map((m) => ({ chapterIndex: m.chapterIndex, title: m.title }))
+   );
+   await cleanupBatchFiles(summaryBatchFileId, status.outputFileId);
+   const allSummaries = [...storedData.completedSummaries || [], ...mergedSummaries];
+   return await finalizeSummariesAndSubmitEmbeddings(bookId, allSummaries, storedData);
+ };
+ var finalizeSummariesAndSubmitEmbeddings = async (bookId, summaries, storedData) => {
+   const { submitBatchEmbeddings: submitBatchEmbeddings2 } = await import("./batch-embedder-ZJZLNLOK.js");
+   const summaryRecords = summaries.map((s) => ({
+     ...s,
+     chapterIndex: storedData.selectedIndices[s.chapterIndex] ?? s.chapterIndex
+   }));
+   await updateBook(bookId, {
+     summaries: JSON.stringify(summaryRecords)
+   });
+   const summaryChunks = summaryRecords.map((s) => ({
+     id: `${bookId}-summary-${s.chapterIndex}`,
+     bookId,
+     chapterIndex: s.chapterIndex,
+     chapterTitle: s.chapterTitle,
+     chunkIndex: -1,
+     content: s.fullSummary,
+     type: "summary"
+   }));
+   logInfo(`[Resume] Created ${summaryChunks.length} summary chunks from ${summaries.length} summaries`);
+   const allChunks = [...storedData.textChunks, ...summaryChunks];
+   logInfo(`[Resume] Submitting ${allChunks.length} chunks for batch embedding`);
+   const { batchId, inputFileId } = await submitBatchEmbeddings2(allChunks);
+   await updateBook(bookId, {
+     summaryBatchId: null,
+     summaryBatchFileId: null,
+     summaryBatchChapters: null,
+     batchId,
+     batchFileId: inputFileId,
+     batchChunks: JSON.stringify(allChunks)
+   });
+   logInfo(`[Resume] Embedding batch submitted (${batchId}). Run resume again when batch completes.`);
+   return { status: "embeddings_submitted", batchId, phase: "embedding" };
+ };
+ var resumeLocalIngest = async (bookId, resumePath, currentChunkCount) => {
+   const state = await loadResumeState(bookId, resumePath);
+   const total = state.chunks.length;
+   const startIndex = Math.max(state.resumeIndex, currentChunkCount);
+   if (startIndex >= total) {
+     await finalizeResumeState(bookId, resumePath);
+     throw new Error(`Resume state already completed for book ${bookId}.`);
+   }
+   logInfo(`[Resume] Resuming local embeddings at chunk ${startIndex + 1}/${total}`);
+   const embedStart = Date.now();
+   const remaining = state.chunks.slice(startIndex);
+   const embeddedRemaining = await embedChunks(remaining, {
+     onBatch: async (embeddedBatch, progress) => {
+       const completed = startIndex + progress.completed;
+       await addChunksToIndex(bookId, embeddedBatch);
+       await updateBook(bookId, { chunkCount: completed });
+       await writeFile(
+         resumePath,
+         JSON.stringify({ chunks: state.chunks, resumeIndex: completed })
+       );
+     }
+   });
+   logInfo(`[Resume] Embedded ${embeddedRemaining.length} remaining chunks (${formatDuration(Date.now() - embedStart)})`);
+   const finalCount = startIndex + embeddedRemaining.length;
+   await updateBook(bookId, {
+     chunkCount: finalCount,
+     indexedAt: Date.now()
+   });
+   await finalizeResumeState(bookId, resumePath);
+   return { status: "completed", chunkCount: finalCount };
  };

  // src/commands/ingest.ts
@@ -872,17 +1196,51 @@ var ingestCommand = async (filePath, options) => {
        );
      }
    }
-   const result = await ingestEpub(filePath, selectedChapterIndices, { summarize: options.summarize ?? false });
-   stdout(`
+   const result = await ingestEpub(filePath, selectedChapterIndices, { summarize: options.summarize ?? false, batch: options.batch ?? false });
+   const shortId = result.id.slice(0, 8);
+   if (result.status === "interrupted") {
+     stdout(`
+ Ingest interrupted.`);
+     stdout(` mycroft book ingest status ${shortId} # check progress`);
+     stdout(` mycroft book ingest resume ${shortId} # continue ingestion`);
+     return;
+   }
+   if (options.batch) {
+     const batchType = options.summarize ? "Summary batch" : "Embedding batch";
+     stdout(`
+ ${batchType} submitted. Book registered as ${result.id}`);
+     stdout(` mycroft book ingest status ${shortId} # check batch progress`);
+     stdout(` mycroft book ingest resume ${shortId} # continue when batch finishes`);
+   } else {
+     stdout(`
  Done. Book indexed as ${result.id}`);
+   }
  };

  // src/commands/book/ingest.ts
  var registerBookIngest = (program2) => {
-   program2.command("ingest").description("Ingest an EPUB file").argument("<path>", "Path to the EPUB file").option("--manual", "Interactive chapter selection").option("--summary", "Enable AI chapter summaries").action(async (path, options) => {
+   const ingest = program2.command("ingest").description("Ingest an EPUB file").argument("<path>", "Path to the EPUB file").option("--manual", "Interactive chapter selection").option("--summary", "Enable AI chapter summaries").option("--batch", "Use OpenAI Batch API for embeddings and summaries (50% cost savings, up to 24h)").addHelpText(
+     "after",
+     `
+ EXAMPLES
+ mycroft book ingest ./book.epub
+ mycroft book ingest ./book.epub --summary
+ mycroft book ingest ./book.epub --batch --summary
+ mycroft book ingest status 8f2c1a4b
+ mycroft book ingest resume 8f2c1a4b
+
+ NOTES
+ --batch submits work to the OpenAI Batch API and returns immediately.
+ When combined with --summary, summaries are batched first, then embeddings.
+ Use "mycroft book ingest status <id>" to check progress.
+ Use "mycroft book ingest resume <id>" to continue when a batch completes.
+ Non-batch ingests can also be resumed if interrupted.
+ `
+   ).action(async (path, options) => {
      const summarize = Boolean(options.summary);
-     await ingestCommand(path, { manual: options.manual, summarize });
+     await ingestCommand(path, { manual: options.manual, summarize, batch: options.batch });
    });
+   return ingest;
  };

  // src/commands/list.ts
@@ -905,7 +1263,7 @@ var listCommand = async () => {
      const author = book.author || "-";
      const chunks = String(book.chunkCount ?? 0);
      const indexed = formatDate(book.indexedAt);
-     const status = book.indexedAt ? "[indexed]" : "[pending]";
+     const status = book.indexedAt ? "[indexed]" : book.batchId ? "[batch pending]" : book.ingestState === "pending" ? "[resume pending]" : "[pending]";
      stdout(`${shortId} | ${title} | ${author} | ${chunks} | ${indexed} | ${status}`);
    }
  };
@@ -948,6 +1306,7 @@ var showCommand = async (id) => {
    stdout(`Indexed: ${book.indexedAt ? new Date(book.indexedAt).toISOString() : "-"}`);
    stdout(`Narrative range: ${book.narrativeStartIndex ?? 0} to ${book.narrativeEndIndex ?? book.chapters.length - 1}`);
    stdout(`Progress chapter: ${book.progressChapter ?? "-"}`);
+   stdout(`Ingest status: ${book.ingestState ?? "-"}`);
    stdout("\nChapters:");
    book.chapters.forEach((title, index) => {
      const marker = index === book.narrativeStartIndex ? "[start]" : index === book.narrativeEndIndex ? "[end]" : "";
@@ -1035,21 +1394,27 @@ ${context}`
    }
  };

+ // src/commands/query-options.ts
+ var parseQueryOptions = (options) => {
+   const topK = Number(options.topK);
+   if (!Number.isFinite(topK) || topK <= 0) {
+     throw new Error("--top-k must be a positive number.");
+   }
+   let maxChapter;
+   if (options.maxChapter !== void 0) {
+     const parsed = Number(options.maxChapter);
+     if (!Number.isFinite(parsed) || parsed < 0) {
+       throw new Error("--max-chapter must be a non-negative number.");
+     }
+     maxChapter = parsed;
+   }
+   return { topK, maxChapter };
+ };
+
  // src/commands/book/ask.ts
  var registerBookAsk = (program2) => {
    program2.command("ask").description("Ask a question about a book").argument("<id>", "Book id or prefix").argument("<question>", "Question to ask").option("--top-k <n>", "Number of passages to retrieve", "5").option("--max-chapter <n>", "Spoiler-free limit (0-based within narrative)").action(async (id, question, options) => {
-     const topK = Number(options.topK);
-     if (!Number.isFinite(topK) || topK <= 0) {
-       throw new Error("--top-k must be a positive number.");
-     }
-     let maxChapter;
-     if (options.maxChapter !== void 0) {
-       const parsed = Number(options.maxChapter);
-       if (!Number.isFinite(parsed) || parsed < 0) {
-         throw new Error("--max-chapter must be a non-negative number.");
-       }
-       maxChapter = parsed;
-     }
+     const { topK, maxChapter } = parseQueryOptions(options);
      await askCommand(id, question, { topK, maxChapter });
    });
  };
@@ -1092,18 +1457,7 @@ var searchCommand = async (id, query, options) => {
  // src/commands/book/search.ts
  var registerBookSearch = (program2) => {
    program2.command("search").description("Vector search without LLM").argument("<id>", "Book id or prefix").argument("<query>", "Search query").option("--top-k <n>", "Number of passages to retrieve", "5").option("--max-chapter <n>", "Spoiler-free limit (0-based within narrative)").action(async (id, query, options) => {
-     const topK = Number(options.topK);
-     if (!Number.isFinite(topK) || topK <= 0) {
-       throw new Error("--top-k must be a positive number.");
-     }
-     let maxChapter;
-     if (options.maxChapter !== void 0) {
-       const parsed = Number(options.maxChapter);
-       if (!Number.isFinite(parsed) || parsed < 0) {
-         throw new Error("--max-chapter must be a non-negative number.");
-       }
-       maxChapter = parsed;
-     }
+     const { topK, maxChapter } = parseQueryOptions(options);
      await searchCommand(id, query, { topK, maxChapter });
    });
  };
@@ -1145,6 +1499,216 @@ var registerBookDelete = (program2) => {
    });
  };

+ // src/commands/resume.ts
+ var resumeCommand = async (id) => {
+   requireOpenAIKey();
+   await ensureDataDirs();
+   const resolvedId = await resolveBookId(id);
+   if (!resolvedId) {
+     throw new Error(`Book not found: ${id}`);
+   }
+   const book = await getBook(resolvedId);
+   if (!book) {
+     throw new Error(`Book not found: ${id}`);
+   }
+   if (book.indexedAt) {
+     stdout(`Book "${book.title}" is already indexed (${book.chunkCount} chunks).`);
+     return;
+   }
+   const shortId = resolvedId.slice(0, 8);
+   if (book.summaryBatchId) {
+     const rawData = await getBookSummaryBatchChapters(resolvedId);
+     if (!rawData) {
+       throw new Error(`No stored summary batch data for book "${book.title}". Re-ingest with "mycroft book ingest --batch --summary".`);
+     }
+     const storedData = JSON.parse(rawData);
+     let result2;
+     if (storedData.isMergePass) {
+       result2 = await resumeMergeBatch(resolvedId, book.summaryBatchId, book.summaryBatchFileId ?? book.summaryBatchId, storedData);
+     } else {
+       result2 = await resumeSummaryBatch(resolvedId, book.summaryBatchId, book.summaryBatchFileId ?? book.summaryBatchId, storedData);
+     }
+     if (result2.status === "embeddings_submitted") {
+       stdout(`
+ Summaries complete. Embedding batch submitted (${result2.batchId}).`);
+       stdout(` mycroft book ingest status ${shortId} # check embedding batch progress`);
+       stdout(` mycroft book ingest resume ${shortId} # complete ingestion once batch finishes`);
+     } else if (result2.status === "merge_submitted") {
+       stdout(`
+ Section summaries complete. Merge batch submitted (${result2.batchId}).`);
+       stdout(` mycroft book ingest status ${shortId} # check merge batch progress`);
+       stdout(` mycroft book ingest resume ${shortId} # continue when batch finishes`);
+     } else if (result2.status === "resubmitted") {
+       stdout(`
+ Summary batch failed and was re-submitted (${result2.batchId}).`);
+       stdout(` mycroft book ingest status ${shortId} # check batch progress`);
+       stdout(` mycroft book ingest resume ${shortId} # continue when batch finishes`);
+     } else {
+       stdout(`
+ Summary batch still in progress (${result2.status}: ${result2.completed}/${result2.total}).`);
+       stdout(` mycroft book ingest status ${shortId} # check batch progress`);
+       stdout(` mycroft book ingest resume ${shortId} # retry when batch finishes`);
+     }
+     return;
+   }
+   if (book.batchId) {
+     const rawChunks = await getBookBatchChunks(resolvedId);
+     if (!rawChunks) {
+       throw new Error(`No stored chunks found for book "${book.title}". Re-ingest with "mycroft book ingest --batch".`);
+     }
+     const chunks = JSON.parse(rawChunks);
+     const result2 = await resumeIngest(resolvedId, chunks, book.batchId, book.batchFileId ?? book.batchId);
+     if (result2.status === "completed") {
+       stdout(`
+ Done. Book "${book.title}" indexed as ${book.id}`);
+     } else if (result2.status === "resubmitted") {
+       stdout(`
+ Batch failed and was re-submitted (${result2.batchId}).`);
+       stdout(` mycroft book ingest status ${shortId} # check batch progress`);
+       stdout(` mycroft book ingest resume ${shortId} # complete ingestion once batch finishes`);
+     } else {
+       stdout(`
+ Batch still in progress (${result2.status}: ${result2.completed}/${result2.total}).`);
+       stdout(` mycroft book ingest status ${shortId} # check batch progress`);
+       stdout(` mycroft book ingest resume ${shortId} # retry when batch finishes`);
+     }
+     return;
+   }
+   if (!book.ingestResumePath || book.ingestState !== "pending") {
+     throw new Error(`Book "${book.title}" has no resumable ingest. Re-ingest to start one.`);
+   }
+   const result = await resumeLocalIngest(resolvedId, book.ingestResumePath, book.chunkCount ?? 0);
+   if (result.status === "completed") {
+     stdout(`
+ Done. Book "${book.title}" indexed as ${book.id}`);
+   }
+ };
+
+ // src/commands/book/resume.ts
+ var registerBookResume = (program2, ingest) => {
+   const target = ingest ?? program2.command("ingest");
+   target.command("resume").description("Resume a pending ingestion").argument("<id>", "Book id or prefix").addHelpText(
+     "after",
+     `
+ EXAMPLES
+ mycroft book ingest resume 8f2c1a4b
+
+ NOTES
+ Resumes either batch or non-batch ingests if interrupted.
+ `
+   ).action(async (id) => {
+     await resumeCommand(id);
+   });
+ };
+
+ // src/commands/status.ts
+ var statusCommand = async (id) => {
+   await ensureDataDirs();
+   const resolvedId = await resolveBookId(id);
+   if (!resolvedId) {
+     throw new Error(`Book not found: ${id}`);
+   }
+   const book = await getBook(resolvedId);
+   if (!book) {
+     throw new Error(`Book not found: ${id}`);
+   }
+   const shortId = resolvedId.slice(0, 8);
+   stdout(`Book: ${book.title}`);
+   stdout(`ID: ${book.id}`);
+   if (book.indexedAt) {
+     stdout(`
+ Status: completed`);
+     stdout(`Chunks: ${book.chunkCount}`);
+     stdout(`Indexed: ${new Date(book.indexedAt).toLocaleString()}`);
+     return;
+   }
+   if (book.summaryBatchId) {
+     requireOpenAIKey();
+     const { checkBatchStatus } = await import("./batch-embedder-ZJZLNLOK.js");
+     const status = await checkBatchStatus(book.summaryBatchId);
+     stdout(`
+ Status: summary batch ${status.status}`);
+     stdout(`Batch: ${book.summaryBatchId}`);
+     stdout(`Progress: ${status.completed}/${status.total} requests${status.failed > 0 ? ` (${status.failed} failed)` : ""}`);
+     if (status.status === "completed") {
+       if (status.failed > 0 && status.completed === 0) {
+         stdout(`
+ All requests failed. Run resume to re-submit.`);
+       } else {
+         stdout(`
+ Summary batch is ready.`);
+       }
+       stdout(` mycroft book ingest resume ${shortId} # process summaries and submit embedding batch`);
+     } else if (["failed", "expired", "cancelled"].includes(status.status)) {
+       stdout(`
+ Summary batch ended with "${status.status}".`);
+       stdout(` mycroft book ingest resume ${shortId} # re-submit summary batch`);
+     } else {
+       stdout(`
+ Summary batch still processing.`);
+       stdout(` mycroft book ingest status ${shortId} # check again later`);
+       stdout(` mycroft book ingest resume ${shortId} # resume when ready`);
+     }
+     return;
+   }
+   if (book.batchId) {
+     requireOpenAIKey();
+     const { checkBatchStatus } = await import("./batch-embedder-ZJZLNLOK.js");
+     const status = await checkBatchStatus(book.batchId);
+     stdout(`
+ Status: embedding batch ${status.status}`);
+     stdout(`Batch: ${book.batchId}`);
+     stdout(`Progress: ${status.completed}/${status.total} requests${status.failed > 0 ? ` (${status.failed} failed)` : ""}`);
+     if (status.status === "completed") {
+       if (status.failed > 0 && status.completed === 0) {
+         stdout(`
+ All requests failed. Run resume to re-submit.`);
+       } else {
+         stdout(`
+ Embedding batch is ready.`);
+       }
+       stdout(` mycroft book ingest resume ${shortId} # complete indexing`);
+     } else if (["failed", "expired", "cancelled"].includes(status.status)) {
+       stdout(`
+ Embedding batch ended with "${status.status}".`);
+       stdout(` mycroft book ingest resume ${shortId} # re-submit embedding batch`);
+     } else {
+       stdout(`
+ Embedding batch still processing.`);
+       stdout(` mycroft book ingest status ${shortId} # check again later`);
+       stdout(` mycroft book ingest resume ${shortId} # resume when ready`);
+     }
+     return;
+   }
+   if (book.ingestResumePath && book.ingestState === "pending") {
+     stdout(`
+ Status: interrupted`);
+     stdout(`Chunks completed: ${book.chunkCount}`);
+     stdout(` mycroft book ingest resume ${shortId} # continue ingestion`);
+     return;
+   }
+   stdout(`
+ Status: no active ingestion`);
+ };
+
+ // src/commands/book/status.ts
+ var registerBookStatus = (program2, ingest) => {
+   const target = ingest ?? program2.command("ingest");
+   target.command("status").description("Check ingestion status for a book").argument("<id>", "Book id or prefix").addHelpText(
+     "after",
+     `
+ EXAMPLES
+ mycroft book ingest status 8f2c1a4b
+
+ NOTES
+ For batch ingests, queries the OpenAI API for live progress.
+ For local ingests, shows how many chunks have been completed.
+ `
+   ).action(async (id) => {
+     await statusCommand(id);
+   });
+ };
+
  // src/commands/config.ts
  var configCommand = async () => {
    const path = configPath();
@@ -1159,7 +1723,7 @@ var registerConfigPath = (program2) => {
  };

  // src/commands/init-config.ts
- import { mkdir as mkdir4, writeFile, access as access2 } from "fs/promises";
+ import { mkdir as mkdir2, writeFile as writeFile2, access as access2 } from "fs/promises";
  var initConfigCommand = async () => {
    const path = configPath();
    await ensureConfigDirs(path);
@@ -1175,8 +1739,8 @@ var initConfigCommand = async () => {
      askEnabled: resolved.askEnabled,
      models: resolved.models
    };
-   await writeFile(path, JSON.stringify(template, null, 2), "utf-8");
-   await mkdir4(resolved.dataDir, { recursive: true });
+   await writeFile2(path, JSON.stringify(template, null, 2), "utf-8");
+   await mkdir2(resolved.dataDir, { recursive: true });
    stdout(`Created config at ${path}`);
  };

@@ -1205,7 +1769,7 @@ var registerConfigResolve = (program2) => {
  };

  // src/commands/onboard.ts
- import { writeFile as writeFile2 } from "fs/promises";
+ import { writeFile as writeFile3 } from "fs/promises";
  var isDefault = (input) => input === "" || input.toLowerCase() === "-y";
  var parseBoolean = (input, fallback) => {
    if (isDefault(input)) return fallback;
@@ -1233,7 +1797,7 @@ var onboardCommand = async () => {
    const chatInput = await prompt(`Chat model [${defaults.models.chat}]: `);
    const chat = isDefault(chatInput) ? defaults.models.chat : chatInput;
    await ensureConfigDirs(path);
-   await writeFile2(
+   await writeFile3(
      path,
      JSON.stringify(
        {
@@ -1269,11 +1833,293 @@ var registerConfigOnboard = (program2) => {
1269
1833
  });
1270
1834
  };
1271
1835
 
1836
+ // src/services/chat.ts
1837
+ import { randomUUID as randomUUID2 } from "crypto";
1838
+ import { embed as embed3, generateText as generateText2 } from "ai";
1839
+ import { openai as openai5 } from "@ai-sdk/openai";
1840
+ var MAX_RECENT_MESSAGES = 12;
1841
+ var SUMMARY_TRIGGER_MESSAGES = 24;
1842
+ var SUMMARY_TARGET_WORDS2 = 160;
1843
+ var formatContext2 = (chunks) => chunks.map(
1844
+ (chunk, index) => `Excerpt [${index + 1}] (${chunk.chapterTitle || `Chapter ${chunk.chapterIndex + 1}`}):
1845
+ ${chunk.content}`
1846
+ ).join("\n\n");
1847
+ var estimateTokens2 = (text) => Math.ceil(text.length / 4);
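// Editorial note: estimateTokens2 is the rough "1 token per ~4 characters"
// heuristic for English text, e.g. estimateTokens2("How does chapter two end?")
// === Math.ceil(25 / 4) === 7; here it only populates the stored tokenCount
// fields on chat messages.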
+ var summarizeMessages = async (messages) => {
+   const transcript = messages.map((message) => `${message.role.toUpperCase()}: ${message.content}`).join("\n\n");
+   const models = await getModels();
+   const { text } = await generateText2({
+     model: openai5(models.summary),
+     prompt: `Summarize this conversation so far in ~${SUMMARY_TARGET_WORDS2} words. Focus on facts, decisions, and unresolved questions.
+
+ ${transcript}`
+   });
+   return text.trim();
+ };
+ var buildConversationContext = (session, messages) => {
+   const summary = session.summary ? `Conversation summary:
+ ${session.summary}` : "";
+   const recent = messages.slice(-MAX_RECENT_MESSAGES).map((message) => `${message.role.toUpperCase()}: ${message.content}`).join("\n\n");
+   return [summary, recent].filter(Boolean).join("\n\n");
+ };
+ var maybeSummarizeSession = async (session, messages, updatedAt) => {
+   if (messages.length < SUMMARY_TRIGGER_MESSAGES) return;
+   const summary = await summarizeMessages(messages.slice(0, -MAX_RECENT_MESSAGES));
+   await updateChatSession(session.id, { summary, updatedAt });
+ };
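// ---------------------------------------------------------------------------
// Editorial sketch (not part of the package): session memory keeps the last
// MAX_RECENT_MESSAGES (12) turns verbatim and, once a session reaches
// SUMMARY_TRIGGER_MESSAGES (24) messages, folds everything older into a
// ~160-word rolling summary. The windowing itself is plain array slicing:
const history = Array.from({ length: 30 }, (_, i) => `msg ${i}`); // hypothetical transcript
const keptVerbatim = history.slice(-12);  // what buildConversationContext inlines
const toCompress = history.slice(0, -12); // what summarizeMessages would condense
console.log(keptVerbatim.length, toCompress.length); // 12 18
// ---------------------------------------------------------------------------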
+ var listSessions = async () => listChatSessions();
+ var getSession = async (id) => getChatSession(id);
+ var getSessionMessages = async (sessionId, limit) => getChatMessages(sessionId, limit);
+ var startSession = async (bookId, title) => {
+   await ensureDataDirs();
+   const resolvedId = await resolveBookId(bookId);
+   if (!resolvedId) {
+     throw new Error(`Book not found: ${bookId}`);
+   }
+   const sessionId = randomUUID2();
+   await insertChatSession({
+     id: sessionId,
+     bookId: resolvedId,
+     title: title ?? null,
+     summary: null
+   });
+   const session = await getChatSession(sessionId);
+   if (!session) {
+     throw new Error("Failed to create chat session.");
+   }
+   return session;
+ };
+ var chatAsk = async (sessionId, question, options) => {
+   if (!await isAskEnabled()) {
+     throw new Error("Ask is disabled in config (askEnabled: false). Enable it to use this command.");
+   }
+   requireOpenAIKey();
+   await ensureDataDirs();
+   const session = await getChatSession(sessionId);
+   if (!session) {
+     throw new Error(`Chat session not found: ${sessionId}`);
+   }
+   const book = await getBook(session.bookId);
+   if (!book) {
+     throw new Error(`Book not found: ${session.bookId}`);
+   }
+   const models = await getModels();
+   const { embedding } = await embed3({
+     model: openai5.embeddingModel(models.embedding),
+     value: question
+   });
+   const narrativeStart = book.narrativeStartIndex ?? 0;
+   const userProgress = book.progressChapter ?? null;
+   const maxChapterIndex = options.maxChapter !== void 0 ? narrativeStart + options.maxChapter : userProgress !== null ? narrativeStart + userProgress : void 0;
+   const retrievalLimit = options.topK * 3;
+   const allMatches = await queryBookIndex(session.bookId, embedding, question, retrievalLimit, maxChapterIndex);
+   const summaries = allMatches.filter((m) => m.type === "summary");
+   const chunks = allMatches.filter((m) => m.type !== "summary");
+   const topSummaries = summaries.slice(0, 2);
+   const topChunks = chunks.slice(0, Math.max(0, options.topK - topSummaries.length));
+   const selectedMatches = [...topSummaries, ...topChunks];
+   const context = formatContext2(selectedMatches);
+   const messages = await getChatMessages(sessionId);
+   const conversation = buildConversationContext(session, messages);
+   const now = Date.now();
+   const userMessage = {
+     id: randomUUID2(),
+     sessionId,
+     role: "user",
+     content: question,
+     tokenCount: estimateTokens2(question),
+     createdAt: now
+   };
+   await insertChatMessage(userMessage);
+   const prompt2 = [
+     conversation ? `Conversation:
+ ${conversation}` : "",
+     `Question: ${question}`,
+     context
+   ].filter(Boolean).join("\n\n");
+   const { text } = await generateText2({
+     model: openai5(models.chat),
+     system: `You are a reading companion helping readers understand this book.
+
+ Guidelines:
+ - Use the provided chapter summaries and excerpts to answer questions
+ - Chapter summaries provide high-level context about characters, events, and plot
+ - Excerpts provide specific details and quotes
+ - When asked for recaps or "what happened", synthesize from summaries
+ - Don't cite table of contents, front matter, or structural elements
+ - If truly unsure, briefly say so - but try to answer from available context first
+ - Cite sources using [1], [2], etc. at the end of relevant sentences
+ - The context may be limited to earlier chapters only - don't infer beyond what's provided`,
+     prompt: prompt2
+   });
+   const assistantMessage = {
+     id: randomUUID2(),
+     sessionId,
+     role: "assistant",
+     content: text,
+     tokenCount: estimateTokens2(text),
+     createdAt: now
+   };
+   await insertChatMessage(assistantMessage);
+   const updatedAt = Date.now();
+   await updateChatSession(sessionId, { updatedAt });
+   await maybeSummarizeSession(session, [...messages, userMessage, assistantMessage], updatedAt);
+   return { answer: text, sources: selectedMatches };
+ };
+
1970
+ // src/commands/chat/start.ts
1971
+ var registerChatStart = (program2) => {
1972
+ program2.command("start").description("Start a chat session for a book").argument("<id>", "Book id or prefix").option("--title <title>", "Session title").action(async (id, options) => {
1973
+ const session = await startSession(id, options.title);
1974
+ stdout(`Started chat session ${session.id} for book ${session.bookId}`);
1975
+ });
1976
+ };
1977
+
1978
+ // src/commands/chat/utils.ts
1979
+ var resolveChatSessionId = async (input) => {
1980
+ const sessions = await listChatSessions();
1981
+ const exact = sessions.find((session) => session.id === input);
1982
+ if (exact) return exact.id;
1983
+ const matches = sessions.filter((session) => session.id.startsWith(input));
1984
+ if (matches.length === 1) return matches[0].id;
1985
+ if (matches.length > 1) {
1986
+ throw new Error(`Ambiguous session id prefix "${input}" (${matches.length} matches)`);
1987
+ }
1988
+ return null;
1989
+ };
1990
+
1991
+ // src/commands/chat/ask.ts
1992
+ var registerChatAsk = (program2) => {
1993
+ program2.command("ask").description("Ask a question in a chat session").argument("<session>", "Chat session id or prefix").argument("<question>", "Question to ask").option("--top-k <n>", "Number of passages to retrieve", "5").option("--max-chapter <n>", "Spoiler-free limit (0-based within narrative)").action(async (sessionId, question, options) => {
1994
+ const { topK, maxChapter } = parseQueryOptions(options);
1995
+ const resolvedId = await resolveChatSessionId(sessionId);
1996
+ if (!resolvedId) {
1997
+ throw new Error(`Chat session not found: ${sessionId}`);
1998
+ }
1999
+ const { answer, sources } = await chatAsk(resolvedId, question, { topK, maxChapter });
2000
+ stdout(answer);
2001
+ if (sources.length > 0) {
2002
+ stdout("\nSources:");
2003
+ sources.forEach((match, index) => {
2004
+ const title = match.chapterTitle || `Chapter ${match.chapterIndex + 1}`;
2005
+ const excerpt = match.content.slice(0, 120).replace(/\s+/g, " ");
2006
+ stdout(`[${index + 1}] ${title}: ${excerpt}`);
2007
+ });
2008
+ }
2009
+ });
2010
+ };
2011
+
2012
+ // src/commands/chat/list.ts
2013
+ var formatDate2 = (timestamp) => {
2014
+ if (!timestamp) return "-";
2015
+ return new Date(timestamp).toISOString().slice(0, 10);
2016
+ };
2017
+ var registerChatList = (program2) => {
2018
+ program2.command("list").description("List chat sessions").action(async () => {
2019
+ const sessions = await listSessions();
2020
+ if (sessions.length === 0) {
2021
+ stdout("No chat sessions yet.");
2022
+ return;
2023
+ }
2024
+ stdout("ID | Book | Updated | Title");
2025
+ stdout("---------|------|---------|------");
2026
+ for (const session of sessions) {
2027
+ const shortId = session.id.slice(0, 8);
2028
+ const book = session.bookTitle || session.bookId.slice(0, 8);
2029
+ const updated = formatDate2(session.updatedAt);
2030
+ const title = session.title || "-";
2031
+ stdout(`${shortId} | ${book} | ${updated} | ${title}`);
2032
+ }
2033
+ });
2034
+ };
2035
+
2036
+ // src/commands/chat/show.ts
2037
+ var registerChatShow = (program2) => {
2038
+ program2.command("show").description("Show chat session details").argument("<session>", "Chat session id or prefix").option("--tail <n>", "Show last N messages", "10").action(async (sessionId, options) => {
2039
+ const tail = Number(options.tail);
2040
+ if (!Number.isFinite(tail) || tail <= 0) {
2041
+ throw new Error("--tail must be a positive number.");
2042
+ }
2043
+ const resolvedId = await resolveChatSessionId(sessionId);
2044
+ if (!resolvedId) {
2045
+ throw new Error(`Chat session not found: ${sessionId}`);
2046
+ }
2047
+ const session = await getSession(resolvedId);
2048
+ if (!session) {
2049
+ throw new Error(`Chat session not found: ${sessionId}`);
2050
+ }
2051
+ stdout(`ID: ${session.id}`);
2052
+ stdout(`Book ID: ${session.bookId}`);
2053
+ stdout(`Title: ${session.title ?? "-"}`);
2054
+ const updated = session.updatedAt ? new Date(session.updatedAt).toISOString() : "-";
2055
+ stdout(`Updated: ${updated}`);
2056
+ const messages = await getSessionMessages(resolvedId, tail);
2057
+ if (messages.length === 0) {
2058
+ stdout("\nNo messages yet.");
2059
+ return;
2060
+ }
2061
+ stdout("\nMessages:");
2062
+ messages.forEach((message) => {
2063
+ stdout(`[${message.role}] ${message.content}`);
2064
+ });
2065
+ });
2066
+ };
2067
+
2068
+ // src/commands/chat/repl.ts
2069
+ var shouldExit = (input) => {
2070
+ const normalized = input.trim().toLowerCase();
2071
+ return normalized === "exit" || normalized === "quit" || normalized === ":q";
2072
+ };
2073
+ var registerChatRepl = (program2) => {
2074
+ program2.command("repl").description("Start interactive chat session").argument("<session>", "Chat session id or prefix").option("--top-k <n>", "Number of passages to retrieve", "5").option("--max-chapter <n>", "Spoiler-free limit (0-based within narrative)").action(async (sessionId, options) => {
2075
+ if (!isInteractive()) {
2076
+ throw new Error("Chat repl requires an interactive terminal.");
2077
+ }
2078
+ const { topK, maxChapter } = parseQueryOptions(options);
2079
+ const resolvedId = await resolveChatSessionId(sessionId);
2080
+ if (!resolvedId) {
2081
+ throw new Error(`Chat session not found: ${sessionId}`);
2082
+ }
2083
+ const session = await getSession(resolvedId);
2084
+ if (!session) {
2085
+ throw new Error(`Chat session not found: ${sessionId}`);
2086
+ }
2087
+ stdout(`Chatting in session ${session.id}. Type 'exit' to quit.`);
2088
+ while (true) {
2089
+ const question = await prompt("You: ");
2090
+ if (!question.trim()) continue;
2091
+ if (shouldExit(question)) break;
2092
+ const { answer, sources } = await chatAsk(session.id, question, { topK, maxChapter });
2093
+ stdout(`
2094
+ ${answer}`);
2095
+ if (sources.length > 0) {
2096
+ stdout("\nSources:");
2097
+ sources.forEach((match, index) => {
2098
+ const title = match.chapterTitle || `Chapter ${match.chapterIndex + 1}`;
2099
+ const excerpt = match.content.slice(0, 120).replace(/\s+/g, " ");
2100
+ stdout(`[${index + 1}] ${title}: ${excerpt}`);
2101
+ });
2102
+ }
2103
+ stdout("");
2104
+ }
2105
+ });
2106
+ };
2107
+
2108
+ // src/commands/chat/index.ts
2109
+ var registerChatCommands = (program2) => {
2110
+ const chat = program2.command("chat").description("Run multi-turn chat sessions");
2111
+ registerChatStart(chat);
2112
+ registerChatAsk(chat);
2113
+ registerChatList(chat);
2114
+ registerChatShow(chat);
2115
+ registerChatRepl(chat);
2116
+ };
2117
+
1272
2118
  // src/cli.ts
1273
2119
  var resolveVersion = async () => {
1274
2120
  try {
1275
- const currentDir = dirname2(fileURLToPath(import.meta.url));
1276
- const pkgPath = resolve2(currentDir, "../package.json");
2121
+ const currentDir = dirname(fileURLToPath(import.meta.url));
2122
+ const pkgPath = resolve(currentDir, "../package.json");
1277
2123
  const raw = await readFile2(pkgPath, "utf-8");
1278
2124
  return JSON.parse(raw).version || "0.1.0";
1279
2125
  } catch {
@@ -1291,17 +2137,20 @@ var configureProgram = async () => {
1291
2137
  };
1292
2138
  var registerCommands = () => {
1293
2139
  const book = program.command("book").description("Manage books and queries");
1294
- registerBookIngest(book);
2140
+ const ingest = registerBookIngest(book);
1295
2141
  registerBookList(book);
1296
2142
  registerBookShow(book);
1297
2143
  registerBookAsk(book);
1298
2144
  registerBookSearch(book);
1299
2145
  registerBookDelete(book);
2146
+ registerBookResume(book, ingest);
2147
+ registerBookStatus(book, ingest);
1300
2148
  const config = program.command("config").description("Manage configuration");
1301
2149
  registerConfigPath(config);
1302
2150
  registerConfigInit(config);
1303
2151
  registerConfigResolve(config);
1304
2152
  registerConfigOnboard(config);
2153
+ registerChatCommands(program);
1305
2154
  };
1306
2155
  program.exitOverride((error) => {
1307
2156
  if (error.code === "commander.helpDisplayed") {