rag-lite-ts 1.0.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (204)
  1. package/README.md +651 -109
  2. package/dist/cli/indexer.js +262 -46
  3. package/dist/cli/search.js +54 -32
  4. package/dist/cli.js +185 -28
  5. package/dist/config.d.ts +34 -73
  6. package/dist/config.js +50 -255
  7. package/dist/core/abstract-embedder.d.ts +125 -0
  8. package/dist/core/abstract-embedder.js +264 -0
  9. package/dist/core/actionable-error-messages.d.ts +60 -0
  10. package/dist/core/actionable-error-messages.js +397 -0
  11. package/dist/core/adapters.d.ts +93 -0
  12. package/dist/core/adapters.js +139 -0
  13. package/dist/core/batch-processing-optimizer.d.ts +155 -0
  14. package/dist/core/batch-processing-optimizer.js +541 -0
  15. package/dist/core/chunker.d.ts +119 -0
  16. package/dist/core/chunker.js +73 -0
  17. package/dist/core/cli-database-utils.d.ts +53 -0
  18. package/dist/core/cli-database-utils.js +239 -0
  19. package/dist/core/config.d.ts +102 -0
  20. package/dist/core/config.js +247 -0
  21. package/dist/core/content-errors.d.ts +111 -0
  22. package/dist/core/content-errors.js +362 -0
  23. package/dist/core/content-manager.d.ts +343 -0
  24. package/dist/core/content-manager.js +1504 -0
  25. package/dist/core/content-performance-optimizer.d.ts +150 -0
  26. package/dist/core/content-performance-optimizer.js +516 -0
  27. package/dist/core/content-resolver.d.ts +104 -0
  28. package/dist/core/content-resolver.js +285 -0
  29. package/dist/core/cross-modal-search.d.ts +164 -0
  30. package/dist/core/cross-modal-search.js +342 -0
  31. package/dist/core/database-connection-manager.d.ts +109 -0
  32. package/dist/core/database-connection-manager.js +304 -0
  33. package/dist/core/db.d.ts +245 -0
  34. package/dist/core/db.js +952 -0
  35. package/dist/core/embedder-factory.d.ts +176 -0
  36. package/dist/core/embedder-factory.js +338 -0
  37. package/dist/{error-handler.d.ts → core/error-handler.d.ts} +23 -2
  38. package/dist/{error-handler.js → core/error-handler.js} +51 -8
  39. package/dist/core/index.d.ts +59 -0
  40. package/dist/core/index.js +69 -0
  41. package/dist/core/ingestion.d.ts +213 -0
  42. package/dist/core/ingestion.js +812 -0
  43. package/dist/core/interfaces.d.ts +408 -0
  44. package/dist/core/interfaces.js +106 -0
  45. package/dist/core/lazy-dependency-loader.d.ts +152 -0
  46. package/dist/core/lazy-dependency-loader.js +453 -0
  47. package/dist/core/mode-detection-service.d.ts +150 -0
  48. package/dist/core/mode-detection-service.js +565 -0
  49. package/dist/core/mode-model-validator.d.ts +92 -0
  50. package/dist/core/mode-model-validator.js +203 -0
  51. package/dist/core/model-registry.d.ts +120 -0
  52. package/dist/core/model-registry.js +415 -0
  53. package/dist/core/model-validator.d.ts +217 -0
  54. package/dist/core/model-validator.js +782 -0
  55. package/dist/{path-manager.d.ts → core/path-manager.d.ts} +5 -0
  56. package/dist/{path-manager.js → core/path-manager.js} +5 -0
  57. package/dist/core/polymorphic-search-factory.d.ts +154 -0
  58. package/dist/core/polymorphic-search-factory.js +344 -0
  59. package/dist/core/raglite-paths.d.ts +121 -0
  60. package/dist/core/raglite-paths.js +145 -0
  61. package/dist/core/reranking-config.d.ts +42 -0
  62. package/dist/core/reranking-config.js +156 -0
  63. package/dist/core/reranking-factory.d.ts +92 -0
  64. package/dist/core/reranking-factory.js +591 -0
  65. package/dist/core/reranking-strategies.d.ts +325 -0
  66. package/dist/core/reranking-strategies.js +720 -0
  67. package/dist/core/resource-cleanup.d.ts +163 -0
  68. package/dist/core/resource-cleanup.js +371 -0
  69. package/dist/core/resource-manager.d.ts +212 -0
  70. package/dist/core/resource-manager.js +564 -0
  71. package/dist/core/search-pipeline.d.ts +111 -0
  72. package/dist/core/search-pipeline.js +287 -0
  73. package/dist/core/search.d.ts +131 -0
  74. package/dist/core/search.js +296 -0
  75. package/dist/core/streaming-operations.d.ts +145 -0
  76. package/dist/core/streaming-operations.js +409 -0
  77. package/dist/core/types.d.ts +66 -0
  78. package/dist/core/types.js +6 -0
  79. package/dist/core/universal-embedder.d.ts +177 -0
  80. package/dist/core/universal-embedder.js +139 -0
  81. package/dist/core/validation-messages.d.ts +99 -0
  82. package/dist/core/validation-messages.js +334 -0
  83. package/dist/{vector-index.d.ts → core/vector-index.d.ts} +4 -0
  84. package/dist/{vector-index.js → core/vector-index.js} +21 -3
  85. package/dist/dom-polyfills.d.ts +6 -0
  86. package/dist/dom-polyfills.js +40 -0
  87. package/dist/factories/index.d.ts +43 -0
  88. package/dist/factories/index.js +44 -0
  89. package/dist/factories/text-factory.d.ts +560 -0
  90. package/dist/factories/text-factory.js +968 -0
  91. package/dist/file-processor.d.ts +90 -4
  92. package/dist/file-processor.js +723 -20
  93. package/dist/index-manager.d.ts +3 -2
  94. package/dist/index-manager.js +13 -11
  95. package/dist/index.d.ts +72 -8
  96. package/dist/index.js +102 -16
  97. package/dist/indexer.js +1 -1
  98. package/dist/ingestion.d.ts +44 -154
  99. package/dist/ingestion.js +75 -671
  100. package/dist/mcp-server.d.ts +35 -3
  101. package/dist/mcp-server.js +1186 -79
  102. package/dist/multimodal/clip-embedder.d.ts +314 -0
  103. package/dist/multimodal/clip-embedder.js +945 -0
  104. package/dist/multimodal/index.d.ts +6 -0
  105. package/dist/multimodal/index.js +6 -0
  106. package/dist/preprocess.js +1 -1
  107. package/dist/run-error-recovery-tests.d.ts +7 -0
  108. package/dist/run-error-recovery-tests.js +101 -0
  109. package/dist/search-standalone.js +1 -1
  110. package/dist/search.d.ts +51 -69
  111. package/dist/search.js +117 -412
  112. package/dist/test-utils.d.ts +8 -26
  113. package/dist/text/chunker.d.ts +33 -0
  114. package/dist/{chunker.js → text/chunker.js} +98 -75
  115. package/dist/{embedder.d.ts → text/embedder.d.ts} +22 -1
  116. package/dist/{embedder.js → text/embedder.js} +84 -10
  117. package/dist/text/index.d.ts +8 -0
  118. package/dist/text/index.js +9 -0
  119. package/dist/text/preprocessors/index.d.ts +17 -0
  120. package/dist/text/preprocessors/index.js +38 -0
  121. package/dist/text/preprocessors/mdx.d.ts +25 -0
  122. package/dist/text/preprocessors/mdx.js +101 -0
  123. package/dist/text/preprocessors/mermaid.d.ts +68 -0
  124. package/dist/text/preprocessors/mermaid.js +330 -0
  125. package/dist/text/preprocessors/registry.d.ts +56 -0
  126. package/dist/text/preprocessors/registry.js +180 -0
  127. package/dist/text/reranker.d.ts +59 -0
  128. package/dist/{reranker.js → text/reranker.js} +138 -53
  129. package/dist/text/sentence-transformer-embedder.d.ts +96 -0
  130. package/dist/text/sentence-transformer-embedder.js +340 -0
  131. package/dist/{tokenizer.d.ts → text/tokenizer.d.ts} +1 -0
  132. package/dist/{tokenizer.js → text/tokenizer.js} +7 -2
  133. package/dist/types.d.ts +40 -1
  134. package/dist/utils/vector-math.d.ts +31 -0
  135. package/dist/utils/vector-math.js +70 -0
  136. package/package.json +16 -4
  137. package/dist/api-errors.d.ts.map +0 -1
  138. package/dist/api-errors.js.map +0 -1
  139. package/dist/chunker.d.ts +0 -47
  140. package/dist/chunker.d.ts.map +0 -1
  141. package/dist/chunker.js.map +0 -1
  142. package/dist/cli/indexer.d.ts.map +0 -1
  143. package/dist/cli/indexer.js.map +0 -1
  144. package/dist/cli/search.d.ts.map +0 -1
  145. package/dist/cli/search.js.map +0 -1
  146. package/dist/cli.d.ts.map +0 -1
  147. package/dist/cli.js.map +0 -1
  148. package/dist/config.d.ts.map +0 -1
  149. package/dist/config.js.map +0 -1
  150. package/dist/db.d.ts +0 -90
  151. package/dist/db.d.ts.map +0 -1
  152. package/dist/db.js +0 -340
  153. package/dist/db.js.map +0 -1
  154. package/dist/embedder.d.ts.map +0 -1
  155. package/dist/embedder.js.map +0 -1
  156. package/dist/error-handler.d.ts.map +0 -1
  157. package/dist/error-handler.js.map +0 -1
  158. package/dist/file-processor.d.ts.map +0 -1
  159. package/dist/file-processor.js.map +0 -1
  160. package/dist/index-manager.d.ts.map +0 -1
  161. package/dist/index-manager.js.map +0 -1
  162. package/dist/index.d.ts.map +0 -1
  163. package/dist/index.js.map +0 -1
  164. package/dist/indexer.d.ts.map +0 -1
  165. package/dist/indexer.js.map +0 -1
  166. package/dist/ingestion.d.ts.map +0 -1
  167. package/dist/ingestion.js.map +0 -1
  168. package/dist/mcp-server.d.ts.map +0 -1
  169. package/dist/mcp-server.js.map +0 -1
  170. package/dist/path-manager.d.ts.map +0 -1
  171. package/dist/path-manager.js.map +0 -1
  172. package/dist/preprocess.d.ts.map +0 -1
  173. package/dist/preprocess.js.map +0 -1
  174. package/dist/preprocessors/index.d.ts.map +0 -1
  175. package/dist/preprocessors/index.js.map +0 -1
  176. package/dist/preprocessors/mdx.d.ts.map +0 -1
  177. package/dist/preprocessors/mdx.js.map +0 -1
  178. package/dist/preprocessors/mermaid.d.ts.map +0 -1
  179. package/dist/preprocessors/mermaid.js.map +0 -1
  180. package/dist/preprocessors/registry.d.ts.map +0 -1
  181. package/dist/preprocessors/registry.js.map +0 -1
  182. package/dist/reranker.d.ts +0 -40
  183. package/dist/reranker.d.ts.map +0 -1
  184. package/dist/reranker.js.map +0 -1
  185. package/dist/resource-manager-demo.d.ts +0 -7
  186. package/dist/resource-manager-demo.d.ts.map +0 -1
  187. package/dist/resource-manager-demo.js +0 -52
  188. package/dist/resource-manager-demo.js.map +0 -1
  189. package/dist/resource-manager.d.ts +0 -129
  190. package/dist/resource-manager.d.ts.map +0 -1
  191. package/dist/resource-manager.js +0 -389
  192. package/dist/resource-manager.js.map +0 -1
  193. package/dist/search-standalone.d.ts.map +0 -1
  194. package/dist/search-standalone.js.map +0 -1
  195. package/dist/search.d.ts.map +0 -1
  196. package/dist/search.js.map +0 -1
  197. package/dist/test-utils.d.ts.map +0 -1
  198. package/dist/test-utils.js.map +0 -1
  199. package/dist/tokenizer.d.ts.map +0 -1
  200. package/dist/tokenizer.js.map +0 -1
  201. package/dist/types.d.ts.map +0 -1
  202. package/dist/types.js.map +0 -1
  203. package/dist/vector-index.d.ts.map +0 -1
  204. package/dist/vector-index.js.map +0 -1
@@ -0,0 +1,812 @@
1
+ /**
2
+ * CORE MODULE — Shared between text-only (rag-lite-ts) and future multimodal (rag-lite-mm)
3
+ * Model-agnostic. No transformer or modality-specific logic.
4
+ */
5
+ import { discoverAndProcessFiles } from '../file-processor.js';
6
+ import { chunkDocument } from './chunker.js';
7
+ import { insertChunk, upsertDocument } from './db.js';
8
+ import { config } from './config.js';
9
+ import { DocumentPathManager } from './path-manager.js';
10
+ import { existsSync } from 'fs';
11
+ import { ContentManager } from './content-manager.js';
12
+ /**
13
+ * Main ingestion pipeline class
14
+ * Coordinates the entire process from file discovery to vector storage
15
+ * Uses explicit dependency injection for clean architecture
16
+ */
17
+ export class IngestionPipeline {
18
+ embedFn;
19
+ indexManager;
20
+ db;
21
+ defaultChunkConfig;
22
+ pathManager;
23
+ contentManager;
24
+ /**
25
+ * Creates a new IngestionPipeline with explicit dependency injection
26
+ * Enhanced with ContentManager integration for unified content system
27
+ *
28
+ * DEPENDENCY INJECTION PATTERN:
29
+ * This constructor requires all dependencies to be explicitly provided, enabling:
30
+ * - Clean separation between core ingestion logic and implementation-specific components
31
+ * - Support for different embedding models and content types
32
+ * - Testability through mock injection
33
+ * - Future extensibility for multimodal content processing
34
+ * - Unified content management for both filesystem and memory-based ingestion
35
+ *
36
+ * @param embedFn - Function to embed document chunks into vectors
37
+ * - Signature: (query: string, contentType?: string) => Promise<EmbeddingResult>
38
+ * - Must handle chunk text and return consistent embedding format
39
+ * - Examples:
40
+ * - Text: const embedFn = (text) => textEmbedder.embedSingle(text)
41
+ * - Multimodal: const embedFn = (content, type) => type === 'image' ? clipEmbedder.embedImage(content) : clipEmbedder.embedText(content)
42
+ * - Custom: const embedFn = (text) => customModel.embed(text)
43
+ *
44
+ * @param indexManager - Vector index manager for storing embeddings
45
+ * - Handles vector storage and indexing operations
46
+ * - Must support the embedding dimensions produced by embedFn
47
+ * - Example: new IndexManager('./index.bin')
48
+ *
49
+ * @param db - Database connection for metadata storage
50
+ * - Stores document and chunk metadata with content type support
51
+ * - Supports different content types through metadata fields
52
+ * - Example: await openDatabase('./db.sqlite')
53
+ *
54
+ * @param contentManager - Optional ContentManager for unified content system
55
+ * - Handles content storage routing and deduplication
56
+ * - If not provided, creates default instance with standard configuration
57
+ * - Example: new ContentManager(db, { contentDir: '.raglite/content' })
58
+ *
59
+ * USAGE EXAMPLES:
60
+ * ```typescript
61
+ * // Text-only ingestion pipeline with unified content system
62
+ * const textEmbedFn = await createTextEmbedder();
63
+ * const indexManager = new IndexManager('./index.bin');
64
+ * const db = await openDatabase('./db.sqlite');
65
+ * const contentManager = new ContentManager(db);
66
+ * const ingestion = new IngestionPipeline(textEmbedFn, indexManager, db, undefined, contentManager);
67
+ *
68
+ * // Simple usage (ContentManager created automatically)
69
+ * const ingestion = new IngestionPipeline(textEmbedFn, indexManager, db);
70
+ *
71
+ * // Custom embedding implementation with memory ingestion
72
+ * const customEmbedFn = async (text) => ({
73
+ * embedding_id: generateId(),
74
+ * vector: await myCustomModel.embed(text)
75
+ * });
76
+ * const ingestion = new IngestionPipeline(customEmbedFn, indexManager, db);
77
+ * await ingestion.ingestFromMemory(buffer, { displayName: 'file.txt' });
78
+ * ```
79
+ */
80
+ constructor(embedFn, indexManager, db, defaultChunkConfig, contentManager) {
81
+ this.embedFn = embedFn;
82
+ this.indexManager = indexManager;
83
+ this.db = db;
84
+ this.defaultChunkConfig = defaultChunkConfig;
85
+ // Validate required dependencies
86
+ if (!embedFn || typeof embedFn !== 'function') {
87
+ throw new Error('embedFn must be a valid function');
88
+ }
89
+ if (!indexManager) {
90
+ throw new Error('indexManager is required');
91
+ }
92
+ if (!db) {
93
+ throw new Error('db connection is required');
94
+ }
95
+ // Initialize path manager with default configuration
96
+ this.pathManager = new DocumentPathManager(config.path_storage_strategy, process.cwd());
97
+ // Initialize ContentManager (create default if not provided)
98
+ this.contentManager = contentManager || new ContentManager(this.db);
99
+ }
100
+ /**
101
+ * Ingest documents from a directory
102
+ * @param directoryPath - Path to directory containing documents
103
+ * @param options - Optional ingestion configuration
104
+ * @returns Promise resolving to ingestion results
105
+ */
106
+ async ingestDirectory(directoryPath, options = {}) {
107
+ if (!existsSync(directoryPath)) {
108
+ throw new Error(`Directory not found: ${directoryPath}`);
109
+ }
110
+ return this.ingestPath(directoryPath, options);
111
+ }
112
+ /**
113
+ * Ingest a single file
114
+ * @param filePath - Path to the file to ingest
115
+ * @param options - Optional ingestion configuration
116
+ * @returns Promise resolving to ingestion results
117
+ */
118
+ async ingestFile(filePath, options = {}) {
119
+ if (!existsSync(filePath)) {
120
+ throw new Error(`File not found: ${filePath}`);
121
+ }
122
+ return this.ingestPath(filePath, options);
123
+ }
124
+ /**
125
+ * Ingest content from memory buffer
126
+ * Enables MCP integration and real-time content processing
127
+ * @param content - Buffer containing the content to ingest
128
+ * @param metadata - Memory content metadata including display name and content type
129
+ * @param options - Optional ingestion configuration
130
+ * @returns Promise resolving to content ID for the ingested content
131
+ */
132
+ async ingestFromMemory(content, metadata, options = {}) {
133
+ const startTime = Date.now();
134
+ console.log(`\n=== Starting memory ingestion: ${metadata.displayName} ===`);
135
+ try {
136
+ // Phase 1: Content Storage via ContentManager
137
+ console.log('\n--- Phase 1: Content Storage ---');
138
+ const contentResult = await this.contentManager.ingestFromMemory(content, metadata);
139
+ if (contentResult.wasDeduped) {
140
+ console.log(`✓ Content deduplicated: ${metadata.displayName} (ID: ${contentResult.contentId})`);
141
+ return contentResult.contentId;
142
+ }
143
+ console.log(`✓ Content stored: ${metadata.displayName} (ID: ${contentResult.contentId})`);
144
+ // Phase 2: Document Processing
145
+ console.log('\n--- Phase 2: Document Processing ---');
146
+ // Determine content type for processing
147
+ const detectedContentType = metadata.contentType || 'text/plain';
148
+ const isImageContent = detectedContentType.startsWith('image/');
149
+ let document;
150
+ if (isImageContent) {
151
+ // Process image content using the existing image processing pipeline
152
+ console.log(`Processing image content: ${metadata.displayName} (${detectedContentType})`);
153
+ document = await this.processImageFromMemory(content, contentResult, metadata, options);
154
+ }
155
+ else if (detectedContentType === 'application/pdf') {
156
+ // Process PDF content
157
+ console.log(`Processing PDF content: ${metadata.displayName}`);
158
+ document = await this.processPDFFromMemory(content, contentResult, metadata, options);
159
+ }
160
+ else if (detectedContentType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document') {
161
+ // Process DOCX content
162
+ console.log(`Processing DOCX content: ${metadata.displayName}`);
163
+ document = await this.processDOCXFromMemory(content, contentResult, metadata, options);
164
+ }
165
+ else {
166
+ // Process as text content
167
+ console.log(`Processing text content: ${metadata.displayName} (${detectedContentType})`);
168
+ document = {
169
+ source: metadata.displayName,
170
+ title: metadata.displayName,
171
+ content: content.toString('utf8'), // Convert buffer to string for processing
172
+ metadata: {
173
+ contentType: detectedContentType,
174
+ contentId: contentResult.contentId,
175
+ storageType: contentResult.storageType,
176
+ originalPath: metadata.originalPath
177
+ }
178
+ };
179
+ }
180
+ // Phase 3: Document Chunking
181
+ console.log('\n--- Phase 3: Document Chunking ---');
182
+ const effectiveChunkConfig = options.chunkConfig || this.defaultChunkConfig || {
183
+ chunkSize: config.chunk_size,
184
+ chunkOverlap: config.chunk_overlap
185
+ };
186
+ const chunks = await chunkDocument(document, effectiveChunkConfig);
187
+ console.log(`✓ Created ${chunks.length} chunks from memory content`);
188
+ if (chunks.length === 0) {
189
+ console.log('No chunks created from memory content');
190
+ return contentResult.contentId;
191
+ }
192
+ // Phase 4: Embedding Generation
193
+ console.log('\n--- Phase 4: Embedding Generation ---');
194
+ const embeddings = [];
195
+ let embeddingErrors = 0;
196
+ for (let i = 0; i < chunks.length; i++) {
197
+ const chunk = chunks[i];
198
+ try {
199
+ // Convert MIME type to simple content type for embedding function
200
+ const contentTypeForEmbedding = this.getContentTypeForEmbedding(document.metadata?.contentType);
201
+ const embedding = await this.embedFn(chunk.text, contentTypeForEmbedding);
202
+ // Enhance embedding result with content type metadata
203
+ if (!embedding.contentType) {
204
+ embedding.contentType = contentTypeForEmbedding;
205
+ }
206
+ if (!embedding.metadata) {
207
+ embedding.metadata = document.metadata;
208
+ }
209
+ embeddings.push(embedding);
210
+ }
211
+ catch (error) {
212
+ console.warn(`Failed to embed chunk ${i + 1}:`, error instanceof Error ? error.message : String(error));
213
+ embeddingErrors++;
214
+ }
215
+ }
216
+ console.log(`✓ Generated ${embeddings.length} embeddings for memory content`);
217
+ if (embeddings.length === 0) {
218
+ console.log('No embeddings generated from memory content');
219
+ return contentResult.contentId;
220
+ }
221
+ // Phase 5: Database Storage
222
+ console.log('\n--- Phase 5: Database Storage ---');
223
+ // Insert document with content_id reference
224
+ const documentContentType = this.getContentTypeForEmbedding(document.metadata?.contentType);
225
+ const documentId = await upsertDocument(this.db, document.source, document.title, documentContentType, document.metadata, contentResult.contentId);
226
+ // Insert chunks with embeddings
227
+ let chunksStored = 0;
228
+ for (let i = 0; i < chunks.length && i < embeddings.length; i++) {
229
+ const chunk = chunks[i];
230
+ const embedding = embeddings[i];
231
+ try {
232
+ await insertChunk(this.db, embedding.embedding_id, documentId, chunk.text, chunk.chunkIndex, documentContentType, document.metadata);
233
+ chunksStored++;
234
+ }
235
+ catch (error) {
236
+ console.error(`Failed to store chunk ${i + 1}:`, error instanceof Error ? error.message : String(error));
237
+ }
238
+ }
239
+ console.log(`✓ Stored document and ${chunksStored} chunks in database`);
240
+ // Phase 6: Vector Index Updates
241
+ console.log('\n--- Phase 6: Vector Index Updates ---');
242
+ await this.updateVectorIndex(embeddings);
243
+ const endTime = Date.now();
244
+ const processingTimeMs = endTime - startTime;
245
+ console.log('\n=== Memory Ingestion Complete ===');
246
+ console.log(`Content ID: ${contentResult.contentId}`);
247
+ console.log(`Chunks created: ${chunks.length}`);
248
+ console.log(`Embeddings generated: ${embeddings.length}`);
249
+ console.log(`Chunks stored: ${chunksStored}`);
250
+ console.log(`Embedding errors: ${embeddingErrors}`);
251
+ console.log(`Total time: ${(processingTimeMs / 1000).toFixed(2)}s`);
252
+ return contentResult.contentId;
253
+ }
254
+ catch (error) {
255
+ console.error('\n=== Memory Ingestion Failed ===');
256
+ console.error(`Error: ${error instanceof Error ? error.message : 'Unknown error'}`);
257
+ throw new Error(`Memory ingestion failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
258
+ }
259
+ }
260
+ /**
261
+ * Ingest documents from a path (file or directory)
262
+ * Implements the complete pipeline: file processing → chunking → embedding → storage
263
+ * Enhanced to handle mixed content types (text and images) in multimodal mode
264
+ */
265
+ async ingestPath(path, options = {}) {
266
+ const startTime = Date.now();
267
+ console.log(`\n=== Starting ingestion from: ${path} ===`);
268
+ try {
269
+ // Phase 1: File Discovery and Processing with Content-Type Detection
270
+ console.log('\n--- Phase 1: File Discovery and Processing ---');
271
+ const fileResult = await discoverAndProcessFiles(path, options.fileOptions, this.pathManager);
272
+ if (fileResult.documents.length === 0) {
273
+ console.log('No documents found to process');
274
+ return {
275
+ documentsProcessed: 0,
276
+ chunksCreated: 0,
277
+ embeddingsGenerated: 0,
278
+ documentErrors: fileResult.processingResult.errors.length,
279
+ embeddingErrors: 0,
280
+ processingTimeMs: Date.now() - startTime,
281
+ contentIds: []
282
+ };
283
+ }
284
+ // Content-type detection and routing
285
+ const contentTypeStats = this.analyzeContentTypes(fileResult.documents);
286
+ console.log(`📊 Content analysis: ${contentTypeStats.text} text, ${contentTypeStats.image} image, ${contentTypeStats.other} other files`);
287
+ // Phase 2: Document Chunking with Content-Type Awareness
288
+ console.log('\n--- Phase 2: Document Chunking ---');
289
+ const effectiveChunkConfig = options.chunkConfig || this.defaultChunkConfig || {
290
+ chunkSize: config.chunk_size,
291
+ chunkOverlap: config.chunk_overlap
292
+ };
293
+ const chunkingResult = await this.chunkDocumentsWithContentTypes(fileResult.documents, effectiveChunkConfig);
294
+ if (chunkingResult.totalChunks === 0) {
295
+ console.log('No chunks created from documents');
296
+ return {
297
+ documentsProcessed: fileResult.documents.length,
298
+ chunksCreated: 0,
299
+ embeddingsGenerated: 0,
300
+ documentErrors: fileResult.processingResult.errors.length,
301
+ embeddingErrors: 0,
302
+ processingTimeMs: Date.now() - startTime,
303
+ contentIds: []
304
+ };
305
+ }
306
+ // Phase 3: Embedding Generation with Content-Type Support
307
+ console.log('\n--- Phase 3: Embedding Generation ---');
308
+ const embeddingResult = await this.generateEmbeddingsWithContentTypes(chunkingResult.allChunks);
309
+ // Phase 4: Database and Index Storage with Content-Type Metadata
310
+ console.log('\n--- Phase 4: Storage Operations ---');
311
+ const contentIds = await this.storeDocumentsAndChunksWithContentTypes(chunkingResult.documentChunks, embeddingResult.embeddings);
312
+ // Phase 5: Vector Index Updates
313
+ console.log('\n--- Phase 5: Vector Index Updates ---');
314
+ await this.updateVectorIndex(embeddingResult.embeddings);
315
+ const endTime = Date.now();
316
+ const processingTimeMs = endTime - startTime;
317
+ const result = {
318
+ documentsProcessed: fileResult.documents.length,
319
+ chunksCreated: chunkingResult.totalChunks,
320
+ embeddingsGenerated: embeddingResult.embeddings.length,
321
+ documentErrors: fileResult.processingResult.errors.length,
322
+ embeddingErrors: embeddingResult.errors,
323
+ processingTimeMs,
324
+ contentIds
325
+ };
326
+ console.log('\n=== Ingestion Complete ===');
327
+ console.log(`Documents processed: ${result.documentsProcessed}`);
328
+ console.log(`Chunks created: ${result.chunksCreated}`);
329
+ console.log(`Embeddings generated: ${result.embeddingsGenerated}`);
330
+ console.log(`Document errors: ${result.documentErrors}`);
331
+ console.log(`Embedding errors: ${result.embeddingErrors}`);
332
+ console.log(`Total time: ${(processingTimeMs / 1000).toFixed(2)}s`);
333
+ return result;
334
+ }
335
+ catch (error) {
336
+ console.error('\n=== Ingestion Failed ===');
337
+ console.error(`Error: ${error instanceof Error ? error.message : 'Unknown error'}`);
338
+ throw new Error(`Ingestion failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
339
+ }
340
+ }
341
+ /**
342
+ * Analyze content types in the document collection
343
+ * @private
344
+ */
345
+ analyzeContentTypes(documents) {
346
+ const stats = { text: 0, image: 0, other: 0 };
347
+ for (const document of documents) {
348
+ const contentType = document.metadata?.contentType || 'text';
349
+ switch (contentType) {
350
+ case 'text':
351
+ stats.text++;
352
+ break;
353
+ case 'image':
354
+ stats.image++;
355
+ break;
356
+ default:
357
+ stats.other++;
358
+ break;
359
+ }
360
+ }
361
+ return stats;
362
+ }
363
+ /**
364
+ * Chunk all documents and organize results with content-type awareness
365
+ * Enhanced to handle different content types appropriately
366
+ */
367
+ async chunkDocumentsWithContentTypes(documents, chunkConfig) {
368
+ const documentChunks = [];
369
+ const allChunks = [];
370
+ let totalChunks = 0;
371
+ console.log(`Processing ${documents.length} document${documents.length === 1 ? '' : 's'} for chunking...`);
372
+ for (let i = 0; i < documents.length; i++) {
373
+ const document = documents[i];
374
+ try {
375
+ const contentType = document.metadata?.contentType || 'text';
376
+ // Handle different content types appropriately
377
+ let chunks;
378
+ if (contentType === 'image') {
379
+ // For images, create a single chunk with the full content (description + metadata)
380
+ chunks = [{
381
+ text: document.content,
382
+ chunkIndex: 0,
383
+ contentType: 'image',
384
+ metadata: document.metadata
385
+ }];
386
+ }
387
+ else {
388
+ // For text documents, use normal chunking
389
+ const textChunks = await chunkDocument(document, chunkConfig);
390
+ chunks = textChunks.map(chunk => ({
391
+ ...chunk,
392
+ contentType: 'text',
393
+ metadata: document.metadata
394
+ }));
395
+ }
396
+ documentChunks.push({ document, chunks });
397
+ // Collect all chunks with their content type information
398
+ for (const chunk of chunks) {
399
+ allChunks.push({
400
+ text: chunk.text,
401
+ contentType: chunk.contentType,
402
+ metadata: chunk.metadata
403
+ });
404
+ }
405
+ totalChunks += chunks.length;
406
+ // Progress logging - more frequent for better user experience
407
+ if (documents.length <= 10 || (i + 1) % Math.max(1, Math.floor(documents.length / 10)) === 0 || i === documents.length - 1) {
408
+ const percentage = Math.round(((i + 1) / documents.length) * 100);
409
+ console.log(`Processed ${i + 1} of ${documents.length} documents (${percentage}%) - ${totalChunks} chunks created`);
410
+ }
411
+ }
412
+ catch (error) {
413
+ console.error(`Failed to chunk document ${document.source}:`, error instanceof Error ? error.message : String(error));
414
+ // Continue with other documents
415
+ continue;
416
+ }
417
+ }
418
+ console.log(`✓ Chunking complete: Created ${totalChunks} chunks from ${documentChunks.length} documents`);
419
+ return { documentChunks, allChunks, totalChunks };
420
+ }
421
+ /**
422
+ * Chunk all documents and organize results (legacy method for backward compatibility)
423
+ * @deprecated Use chunkDocumentsWithContentTypes for multimodal support
424
+ */
425
+ async chunkDocuments(documents, chunkConfig) {
426
+ const result = await this.chunkDocumentsWithContentTypes(documents, chunkConfig);
427
+ // Convert to legacy format for backward compatibility
428
+ return {
429
+ documentChunks: result.documentChunks,
430
+ allChunks: result.allChunks.map(chunk => chunk.text),
431
+ totalChunks: result.totalChunks
432
+ };
433
+ }
434
+ /**
435
+ * Generate embeddings for all chunks with content-type support
436
+ * Enhanced to handle different content types and pass metadata to embedding function
437
+ */
438
+ async generateEmbeddingsWithContentTypes(chunks) {
439
+ console.log(`Generating embeddings for ${chunks.length} chunk${chunks.length === 1 ? '' : 's'}...`);
440
+ console.log('This may take a few minutes depending on the number of chunks...');
441
+ try {
442
+ // Generate embeddings using injected embed function with content type support
443
+ const embeddings = [];
444
+ let errors = 0;
445
+ for (let i = 0; i < chunks.length; i++) {
446
+ const chunk = chunks[i];
447
+ try {
448
+ // Convert MIME type to simple content type for embedding function
449
+ const contentTypeForEmbedding = this.getContentTypeForEmbedding(chunk.contentType);
450
+ const embedding = await this.embedFn(chunk.text, contentTypeForEmbedding);
451
+ // Enhance embedding result with content type metadata if not already present
452
+ if (!embedding.contentType) {
453
+ embedding.contentType = contentTypeForEmbedding;
454
+ }
455
+ if (!embedding.metadata && chunk.metadata) {
456
+ embedding.metadata = chunk.metadata;
457
+ }
458
+ embeddings.push(embedding);
459
+ }
460
+ catch (error) {
461
+ console.warn(`Failed to embed ${chunk.contentType} chunk ${i + 1}:`, error instanceof Error ? error.message : String(error));
462
+ errors++;
463
+ }
464
+ // Progress logging
465
+ if (chunks.length > 10 && (i + 1) % Math.max(1, Math.floor(chunks.length / 10)) === 0) {
466
+ const percentage = Math.round(((i + 1) / chunks.length) * 100);
467
+ console.log(`Generated ${i + 1} of ${chunks.length} embeddings (${percentage}%)`);
468
+ }
469
+ }
470
+ if (errors > 0) {
471
+ console.warn(`⚠ Warning: ${errors} chunk${errors === 1 ? '' : 's'} failed embedding and ${errors === 1 ? 'was' : 'were'} skipped`);
472
+ }
473
+ console.log(`✓ Generated ${embeddings.length} embeddings successfully`);
474
+ return { embeddings, errors };
475
+ }
476
+ catch (error) {
477
+ console.error('Critical embedding failure:', error instanceof Error ? error.message : String(error));
478
+ throw new Error(`Embedding generation failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
479
+ }
480
+ }
481
+ /**
482
+ * Generate embeddings for all chunks with error handling (legacy method for backward compatibility)
483
+ * @deprecated Use generateEmbeddingsWithContentTypes for multimodal support
484
+ */
485
+ async generateEmbeddings(chunkTexts) {
486
+ // Convert to new format for backward compatibility
487
+ const chunks = chunkTexts.map(text => ({ text, contentType: 'text' }));
488
+ return this.generateEmbeddingsWithContentTypes(chunks);
489
+ }
490
+ /**
491
+ * Store documents and chunks in database with content-type support
492
+ * Enhanced to handle content type metadata and multimodal content
493
+ * @returns Array of content IDs for successfully stored documents
494
+ */
495
+ async storeDocumentsAndChunksWithContentTypes(documentChunks, embeddings) {
496
+ console.log(`Storing ${documentChunks.length} document${documentChunks.length === 1 ? '' : 's'} and chunks in database...`);
497
+ // Create a mapping of chunk text to embedding for efficient lookup
498
+ const embeddingMap = new Map();
499
+ let embeddingIndex = 0;
500
+ // Build mapping - this assumes embeddings are in the same order as chunks were processed
501
+ for (const { chunks } of documentChunks) {
502
+ for (const chunk of chunks) {
503
+ if (embeddingIndex < embeddings.length) {
504
+ embeddingMap.set(chunk.text, embeddings[embeddingIndex]);
505
+ embeddingIndex++;
506
+ }
507
+ }
508
+ }
509
+ let totalChunksStored = 0;
510
+ let documentsStored = 0;
511
+ const contentIds = [];
512
+ // Process each document sequentially
513
+ for (const { document, chunks } of documentChunks) {
514
+ try {
515
+ // Generate content ID for filesystem content using ContentManager
516
+ let contentId = document.metadata?.contentId;
517
+ if (!contentId) {
518
+ try {
519
+ // Use ContentManager to create filesystem reference and get content ID
520
+ const contentResult = await this.contentManager.ingestFromFilesystem(document.source);
521
+ contentId = contentResult.contentId;
522
+ // Update document metadata with content ID
523
+ if (!document.metadata) {
524
+ document.metadata = {};
525
+ }
526
+ document.metadata.contentId = contentId;
527
+ document.metadata.storageType = contentResult.storageType;
528
+ }
529
+ catch (contentError) {
530
+ console.warn(`Failed to create content reference for ${document.source}:`, contentError instanceof Error ? contentError.message : String(contentError));
531
+ // Continue without content ID - fallback to legacy behavior
532
+ }
533
+ }
534
+ // Insert or get existing document with content type support and content_id reference
535
+ const documentContentType = document.metadata?.contentType || 'text';
536
+ const documentId = await upsertDocument(this.db, document.source, document.title, documentContentType, document.metadata, contentId);
537
+ documentsStored++;
538
+ // Add content ID to results if available
539
+ if (contentId) {
540
+ contentIds.push(contentId);
541
+ }
542
+ // Insert all chunks for this document with content type support
543
+ let chunksStoredForDoc = 0;
544
+ for (const chunk of chunks) {
545
+ const embedding = embeddingMap.get(chunk.text);
546
+ if (embedding) {
547
+ try {
548
+ const chunkContentType = chunk.contentType || documentContentType;
549
+ const chunkMetadata = chunk.metadata || document.metadata;
550
+ await insertChunk(this.db, embedding.embedding_id, documentId, chunk.text, chunk.chunkIndex, chunkContentType, chunkMetadata);
551
+ chunksStoredForDoc++;
552
+ totalChunksStored++;
553
+ }
554
+ catch (chunkError) {
555
+ console.error(`Failed to store ${chunk.contentType || 'text'} chunk ${chunk.chunkIndex} for document ${document.source}:`, chunkError instanceof Error ? chunkError.message : String(chunkError));
556
+ // Continue with other chunks
557
+ }
558
+ }
559
+ else {
560
+ console.warn(`No embedding found for chunk ${chunk.chunkIndex} in document ${document.source}`);
561
+ }
562
+ }
563
+ // Progress logging for storage
564
+ if (documentChunks.length <= 20 || documentsStored % Math.max(1, Math.floor(documentChunks.length / 10)) === 0 || documentsStored === documentChunks.length) {
565
+ const percentage = Math.round((documentsStored / documentChunks.length) * 100);
566
+ console.log(`Stored ${documentsStored} of ${documentChunks.length} documents (${percentage}%) - ${totalChunksStored} chunks total`);
567
+ }
568
+ }
569
+ catch (docError) {
570
+ console.error(`Failed to store document ${document.source}:`, docError instanceof Error ? docError.message : String(docError));
571
+ // Continue with other documents
572
+ }
573
+ }
574
+ console.log(`✓ Storage complete: ${documentsStored} documents, ${totalChunksStored} chunks saved to database`);
575
+ return contentIds;
576
+ }
577
+ /**
578
+ * Store documents and chunks in database (legacy method for backward compatibility)
579
+ * @deprecated Use storeDocumentsAndChunksWithContentTypes for multimodal support
580
+ */
581
+ async storeDocumentsAndChunks(documentChunks, embeddings) {
582
+ await this.storeDocumentsAndChunksWithContentTypes(documentChunks, embeddings);
583
+ }
584
+ /**
585
+ * Update vector index with new embeddings
586
+ */
587
+ async updateVectorIndex(embeddings) {
588
+ if (embeddings.length === 0) {
589
+ console.log('No embeddings to add to vector index');
590
+ return;
591
+ }
592
+ console.log(`Adding ${embeddings.length} vector${embeddings.length === 1 ? '' : 's'} to search index...`);
593
+ try {
594
+ await this.indexManager.addVectors(embeddings);
595
+ console.log(`✓ Vector index updated successfully with ${embeddings.length} new vectors`);
596
+ }
597
+ catch (error) {
598
+ console.error('Failed to update vector index:', error instanceof Error ? error.message : String(error));
599
+ throw error;
600
+ }
601
+ }
602
+ /**
603
+ * Converts MIME type to simple content type for embedding function
604
+ * @param mimeType - MIME type string (e.g., 'text/plain', 'image/jpeg')
605
+ * @returns Simple content type ('text', 'image', etc.)
606
+ */
607
+ getContentTypeForEmbedding(mimeType) {
608
+ if (!mimeType) {
609
+ return 'text';
610
+ }
611
+ // Convert MIME types to simple content types
612
+ if (mimeType.startsWith('text/')) {
613
+ return 'text';
614
+ }
615
+ else if (mimeType.startsWith('image/')) {
616
+ return 'image';
617
+ }
618
+ else if (mimeType === 'application/pdf') {
619
+ return 'text'; // PDFs are processed as text
620
+ }
621
+ else if (mimeType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document') {
622
+ return 'text'; // DOCX files are processed as text
623
+ }
624
+ else {
625
+ return 'text'; // Default to text for unknown types
626
+ }
627
+ }
628
+ /**
629
+ * Save the vector index to disk
630
+ */
631
+ async saveIndex() {
632
+ await this.indexManager.saveIndex();
633
+ }
634
    /**
     * Process image content from memory using the existing image processing pipeline.
     *
     * Reads the image from the path where the content manager stored it,
     * extracts image metadata and a generated text description, and builds a
     * text Document whose content is the description plus key metadata lines.
     * Metadata extraction and description generation each fail soft (logged
     * warning, processing continues); only an unexpected failure outside those
     * steps triggers the outer fallback stub document.
     *
     * NOTE(review): the `content` buffer parameter is never read here — the
     * pipeline works from `contentResult.contentPath` instead; confirm whether
     * the parameter is kept only for signature parity with the PDF/DOCX
     * processors.
     *
     * @param content - Raw image bytes (unused; see note above)
     * @param contentResult - Content-manager ingestion result (contentId, storageType, contentPath)
     * @param metadata - Display metadata ({ displayName, originalPath })
     * @param options - Processing options (currently unused)
     * @private
     */
    async processImageFromMemory(content, contentResult, metadata, options) {
        try {
            // Import image processing functions
            const { generateImageDescriptionForFile, extractImageMetadataForFile } = await import('../file-processor.js');
            // Use the content path from the content manager (where the image is stored)
            const imagePath = contentResult.contentPath;
            // Extract image metadata
            let imageMetadata = {};
            try {
                imageMetadata = await extractImageMetadataForFile(imagePath);
            }
            catch (error) {
                console.warn(`Failed to extract image metadata for ${metadata.displayName}:`, error instanceof Error ? error.message : String(error));
                // Continue with empty metadata
            }
            // Generate text description for the image.
            // Seed a fallback result so the document can still be built if the
            // description model fails below.
            let descriptionResult = { description: 'Image content', model: 'none', confidence: 0 };
            try {
                const imageToTextOptions = {}; // Use default options for now
                descriptionResult = await generateImageDescriptionForFile(imagePath, imageToTextOptions);
                console.log(`✓ Generated image description: "${descriptionResult.description}"`);
            }
            catch (error) {
                console.warn(`Failed to generate image description for ${metadata.displayName}:`, error instanceof Error ? error.message : String(error));
                // Continue with fallback description
            }
            // Update metadata with description information (provenance: which
            // model produced the description and how confident it was).
            imageMetadata.description = descriptionResult.description;
            imageMetadata.descriptionModel = descriptionResult.model;
            imageMetadata.descriptionConfidence = descriptionResult.confidence;
            // Create document with image description as content
            const title = metadata.displayName;
            // Create content that includes description and key metadata
            const contentParts = [
                `Image: ${title}`,
                `Description: ${descriptionResult.description}`
            ];
            // Dimensions / format lines are only appended when extraction provided them.
            if (imageMetadata.dimensions) {
                contentParts.push(`Dimensions: ${imageMetadata.dimensions.width}x${imageMetadata.dimensions.height}`);
            }
            if (imageMetadata.format) {
                contentParts.push(`Format: ${imageMetadata.format}`);
            }
            const documentContent = contentParts.join('\n');
            return {
                source: metadata.displayName,
                title,
                content: documentContent.trim(),
                metadata: {
                    contentType: 'image',
                    contentId: contentResult.contentId,
                    storageType: contentResult.storageType,
                    originalPath: metadata.originalPath,
                    ...imageMetadata // Spread all image metadata fields
                }
            };
        }
        catch (error) {
            console.warn(`Failed to process image from memory, falling back to basic processing:`, error instanceof Error ? error.message : String(error));
            // Fallback to basic document creation
            return {
                source: metadata.displayName,
                title: metadata.displayName,
                content: `Image: ${metadata.displayName}\nPath: ${contentResult.contentPath}`,
                metadata: {
                    contentType: 'image',
                    contentId: contentResult.contentId,
                    storageType: contentResult.storageType,
                    originalPath: metadata.originalPath,
                    processingError: error instanceof Error ? error.message : String(error)
                }
            };
        }
    }
712
+ /**
713
+ * Process PDF content from memory using the existing PDF processing pipeline
714
+ * @private
715
+ */
716
+ async processPDFFromMemory(content, contentResult, metadata, options) {
717
+ try {
718
+ // Import PDF processing
719
+ const pdfParse = require('pdf-parse');
720
+ // Parse PDF content directly from buffer
721
+ const pdfData = await pdfParse(content);
722
+ console.log(`✓ Extracted ${pdfData.text.length} characters from PDF`);
723
+ return {
724
+ source: metadata.displayName,
725
+ title: metadata.displayName,
726
+ content: pdfData.text.trim(),
727
+ metadata: {
728
+ contentType: 'application/pdf',
729
+ contentId: contentResult.contentId,
730
+ storageType: contentResult.storageType,
731
+ originalPath: metadata.originalPath,
732
+ pages: pdfData.numpages,
733
+ pdfInfo: pdfData.info
734
+ }
735
+ };
736
+ }
737
+ catch (error) {
738
+ console.warn(`Failed to process PDF from memory, falling back to basic processing:`, error instanceof Error ? error.message : String(error));
739
+ // Fallback to basic document creation
740
+ return {
741
+ source: metadata.displayName,
742
+ title: metadata.displayName,
743
+ content: `PDF Document: ${metadata.displayName}\nPath: ${contentResult.contentPath}`,
744
+ metadata: {
745
+ contentType: 'application/pdf',
746
+ contentId: contentResult.contentId,
747
+ storageType: contentResult.storageType,
748
+ originalPath: metadata.originalPath,
749
+ processingError: error instanceof Error ? error.message : String(error)
750
+ }
751
+ };
752
+ }
753
+ }
754
+ /**
755
+ * Process DOCX content from memory using the existing DOCX processing pipeline
756
+ * @private
757
+ */
758
+ async processDOCXFromMemory(content, contentResult, metadata, options) {
759
+ try {
760
+ // Import DOCX processing
761
+ const mammoth = await import('mammoth');
762
+ // Parse DOCX content directly from buffer
763
+ const docxResult = await mammoth.extractRawText({ buffer: content });
764
+ console.log(`✓ Extracted ${docxResult.value.length} characters from DOCX`);
765
+ return {
766
+ source: metadata.displayName,
767
+ title: metadata.displayName,
768
+ content: docxResult.value.trim(),
769
+ metadata: {
770
+ contentType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
771
+ contentId: contentResult.contentId,
772
+ storageType: contentResult.storageType,
773
+ originalPath: metadata.originalPath,
774
+ messages: docxResult.messages
775
+ }
776
+ };
777
+ }
778
+ catch (error) {
779
+ console.warn(`Failed to process DOCX from memory, falling back to basic processing:`, error instanceof Error ? error.message : String(error));
780
+ // Fallback to basic document creation
781
+ return {
782
+ source: metadata.displayName,
783
+ title: metadata.displayName,
784
+ content: `DOCX Document: ${metadata.displayName}\nPath: ${contentResult.contentPath}`,
785
+ metadata: {
786
+ contentType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
787
+ contentId: contentResult.contentId,
788
+ storageType: contentResult.storageType,
789
+ originalPath: metadata.originalPath,
790
+ processingError: error instanceof Error ? error.message : String(error)
791
+ }
792
+ };
793
+ }
794
+ }
795
+ /**
796
+ * Clean up resources - explicit cleanup method
797
+ */
798
+ async cleanup() {
799
+ try {
800
+ // Clean up ContentManager to prevent resource leaks
801
+ if (this.contentManager && typeof this.contentManager.cleanup === 'function') {
802
+ this.contentManager.cleanup();
803
+ }
804
+ await this.db.close();
805
+ await this.indexManager.close();
806
+ }
807
+ catch (error) {
808
+ console.error('Error during IngestionPipeline cleanup:', error instanceof Error ? error.message : String(error));
809
+ }
810
+ }
811
+ }
812
+ //# sourceMappingURL=ingestion.js.map