@illuma-ai/agents 1.4.0-alpha.5 → 1.4.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/cjs/content/ArtifactStore.cjs +579 -0
  2. package/dist/cjs/content/ArtifactStore.cjs.map +1 -0
  3. package/dist/cjs/content/ContentStore.cjs +638 -0
  4. package/dist/cjs/content/ContentStore.cjs.map +1 -0
  5. package/dist/cjs/content/contentAnalyzer.cjs +91 -0
  6. package/dist/cjs/content/contentAnalyzer.cjs.map +1 -0
  7. package/dist/cjs/content/index.cjs +20 -0
  8. package/dist/cjs/content/index.cjs.map +1 -0
  9. package/dist/cjs/content/mcpAutoCache.cjs +115 -0
  10. package/dist/cjs/content/mcpAutoCache.cjs.map +1 -0
  11. package/dist/cjs/main.cjs +10 -0
  12. package/dist/cjs/main.cjs.map +1 -1
  13. package/dist/cjs/providers/tools-server/ToolsServerCapabilityProvider.cjs.map +1 -1
  14. package/dist/cjs/tools/proxyTool.cjs.map +1 -1
  15. package/dist/esm/content/ArtifactStore.mjs +576 -0
  16. package/dist/esm/content/ArtifactStore.mjs.map +1 -0
  17. package/dist/esm/content/ContentStore.mjs +635 -0
  18. package/dist/esm/content/ContentStore.mjs.map +1 -0
  19. package/dist/esm/content/contentAnalyzer.mjs +87 -0
  20. package/dist/esm/content/contentAnalyzer.mjs.map +1 -0
  21. package/dist/esm/content/index.mjs +5 -0
  22. package/dist/esm/content/index.mjs.map +1 -0
  23. package/dist/esm/content/mcpAutoCache.mjs +111 -0
  24. package/dist/esm/content/mcpAutoCache.mjs.map +1 -0
  25. package/dist/esm/main.mjs +3 -0
  26. package/dist/esm/main.mjs.map +1 -1
  27. package/dist/esm/providers/tools-server/ToolsServerCapabilityProvider.mjs.map +1 -1
  28. package/dist/esm/tools/proxyTool.mjs.map +1 -1
  29. package/dist/types/content/ArtifactStore.d.ts +223 -0
  30. package/dist/types/content/ContentStore.d.ts +140 -0
  31. package/dist/types/content/contentAnalyzer.d.ts +38 -0
  32. package/dist/types/content/index.d.ts +24 -0
  33. package/dist/types/content/mcpAutoCache.d.ts +89 -0
  34. package/dist/types/content/types.d.ts +75 -0
  35. package/dist/types/index.d.ts +5 -0
  36. package/package.json +6 -1
  37. package/src/content/ArtifactStore.ts +782 -0
  38. package/src/content/ContentStore.ts +753 -0
  39. package/src/content/contentAnalyzer.ts +105 -0
  40. package/src/content/index.ts +51 -0
  41. package/src/content/mcpAutoCache.ts +185 -0
  42. package/src/content/types.ts +82 -0
  43. package/src/index.ts +19 -0
  44. package/src/providers/__tests__/ToolsServerCapabilityProvider.test.ts +3 -1
  45. package/src/providers/tools-server/ToolsServerCapabilityProvider.ts +3 -2
  46. package/src/tools/proxyTool.ts +1 -1
@@ -0,0 +1,579 @@
1
'use strict';

var ContentStore = require('./ContentStore.cjs');

/** Single shared no-op, reused for every level of the default logger. */
const noop = () => {};

/**
 * Default logger that silently discards all output. Injected when the host
 * does not supply its own winston/pino/console-like logger.
 */
const noopLogger = {
    debug: noop,
    info: noop,
    warn: noop,
    error: noop,
};
11
/** S3 base path for all artifact files. */
const ARTIFACTS_BASE_PATH = 'artifacts';

/** Characters allowed to appear verbatim in an S3 key segment. */
const S3_SAFE_CHAR = /[a-zA-Z0-9._\-/]/;

/**
 * Sanitize a filename for safe S3 key usage.
 * Every character outside the safe set (letters, digits, . _ - /) becomes an
 * underscore. Scans per UTF-16 code unit, matching the behavior of
 * String#replace with a global character-class regex.
 */
function sanitizeName(name) {
    let sanitized = '';
    for (let i = 0; i < name.length; i += 1) {
        const unit = name[i];
        sanitized += S3_SAFE_CHAR.test(unit) ? unit : '_';
    }
    return sanitized;
}

/**
 * Constructs the canonical S3 file name for an artifact.
 * Format: `{contentId}__{sanitizedName}` — the contentId prefix guarantees
 * uniqueness even when two artifacts share a display name.
 */
function buildS3FileName(contentId, name) {
    return [contentId, sanitizeName(name)].join('__');
}

/**
 * Constructs the S3 base path scoped to a conversation.
 * Pattern: `artifacts/{conversationId}`
 * The S3 strategy prepends userId internally via getS3Key().
 */
function buildS3BasePath(conversationId) {
    return ARTIFACTS_BASE_PATH + '/' + conversationId;
}
35
/**
 * File-backed artifact store extending ContentStore with S3 persistence.
 *
 * Every write immediately persists to Redis (fast cache) AND S3 (permanent store).
 * S3 writes are fire-and-forget async — the agent gets an instant response from Redis.
 * On Redis cache miss, content is transparently restored from S3 via MongoDB lookup.
 *
 * Key structure (consistent across all layers):
 * - Redis: `CONTENT_STORE::{conversationId}::{contentId}`
 * - S3: `artifacts/{conversationId}/{userId}/{contentId}__{name}`
 * - MongoDB: `file_id: "artifact-{contentId}"`, `metadata.contentId: "{contentId}"`
 *
 * @example
 * ```ts
 * import Keyv from 'keyv';
 * import { ArtifactStore, CONTENT_TTL_MS } from '@illuma-ai/agents/content';
 *
 * const cache = new Keyv({ namespace: `content-store::${conversationId}`, ttl: CONTENT_TTL_MS });
 * const store = new ArtifactStore(cache, conversationId, userId, s3Strategy, fileModel, logger);
 * const id = await store.store({ name: 'App.tsx', type: 'text/x-typescript', content: code, source: 'agent' });
 * // Content is in the cache immediately; S3 + the injected FileModel persist in background.
 * const result = await store.readLines(id, 1, 50);
 * ```
 */
class ArtifactStore extends ContentStore.ContentStore {
    conversationId;
    userId;
    s3;
    fileModel;
    logger;
    constructor(cache, conversationId, userId, s3, fileModel, logger = noopLogger) {
        super(cache);
        this.conversationId = conversationId;
        this.userId = userId;
        this.s3 = s3;
        this.fileModel = fileModel;
        this.logger = logger;
    }
    /** File ID prefix for MongoDB file_id. Override in subclasses. */
    getFileIdPrefix() {
        return 'artifact-';
    }
    /** Context label stored on MongoDB File records. Override in subclasses. */
    getContextLabel() {
        return 'artifact';
    }
    /** S3 base path prefix. Override in subclasses. */
    getS3BasePath() {
        return buildS3BasePath(this.conversationId);
    }
    /** Build S3 file name. Override in subclasses. */
    getS3FileName(contentId, name) {
        return buildS3FileName(contentId, name);
    }
    /** Build the canonical file_id for a content entry. */
    buildFileId(contentId) {
        return `${this.getFileIdPrefix()}${contentId}`;
    }
    /**
     * Store new content in Redis and persist to S3 + MongoDB in background.
     * Returns immediately after Redis write — agent doesn't wait for S3.
     *
     * @param entry - The content to store.
     * @returns The generated content ID.
     */
    async store(entry) {
        // 1. Fast path: store in Redis via parent (agent gets instant response)
        const contentId = await super.store(entry);
        // 2. Update Redis metadata with ownership info for S3 restore path
        const stored = await this.getStored(contentId);
        if (stored) {
            stored.metadata.userId = this.userId;
            stored.metadata.conversationId = this.conversationId;
            stored.metadata.fileId = this.buildFileId(contentId);
            await this.cache.set(contentId, JSON.stringify(stored));
            // Update index with enriched metadata
            const index = await this.getIndex();
            if (index[contentId]) {
                index[contentId] = stored.metadata;
                await this.cache.set(this.indexKey, JSON.stringify(index));
            }
        }
        // 3. Background: persist to S3 + MongoDB (fire-and-forget)
        this.persistToS3(contentId, entry).catch((err) => {
            this.logger.warn(`[ArtifactStore] S3 persist failed for ${contentId}:`, err.message);
        });
        return contentId;
    }
    /**
     * Overwrite content for an existing entry. Updates Redis + syncs to S3.
     *
     * @param contentId - The content entry ID.
     * @param content - New content to write.
     * @throws If content ID is not found in Redis or S3.
     */
    async write(contentId, content) {
        // Ensure content is loaded into Redis (may need S3 fallback)
        await this.ensureLoaded(contentId);
        // Update Redis via parent
        await super.write(contentId, content);
        // Background: sync updated content to S3
        this.syncToS3(contentId).catch((err) => {
            this.logger.warn(`[ArtifactStore] S3 sync failed for write on ${contentId}:`, err.message);
        });
    }
    /**
     * Surgical string replacement. Updates Redis + syncs to S3.
     *
     * @param contentId - The content entry ID.
     * @param oldStr - Exact string to find.
     * @param newStr - Replacement string.
     * @returns Edit result with diff and affected line info.
     */
    async strReplace(contentId, oldStr, newStr) {
        // Ensure content is loaded into Redis (may need S3 fallback)
        await this.ensureLoaded(contentId);
        // Edit in Redis via parent
        const result = await super.strReplace(contentId, oldStr, newStr);
        if (result.success) {
            // Background: sync updated content to S3
            this.syncToS3(contentId).catch((err) => {
                this.logger.warn(`[ArtifactStore] S3 sync failed for edit on ${contentId}:`, err.message);
            });
        }
        return result;
    }
    /**
     * Read lines with S3 fallback. If Redis has expired, loads from S3 first.
     *
     * @param contentId - The content entry ID.
     * @param startLine - First line to read (1-based).
     * @param endLine - Last line to read (inclusive).
     * @returns Read result or null if not found in any layer.
     */
    async readLines(contentId, startLine, endLine) {
        // Try Redis first via parent
        let result = await super.readLines(contentId, startLine, endLine);
        if (result) {
            return result;
        }
        // Redis miss — try S3 fallback
        const restored = await this.restoreFromS3(contentId);
        if (!restored) {
            return null;
        }
        // Retry read from Redis (now populated)
        result = await super.readLines(contentId, startLine, endLine);
        return result;
    }
    /**
     * Read full content with S3 fallback. If Redis has expired, loads from S3 first.
     * No line cap — returns raw content for frontend display (e.g., CodeViz).
     *
     * @param contentId - The content entry ID.
     * @returns Raw content with total line/char counts, or null if not found in any layer.
     */
    async readAll(contentId) {
        // Try Redis first via parent
        let result = await super.readAll(contentId);
        if (result) {
            return result;
        }
        // Redis miss — try S3 fallback
        const restored = await this.restoreFromS3(contentId);
        if (!restored) {
            return null;
        }
        // Retry read from Redis (now populated)
        result = await super.readAll(contentId);
        return result;
    }
    /**
     * Search with S3 fallback. If Redis has expired, loads from S3 first.
     *
     * @param contentId - The content entry ID.
     * @param pattern - Text or regex pattern to match.
     * @param maxResults - Maximum matches to return.
     * @returns Array of matches or null if not found.
     */
    async search(contentId, pattern, maxResults) {
        // Try Redis first via parent
        let result = await super.search(contentId, pattern, maxResults);
        if (result) {
            return result;
        }
        // Redis miss — try S3 fallback
        const restored = await this.restoreFromS3(contentId);
        if (!restored) {
            return null;
        }
        result = await super.search(contentId, pattern, maxResults);
        return result;
    }
    /**
     * Get metadata with S3 fallback.
     *
     * @param contentId - The content entry ID.
     * @returns Metadata or null if not found in any layer.
     */
    async info(contentId) {
        // Try Redis first via parent
        let result = await super.info(contentId);
        if (result) {
            return result;
        }
        // Redis miss — try S3 fallback
        const restored = await this.restoreFromS3(contentId);
        if (!restored) {
            return null;
        }
        result = await super.info(contentId);
        return result;
    }
    /**
     * Delete an artifact from all layers: Redis + S3 + MongoDB.
     *
     * @param contentId - The content entry ID.
     */
    async deleteFile(contentId) {
        // 1. Delete from Redis
        await super.delete(contentId);
        // 2. Delete from MongoDB + S3 (background, best-effort)
        const fileId = this.buildFileId(contentId);
        try {
            const fileRecord = await this.fileModel.findFile({
                file_id: fileId,
                user: this.userId,
            });
            if (fileRecord && fileRecord.filepath) {
                // Delete from S3
                try {
                    await this.s3.deleteFile(this.userId, fileRecord.filepath);
                }
                catch (err) {
                    this.logger.warn(`[ArtifactStore] S3 delete failed for ${contentId}:`, err.message);
                }
                // Delete from MongoDB
                await this.fileModel.deleteFile(fileId);
            }
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] MongoDB cleanup failed for ${contentId}:`, err.message);
        }
    }
    /**
     * List all files in this conversation. Merges Redis index with MongoDB File
     * records found via `Conversation.files` (the single source of truth).
     *
     * Query flow:
     * 1. Redis index — fast cache of recently-accessed content entries (in-memory, no DB hit)
     * 2. Conversation.files — canonical file_id list via `getConversationFileIds()`
     *    SCALE: Single indexed `findOne` on `{ conversationId }` — O(1)
     * 3. Backward compat fallback — `File.find({ conversationId, user })` for pre-migration
     *    data not yet in `Conversation.files`. Uses index `{ user, conversationId, updatedAt }`.
     *    Can be removed once all File records are migrated.
     * 4. Batch fetch — `File.find({ file_id: { $in: mergedIds }, user })` to hydrate full
     *    File documents. Uses index `{ file_id, user }`.
     *
     * Deduplication: Redis entries win — if a contentId is already in Redis, the MongoDB
     * record is skipped. Non-artifact files are keyed by `file:{file_id}` to avoid dupes.
     *
     * @returns Array of metadata for all files in this conversation.
     */
    async listFiles() {
        // 1. Redis entries — fast cache of content store entries
        const redisEntries = await super.list();
        const seen = new Set(redisEntries.map((e) => e.id));
        // 2. Conversation.files — the primary source of truth
        //    SCALE: Single indexed findOne on { conversationId } — O(1)
        let convoFileIds = [];
        try {
            convoFileIds = await this.fileModel.getConversationFileIds(this.conversationId);
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] getConversationFileIds failed for ${this.conversationId}:`, err.message);
        }
        const convoFileIdSet = new Set(convoFileIds);
        // 3. Backward compat fallback — files with File.conversationId set directly
        //    (pre-migration artifacts/code executor files not yet in Conversation.files).
        //    SCALE: Uses compound index { user: 1, conversationId: 1, updatedAt: -1 }
        let fallbackFileIds = [];
        try {
            const directFiles = await this.fileModel.findFiles({
                conversationId: this.conversationId,
                user: this.userId,
            });
            fallbackFileIds = directFiles
                .map((f) => f.file_id)
                .filter((fid) => fid && !convoFileIdSet.has(fid));
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] Fallback conversationId query failed for ${this.conversationId}:`, err.message);
        }
        // 4. Merge into single set and batch fetch File records
        //    SCALE: Uses compound index { file_id: 1, user: 1 }
        const allFileIds = [...new Set([...convoFileIds, ...fallbackFileIds])];
        if (allFileIds.length === 0) {
            return redisEntries;
        }
        try {
            const files = await this.fileModel.findFiles({
                file_id: { $in: allFileIds },
                user: this.userId,
            });
            for (const file of files) {
                const existingContentId = file.metadata
                    ? file.metadata.contentId
                    : undefined;
                // Artifact file (has contentId) — add as content entry if not already in Redis
                if (existingContentId) {
                    if (seen.has(existingContentId)) {
                        continue;
                    }
                    seen.add(existingContentId);
                    redisEntries.push({
                        id: existingContentId,
                        name: file.filename || 'unknown',
                        type: file.type || 'text/plain',
                        source: file.metadata.artifactSource || 'unknown',
                        totalLines: 0,
                        totalChars: file.bytes || 0,
                        createdAt: file.createdAt
                            ? new Date(file.createdAt).getTime()
                            : Date.now(),
                        fileId: file.file_id,
                        userId: this.userId,
                        conversationId: this.conversationId,
                    });
                    continue;
                }
                // Non-artifact file (attachment, code executor output, etc.)
                const fileId = file.file_id;
                if (seen.has(`file:${fileId}`)) {
                    continue;
                }
                seen.add(`file:${fileId}`);
                // Determine source label from file context/metadata
                const context = file.context;
                // FIX: `metadata` may be absent on plain attachments. The previous
                // unguarded `file.metadata.fileIdentifier` threw a TypeError here,
                // and because the whole loop shares one try/catch, a single
                // metadata-less record silently dropped every remaining file.
                const hasFileIdentifier = !!(file.metadata && file.metadata.fileIdentifier);
                let source = 'attachment';
                if (context === 'artifact') {
                    source = 'agent';
                }
                else if (hasFileIdentifier) {
                    source = 'code_executor';
                }
                const hasText = !!(file.text && file.text.length > 0);
                redisEntries.push({
                    id: `file:${fileId}`,
                    name: file.filename || 'unknown',
                    type: file.type || 'text/plain',
                    source,
                    totalLines: 0,
                    totalChars: file.bytes || 0,
                    createdAt: file.createdAt
                        ? new Date(file.createdAt).getTime()
                        : Date.now(),
                    fileId,
                    userId: this.userId,
                    conversationId: this.conversationId,
                    needsIngestion: hasText,
                });
            }
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] File batch fetch failed for ${this.conversationId}:`, err.message);
        }
        return redisEntries;
    }
    /**
     * Persist a new content entry to S3 and create a MongoDB File record.
     * Called in background after Redis store — agent doesn't wait for this.
     *
     * @param contentId - The content entry ID.
     * @param entry - The original store entry with content and metadata.
     */
    async persistToS3(contentId, entry) {
        const buffer = Buffer.from(entry.content, 'utf-8');
        const fileName = this.getS3FileName(contentId, entry.name);
        const basePath = this.getS3BasePath();
        const filepath = await this.s3.saveBuffer({
            userId: this.userId,
            buffer,
            fileName,
            basePath,
        });
        const fileId = this.buildFileId(contentId);
        await this.fileModel.createFile({
            user: this.userId,
            file_id: fileId,
            conversationId: this.conversationId,
            filename: entry.name,
            filepath,
            type: entry.type || 'text/plain',
            bytes: buffer.length,
            source: 's3',
            context: this.getContextLabel(),
            object: 'file',
            usage: 0,
            metadata: {
                contentId,
                artifactSource: entry.source,
            },
        },
        /* disableTTL */ true);
        // Link artifact to Conversation.files — the single source of truth for
        // which files belong to a conversation. Fire-and-forget: non-critical
        // because the fallback query in listFiles() will still find via File.conversationId.
        try {
            await this.fileModel.addFilesToConversation(this.conversationId, [
                fileId,
            ]);
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] addFilesToConversation failed for ${fileId}:`, err.message);
        }
        // Update Redis metadata with the S3-backed fileId
        const stored = await this.getStored(contentId);
        if (stored) {
            stored.metadata.fileId = fileId;
            await this.cache.set(contentId, JSON.stringify(stored));
            // Update index
            const index = await this.getIndex();
            if (index[contentId]) {
                index[contentId] = stored.metadata;
                await this.cache.set(this.indexKey, JSON.stringify(index));
            }
        }
    }
    /**
     * Sync updated Redis content to S3 (overwrite same key).
     * Called in background after write/edit operations.
     *
     * @param contentId - The content entry ID to sync.
     */
    async syncToS3(contentId) {
        const stored = await this.getStored(contentId);
        if (!stored) {
            return;
        }
        const buffer = Buffer.from(stored.content, 'utf-8');
        const fileName = this.getS3FileName(contentId, stored.metadata.name);
        const basePath = this.getS3BasePath();
        const filepath = await this.s3.saveBuffer({
            userId: this.userId,
            buffer,
            fileName,
            basePath,
        });
        // Update MongoDB File record with new bytes and filepath
        const fileId = this.buildFileId(contentId);
        try {
            await this.fileModel.updateFile({
                file_id: fileId,
                bytes: buffer.length,
                filepath,
            });
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] MongoDB update failed for ${contentId}:`, err.message);
        }
    }
    /**
     * Restore content from S3 into Redis on cache miss.
     * Looks up the MongoDB File record to find the S3 path, downloads content,
     * and re-populates the Redis cache with the same key structure.
     *
     * @param contentId - The content entry ID to restore.
     * @returns The restored StoredEntry, or null if not found in S3/MongoDB.
     */
    async restoreFromS3(contentId) {
        const fileId = this.buildFileId(contentId);
        try {
            // Find the MongoDB record for this artifact
            const fileRecord = await this.fileModel.findFile({
                file_id: fileId,
                user: this.userId,
            });
            if (!fileRecord || !fileRecord.filepath) {
                return null;
            }
            // Download content from S3
            const stream = await this.s3.getFileStream(fileRecord.filepath);
            const chunks = [];
            for await (const chunk of stream) {
                chunks.push(Buffer.isBuffer(chunk)
                    ? chunk
                    : Buffer.from(chunk));
            }
            const content = Buffer.concat(chunks).toString('utf-8');
            // Reconstruct metadata from MongoDB record + content
            const metadata = {
                id: contentId,
                name: fileRecord.filename || 'unknown',
                type: fileRecord.type || 'text/plain',
                // FIX: guard `metadata` — a record without it previously threw,
                // was swallowed by the outer catch, and failed the restore even
                // though the S3 content was reachable.
                source: (fileRecord.metadata && fileRecord.metadata.artifactSource) || 'unknown',
                totalLines: content.split('\n').length,
                totalChars: content.length,
                createdAt: fileRecord.createdAt
                    ? new Date(fileRecord.createdAt).getTime()
                    : Date.now(),
                fileId,
                userId: this.userId,
                conversationId: this.conversationId,
            };
            // Re-populate Redis cache
            const stored = { content, metadata };
            await this.cache.set(contentId, JSON.stringify(stored));
            // Update index
            const index = await this.getIndex();
            index[contentId] = metadata;
            await this.cache.set(this.indexKey, JSON.stringify(index));
            return stored;
        }
        catch (err) {
            this.logger.warn(`[ArtifactStore] S3 restore failed for ${contentId}:`, err.message);
            return null;
        }
    }
    /**
     * Ensure content is loaded into Redis. If not in Redis, attempt S3 restore.
     * Used before write/edit operations that need content to be present.
     *
     * @param contentId - The content entry ID.
     * @throws If content is not found in Redis or S3.
     */
    async ensureLoaded(contentId) {
        const existing = await this.getStored(contentId);
        if (existing) {
            return;
        }
        // Try S3 restore
        const restored = await this.restoreFromS3(contentId);
        if (!restored) {
            throw new Error(`Content "${contentId}" not found in Redis or S3`);
        }
    }
}
576
+
577
// Public API of this module: the S3-backed store class, plus sanitizeName
// (exported so hosts can precompute S3-safe names). The path builders
// buildS3FileName/buildS3BasePath remain module-private.
exports.ArtifactStore = ArtifactStore;
exports.sanitizeName = sanitizeName;
//# sourceMappingURL=ArtifactStore.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ArtifactStore.cjs","sources":["../../../src/content/ArtifactStore.ts"],"sourcesContent":["import type Keyv from 'keyv';\nimport { ContentStore } from './ContentStore';\nimport type {\n StoreEntry,\n StoredEntry,\n ContentMetadata,\n EditResult,\n ReadResult,\n ReadAllResult,\n SearchMatch,\n} from './types';\n\n/**\n * Minimal logger surface. Callers inject any winston/pino/console-like\n * logger that honors the four level methods; defaults to a no-op so the\n * library runs without a configured logger.\n */\nexport interface Logger {\n debug(...args: unknown[]): void;\n info(...args: unknown[]): void;\n warn(...args: unknown[]): void;\n error(...args: unknown[]): void;\n}\n\nconst noopLogger: Logger = {\n debug: () => {},\n info: () => {},\n warn: () => {},\n error: () => {},\n};\n\n/**\n * Callback interface for S3 operations.\n * Injected at construction so ArtifactStore stays host-agnostic — the\n * consumer wires this to its own S3 strategy / presigner.\n */\nexport interface S3Strategy {\n /** Upload a buffer to S3, return the stored filepath (signed URL or key). */\n saveBuffer(params: {\n userId: string;\n buffer: Buffer;\n fileName: string;\n basePath: string;\n }): Promise<string>;\n\n /** Download file content from S3 as a readable stream. */\n getFileStream(filePath: string): Promise<NodeJS.ReadableStream>;\n\n /** Delete a file from S3. Requires userId for ownership validation. */\n deleteFile(userId: string, filePath: string): Promise<void>;\n}\n\n/**\n * Callback interface for MongoDB File model operations.\n * Injected to keep ArtifactStore decoupled from Mongoose models.\n */\nexport interface FileModel {\n /** Create or upsert a File document. */\n createFile(\n data: Record<string, unknown>,\n disableTTL: boolean\n ): Promise<Record<string, unknown>>;\n\n /** Find a single File by filter. */\n findFile(\n filter: Record<string, unknown>\n ): Promise<Record<string, unknown> | null>;\n\n /** Find multiple Files by filter. 
*/\n findFiles(\n filter: Record<string, unknown>\n ): Promise<Record<string, unknown>[]>;\n\n /** Update a File document (must include file_id). */\n updateFile(\n data: Record<string, unknown>\n ): Promise<Record<string, unknown> | null>;\n\n /** Delete a File by file_id. */\n deleteFile(fileId: string): Promise<Record<string, unknown> | null>;\n\n /**\n * Get the file_ids linked to a conversation via the Conversation.files array.\n * Returns an empty array if the conversation is not found or has no files.\n * This is the primary source of truth for \"which files belong to this conversation\".\n */\n getConversationFileIds(conversationId: string): Promise<string[]>;\n\n /**\n * Link file_ids to a conversation via $addToSet on Conversation.files.\n * Idempotent — calling with already-linked file_ids is a no-op.\n * Used after creating a File record to ensure it appears in \"Files in Context\".\n */\n addFilesToConversation(\n conversationId: string,\n fileIds: string[]\n ): Promise<void>;\n}\n\n/** S3 base path for all artifact files. */\nconst ARTIFACTS_BASE_PATH = 'artifacts';\n\n/**\n * Sanitize a filename for safe S3 key usage.\n * Replaces non-alphanumeric characters (except . 
_ - /) with underscores.\n */\nexport function sanitizeName(name: string): string {\n return name.replace(/[^a-zA-Z0-9._\\-/]/g, '_');\n}\n\n/**\n * Constructs the canonical S3 file name for an artifact.\n * Format: `{contentId}__{sanitizedName}` — contentId prefix ensures uniqueness.\n */\nfunction buildS3FileName(contentId: string, name: string): string {\n return `${contentId}__${sanitizeName(name)}`;\n}\n\n/**\n * Constructs the S3 base path scoped to a conversation.\n * Pattern: `artifacts/{conversationId}`\n * The S3 strategy prepends userId internally via getS3Key().\n */\nfunction buildS3BasePath(conversationId: string): string {\n return `${ARTIFACTS_BASE_PATH}/${conversationId}`;\n}\n\n/**\n * File-backed artifact store extending ContentStore with S3 persistence.\n *\n * Every write immediately persists to Redis (fast cache) AND S3 (permanent store).\n * S3 writes are fire-and-forget async — the agent gets an instant response from Redis.\n * On Redis cache miss, content is transparently restored from S3 via MongoDB lookup.\n *\n * Key structure (consistent across all layers):\n * - Redis: `CONTENT_STORE::{conversationId}::{contentId}`\n * - S3: `artifacts/{conversationId}/{userId}/{contentId}__{name}`\n * - MongoDB: `file_id: \"artifact-{contentId}\"`, `metadata.contentId: \"{contentId}\"`\n *\n * @example\n * ```ts\n * import Keyv from 'keyv';\n * import { ArtifactStore, CONTENT_TTL_MS } from '@illuma-ai/agents/content';\n *\n * const cache = new Keyv({ namespace: `content-store::${conversationId}`, ttl: CONTENT_TTL_MS });\n * const store = new ArtifactStore(cache, conversationId, userId, s3Strategy, fileModel, logger);\n * const id = await store.store({ name: 'App.tsx', type: 'text/x-typescript', content: code, source: 'agent' });\n * // Content is in the cache immediately; S3 + the injected FileModel persist in background.\n * const result = await store.readLines(id, 1, 50);\n * ```\n */\nexport class ArtifactStore extends ContentStore {\n protected 
logger: Logger;\n\n constructor(\n cache: Keyv,\n protected conversationId: string,\n protected userId: string,\n protected s3: S3Strategy,\n protected fileModel: FileModel,\n logger: Logger = noopLogger\n ) {\n super(cache);\n this.logger = logger;\n }\n\n /** File ID prefix for MongoDB file_id. Override in subclasses. */\n protected getFileIdPrefix(): string {\n return 'artifact-';\n }\n\n /** Context label stored on MongoDB File records. Override in subclasses. */\n protected getContextLabel(): string {\n return 'artifact';\n }\n\n /** S3 base path prefix. Override in subclasses. */\n protected getS3BasePath(): string {\n return buildS3BasePath(this.conversationId);\n }\n\n /** Build S3 file name. Override in subclasses. */\n protected getS3FileName(contentId: string, name: string): string {\n return buildS3FileName(contentId, name);\n }\n\n /** Build the canonical file_id for a content entry. */\n protected buildFileId(contentId: string): string {\n return `${this.getFileIdPrefix()}${contentId}`;\n }\n\n /**\n * Store new content in Redis and persist to S3 + MongoDB in background.\n * Returns immediately after Redis write — agent doesn't wait for S3.\n *\n * @param entry - The content to store.\n * @returns The generated content ID.\n */\n async store(entry: StoreEntry): Promise<string> {\n // 1. Fast path: store in Redis via parent (agent gets instant response)\n const contentId = await super.store(entry);\n\n // 2. 
Update Redis metadata with ownership info for S3 restore path\n const stored = await this.getStored(contentId);\n if (stored) {\n stored.metadata.userId = this.userId;\n stored.metadata.conversationId = this.conversationId;\n stored.metadata.fileId = this.buildFileId(contentId);\n await this.cache.set(contentId, JSON.stringify(stored));\n\n // Update index with enriched metadata\n const index = await this.getIndex();\n if (index[contentId]) {\n index[contentId] = stored.metadata;\n await this.cache.set(this.indexKey, JSON.stringify(index));\n }\n }\n\n // 3. Background: persist to S3 + MongoDB (fire-and-forget)\n this.persistToS3(contentId, entry).catch((err) => {\n this.logger.warn(\n `[ArtifactStore] S3 persist failed for ${contentId}:`,\n (err as Error).message\n );\n });\n\n return contentId;\n }\n\n /**\n * Overwrite content for an existing entry. Updates Redis + syncs to S3.\n *\n * @param contentId - The content entry ID.\n * @param content - New content to write.\n * @throws If content ID is not found in Redis or S3.\n */\n async write(contentId: string, content: string): Promise<void> {\n // Ensure content is loaded into Redis (may need S3 fallback)\n await this.ensureLoaded(contentId);\n\n // Update Redis via parent\n await super.write(contentId, content);\n\n // Background: sync updated content to S3\n this.syncToS3(contentId).catch((err) => {\n this.logger.warn(\n `[ArtifactStore] S3 sync failed for write on ${contentId}:`,\n (err as Error).message\n );\n });\n }\n\n /**\n * Surgical string replacement. 
Updates Redis + syncs to S3.\n *\n * @param contentId - The content entry ID.\n * @param oldStr - Exact string to find.\n * @param newStr - Replacement string.\n * @returns Edit result with diff and affected line info.\n */\n async strReplace(\n contentId: string,\n oldStr: string,\n newStr: string\n ): Promise<EditResult> {\n // Ensure content is loaded into Redis (may need S3 fallback)\n await this.ensureLoaded(contentId);\n\n // Edit in Redis via parent\n const result = await super.strReplace(contentId, oldStr, newStr);\n\n if (result.success) {\n // Background: sync updated content to S3\n this.syncToS3(contentId).catch((err) => {\n this.logger.warn(\n `[ArtifactStore] S3 sync failed for edit on ${contentId}:`,\n (err as Error).message\n );\n });\n }\n\n return result;\n }\n\n /**\n * Read lines with S3 fallback. If Redis has expired, loads from S3 first.\n *\n * @param contentId - The content entry ID.\n * @param startLine - First line to read (1-based).\n * @param endLine - Last line to read (inclusive).\n * @returns Read result or null if not found in any layer.\n */\n async readLines(\n contentId: string,\n startLine?: number,\n endLine?: number\n ): Promise<ReadResult | null> {\n // Try Redis first via parent\n let result = await super.readLines(contentId, startLine, endLine);\n if (result) {\n return result;\n }\n\n // Redis miss — try S3 fallback\n const restored = await this.restoreFromS3(contentId);\n if (!restored) {\n return null;\n }\n\n // Retry read from Redis (now populated)\n result = await super.readLines(contentId, startLine, endLine);\n return result;\n }\n\n /**\n * Read full content with S3 fallback. 
If Redis has expired, loads from S3 first.\n * No line cap — returns raw content for frontend display (e.g., CodeViz).\n *\n * @param contentId - The content entry ID.\n * @returns Raw content with total line/char counts, or null if not found in any layer.\n */\n async readAll(contentId: string): Promise<ReadAllResult | null> {\n // Try Redis first via parent\n let result = await super.readAll(contentId);\n if (result) {\n return result;\n }\n\n // Redis miss — try S3 fallback\n const restored = await this.restoreFromS3(contentId);\n if (!restored) {\n return null;\n }\n\n // Retry read from Redis (now populated)\n result = await super.readAll(contentId);\n return result;\n }\n\n /**\n * Search with S3 fallback. If Redis has expired, loads from S3 first.\n *\n * @param contentId - The content entry ID.\n * @param pattern - Text or regex pattern to match.\n * @param maxResults - Maximum matches to return.\n * @returns Array of matches or null if not found.\n */\n async search(\n contentId: string,\n pattern: string,\n maxResults?: number\n ): Promise<SearchMatch[] | null> {\n // Try Redis first via parent\n let result = await super.search(contentId, pattern, maxResults);\n if (result) {\n return result;\n }\n\n // Redis miss — try S3 fallback\n const restored = await this.restoreFromS3(contentId);\n if (!restored) {\n return null;\n }\n\n result = await super.search(contentId, pattern, maxResults);\n return result;\n }\n\n /**\n * Get metadata with S3 fallback.\n *\n * @param contentId - The content entry ID.\n * @returns Metadata or null if not found in any layer.\n */\n async info(contentId: string): Promise<ContentMetadata | null> {\n // Try Redis first via parent\n let result = await super.info(contentId);\n if (result) {\n return result;\n }\n\n // Redis miss — try S3 fallback\n const restored = await this.restoreFromS3(contentId);\n if (!restored) {\n return null;\n }\n\n result = await super.info(contentId);\n return result;\n }\n\n /**\n * Delete an artifact 
from all layers: Redis + S3 + MongoDB.\n *\n * @param contentId - The content entry ID.\n */\n async deleteFile(contentId: string): Promise<void> {\n // 1. Delete from Redis\n await super.delete(contentId);\n\n // 2. Delete from MongoDB + S3 (background, best-effort)\n const fileId = this.buildFileId(contentId);\n try {\n const fileRecord = await this.fileModel.findFile({\n file_id: fileId,\n user: this.userId,\n });\n\n if (fileRecord && fileRecord.filepath) {\n // Delete from S3\n try {\n await this.s3.deleteFile(this.userId, fileRecord.filepath as string);\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] S3 delete failed for ${contentId}:`,\n (err as Error).message\n );\n }\n\n // Delete from MongoDB\n await this.fileModel.deleteFile(fileId);\n }\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] MongoDB cleanup failed for ${contentId}:`,\n (err as Error).message\n );\n }\n }\n\n /**\n * List all files in this conversation. Merges Redis index with MongoDB File\n * records found via `Conversation.files` (the single source of truth).\n *\n * Query flow:\n * 1. Redis index — fast cache of recently-accessed content entries (in-memory, no DB hit)\n * 2. Conversation.files — canonical file_id list via `getConversationFileIds()`\n * SCALE: Single indexed `findOne` on `{ conversationId }` — O(1)\n * 3. Backward compat fallback — `File.find({ conversationId, user })` for pre-migration\n * data not yet in `Conversation.files`. Uses index `{ user, conversationId, updatedAt }`.\n * Can be removed once all File records are migrated.\n * 4. Batch fetch — `File.find({ file_id: { $in: mergedIds }, user })` to hydrate full\n * File documents. Uses index `{ file_id, user }`.\n *\n * Deduplication: Redis entries win — if a contentId is already in Redis, the MongoDB\n * record is skipped. 
Non-artifact files are keyed by `file:{file_id}` to avoid dupes.\n *\n * @returns Array of metadata for all files in this conversation.\n */\n async listFiles(): Promise<ContentMetadata[]> {\n // 1. Redis entries — fast cache of content store entries\n const redisEntries = await super.list();\n const seen = new Set<string>(redisEntries.map((e) => e.id));\n\n // 2. Conversation.files — the primary source of truth\n // SCALE: Single indexed findOne on { conversationId } — O(1)\n let convoFileIds: string[] = [];\n try {\n convoFileIds = await this.fileModel.getConversationFileIds(\n this.conversationId\n );\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] getConversationFileIds failed for ${this.conversationId}:`,\n (err as Error).message\n );\n }\n const convoFileIdSet = new Set(convoFileIds);\n\n // 3. Backward compat fallback — files with File.conversationId set directly\n // (pre-migration artifacts/code executor files not yet in Conversation.files).\n // SCALE: Uses compound index { user: 1, conversationId: 1, updatedAt: -1 }\n let fallbackFileIds: string[] = [];\n try {\n const directFiles = await this.fileModel.findFiles({\n conversationId: this.conversationId,\n user: this.userId,\n });\n fallbackFileIds = directFiles\n .map((f) => f.file_id as string)\n .filter((fid) => fid && !convoFileIdSet.has(fid));\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] Fallback conversationId query failed for ${this.conversationId}:`,\n (err as Error).message\n );\n }\n\n // 4. Merge into single set and batch fetch File records\n // SCALE: Uses compound index { file_id: 1, user: 1 }\n const allFileIds = [...new Set([...convoFileIds, ...fallbackFileIds])];\n if (allFileIds.length === 0) {\n return redisEntries;\n }\n\n try {\n const files = await this.fileModel.findFiles({\n file_id: { $in: allFileIds } as unknown,\n user: this.userId,\n });\n\n for (const file of files) {\n const existingContentId = file.metadata\n ? 
((file.metadata as Record<string, unknown>).contentId as string)\n : undefined;\n\n // Artifact file (has contentId) — add as content entry if not already in Redis\n if (existingContentId) {\n if (seen.has(existingContentId)) {\n continue;\n }\n seen.add(existingContentId);\n redisEntries.push({\n id: existingContentId,\n name: (file.filename as string) || 'unknown',\n type: (file.type as string) || 'text/plain',\n source:\n ((file.metadata as Record<string, unknown>)\n .artifactSource as string) || 'unknown',\n totalLines: 0,\n totalChars: (file.bytes as number) || 0,\n createdAt: file.createdAt\n ? new Date(file.createdAt as string).getTime()\n : Date.now(),\n fileId: file.file_id as string,\n userId: this.userId,\n conversationId: this.conversationId,\n });\n continue;\n }\n\n // Non-artifact file (attachment, code executor output, etc.)\n const fileId = file.file_id as string;\n if (seen.has(`file:${fileId}`)) {\n continue;\n }\n seen.add(`file:${fileId}`);\n\n // Determine source label from file context/metadata\n const context = file.context as string | undefined;\n const hasFileIdentifier = !!(file.metadata as Record<string, unknown>)\n .fileIdentifier;\n let source = 'attachment';\n if (context === 'artifact') {\n source = 'agent';\n } else if (hasFileIdentifier) {\n source = 'code_executor';\n }\n\n const hasText = !!(file.text && (file.text as string).length > 0);\n\n redisEntries.push({\n id: `file:${fileId}`,\n name: (file.filename as string) || 'unknown',\n type: (file.type as string) || 'text/plain',\n source,\n totalLines: 0,\n totalChars: (file.bytes as number) || 0,\n createdAt: file.createdAt\n ? 
new Date(file.createdAt as string).getTime()\n : Date.now(),\n fileId,\n userId: this.userId,\n conversationId: this.conversationId,\n needsIngestion: hasText,\n });\n }\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] File batch fetch failed for ${this.conversationId}:`,\n (err as Error).message\n );\n }\n\n return redisEntries;\n }\n\n /**\n * Persist a new content entry to S3 and create a MongoDB File record.\n * Called in background after Redis store — agent doesn't wait for this.\n *\n * @param contentId - The content entry ID.\n * @param entry - The original store entry with content and metadata.\n */\n protected async persistToS3(\n contentId: string,\n entry: StoreEntry\n ): Promise<void> {\n const buffer = Buffer.from(entry.content, 'utf-8');\n const fileName = this.getS3FileName(contentId, entry.name);\n const basePath = this.getS3BasePath();\n\n const filepath = await this.s3.saveBuffer({\n userId: this.userId,\n buffer,\n fileName,\n basePath,\n });\n\n const fileId = this.buildFileId(contentId);\n await this.fileModel.createFile(\n {\n user: this.userId,\n file_id: fileId,\n conversationId: this.conversationId,\n filename: entry.name,\n filepath,\n type: entry.type || 'text/plain',\n bytes: buffer.length,\n source: 's3',\n context: this.getContextLabel(),\n object: 'file',\n usage: 0,\n metadata: {\n contentId,\n artifactSource: entry.source,\n },\n },\n /* disableTTL */ true\n );\n\n // Link artifact to Conversation.files — the single source of truth for\n // which files belong to a conversation. 
Fire-and-forget: non-critical\n // because the fallback query in listFiles() will still find via File.conversationId.\n try {\n await this.fileModel.addFilesToConversation(this.conversationId, [\n fileId,\n ]);\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] addFilesToConversation failed for ${fileId}:`,\n (err as Error).message\n );\n }\n\n // Update Redis metadata with the S3-backed fileId\n const stored = await this.getStored(contentId);\n if (stored) {\n stored.metadata.fileId = fileId;\n await this.cache.set(contentId, JSON.stringify(stored));\n\n // Update index\n const index = await this.getIndex();\n if (index[contentId]) {\n index[contentId] = stored.metadata;\n await this.cache.set(this.indexKey, JSON.stringify(index));\n }\n }\n }\n\n /**\n * Sync updated Redis content to S3 (overwrite same key).\n * Called in background after write/edit operations.\n *\n * @param contentId - The content entry ID to sync.\n */\n protected async syncToS3(contentId: string): Promise<void> {\n const stored = await this.getStored(contentId);\n if (!stored) {\n return;\n }\n\n const buffer = Buffer.from(stored.content, 'utf-8');\n const fileName = this.getS3FileName(contentId, stored.metadata.name);\n const basePath = this.getS3BasePath();\n\n const filepath = await this.s3.saveBuffer({\n userId: this.userId,\n buffer,\n fileName,\n basePath,\n });\n\n // Update MongoDB File record with new bytes and filepath\n const fileId = this.buildFileId(contentId);\n try {\n await this.fileModel.updateFile({\n file_id: fileId,\n bytes: buffer.length,\n filepath,\n });\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] MongoDB update failed for ${contentId}:`,\n (err as Error).message\n );\n }\n }\n\n /**\n * Restore content from S3 into Redis on cache miss.\n * Looks up the MongoDB File record to find the S3 path, downloads content,\n * and re-populates the Redis cache with the same key structure.\n *\n * @param contentId - The content entry ID to restore.\n * @returns 
The restored StoredEntry, or null if not found in S3/MongoDB.\n */\n protected async restoreFromS3(\n contentId: string\n ): Promise<StoredEntry | null> {\n const fileId = this.buildFileId(contentId);\n\n try {\n // Find the MongoDB record for this artifact\n const fileRecord = await this.fileModel.findFile({\n file_id: fileId,\n user: this.userId,\n });\n\n if (!fileRecord || !fileRecord.filepath) {\n return null;\n }\n\n // Download content from S3\n const stream = await this.s3.getFileStream(fileRecord.filepath as string);\n const chunks: Buffer[] = [];\n for await (const chunk of stream) {\n chunks.push(\n Buffer.isBuffer(chunk)\n ? chunk\n : Buffer.from(chunk as unknown as Uint8Array)\n );\n }\n const content = Buffer.concat(chunks).toString('utf-8');\n\n // Reconstruct metadata from MongoDB record + content\n const metadata: ContentMetadata = {\n id: contentId,\n name: (fileRecord.filename as string) || 'unknown',\n type: (fileRecord.type as string) || 'text/plain',\n source:\n ((fileRecord.metadata as Record<string, unknown>)\n .artifactSource as string) || 'unknown',\n totalLines: content.split('\\n').length,\n totalChars: content.length,\n createdAt: fileRecord.createdAt\n ? new Date(fileRecord.createdAt as string).getTime()\n : Date.now(),\n fileId,\n userId: this.userId,\n conversationId: this.conversationId,\n };\n\n // Re-populate Redis cache\n const stored: StoredEntry = { content, metadata };\n await this.cache.set(contentId, JSON.stringify(stored));\n\n // Update index\n const index = await this.getIndex();\n index[contentId] = metadata;\n await this.cache.set(this.indexKey, JSON.stringify(index));\n\n return stored;\n } catch (err) {\n this.logger.warn(\n `[ArtifactStore] S3 restore failed for ${contentId}:`,\n (err as Error).message\n );\n return null;\n }\n }\n\n /**\n * Ensure content is loaded into Redis. 
If not in Redis, attempt S3 restore.\n * Used before write/edit operations that need content to be present.\n *\n * @param contentId - The content entry ID.\n * @throws If content is not found in Redis or S3.\n */\n protected async ensureLoaded(contentId: string): Promise<void> {\n const existing = await this.getStored(contentId);\n if (existing) {\n return;\n }\n\n // Try S3 restore\n const restored = await this.restoreFromS3(contentId);\n if (!restored) {\n throw new Error(`Content \"${contentId}\" not found in Redis or S3`);\n }\n }\n}\n"],"names":["ContentStore"],"mappings":";;;;AAwBA,MAAM,UAAU,GAAW;AACzB,IAAA,KAAK,EAAE,MAAK,EAAE,CAAC;AACf,IAAA,IAAI,EAAE,MAAK,EAAE,CAAC;AACd,IAAA,IAAI,EAAE,MAAK,EAAE,CAAC;AACd,IAAA,KAAK,EAAE,MAAK,EAAE,CAAC;CAChB;AAsED;AACA,MAAM,mBAAmB,GAAG,WAAW;AAEvC;;;AAGG;AACG,SAAU,YAAY,CAAC,IAAY,EAAA;IACvC,OAAO,IAAI,CAAC,OAAO,CAAC,oBAAoB,EAAE,GAAG,CAAC;AAChD;AAEA;;;AAGG;AACH,SAAS,eAAe,CAAC,SAAiB,EAAE,IAAY,EAAA;IACtD,OAAO,CAAA,EAAG,SAAS,CAAA,EAAA,EAAK,YAAY,CAAC,IAAI,CAAC,EAAE;AAC9C;AAEA;;;;AAIG;AACH,SAAS,eAAe,CAAC,cAAsB,EAAA;AAC7C,IAAA,OAAO,CAAA,EAAG,mBAAmB,CAAA,CAAA,EAAI,cAAc,EAAE;AACnD;AAEA;;;;;;;;;;;;;;;;;;;;;;;AAuBG;AACG,MAAO,aAAc,SAAQA,yBAAY,CAAA;AAKjC,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,EAAA;AACA,IAAA,SAAA;AAPF,IAAA,MAAM;IAEhB,WAAA,CACE,KAAW,EACD,cAAsB,EACtB,MAAc,EACd,EAAc,EACd,SAAoB,EAC9B,MAAA,GAAiB,UAAU,EAAA;QAE3B,KAAK,CAAC,KAAK,CAAC;QANF,IAAA,CAAA,cAAc,GAAd,cAAc;QACd,IAAA,CAAA,MAAM,GAAN,MAAM;QACN,IAAA,CAAA,EAAE,GAAF,EAAE;QACF,IAAA,CAAA,SAAS,GAAT,SAAS;AAInB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;IACtB;;IAGU,eAAe,GAAA;AACvB,QAAA,OAAO,WAAW;IACpB;;IAGU,eAAe,GAAA;AACvB,QAAA,OAAO,UAAU;IACnB;;IAGU,aAAa,GAAA;AACrB,QAAA,OAAO,eAAe,CAAC,IAAI,CAAC,cAAc,CAAC;IAC7C;;IAGU,aAAa,CAAC,SAAiB,EAAE,IAAY,EAAA;AACrD,QAAA,OAAO,eAAe,CAAC,SAAS,EAAE,IAAI,CAAC;IACzC;;AAGU,IAAA,WAAW,CAAC,SAAiB,EAAA;QACrC,OAAO,CAAA,EAAG,IAAI,CAAC,eAAe,EAAE,CAAA,EAAG,SAAS,EAAE;IAChD;AAEA;;;;;;AAMG;IACH,MAAM,KAAK,CAAC,KAAiB,EAAA;;QAE3B,MAAM,SAAS,GAAG,MAAM,KAAK,CAAC,KAAK,CAAC,KAAK,CAAC;;QAG1C,MA
AM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;QAC9C,IAAI,MAAM,EAAE;YACV,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;YACpC,MAAM,CAAC,QAAQ,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc;YACpD,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC;AACpD,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;;AAGvD,YAAA,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE;AACnC,YAAA,IAAI,KAAK,CAAC,SAAS,CAAC,EAAE;AACpB,gBAAA,KAAK,CAAC,SAAS,CAAC,GAAG,MAAM,CAAC,QAAQ;AAClC,gBAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC5D;QACF;;AAGA,QAAA,IAAI,CAAC,WAAW,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAI;AAC/C,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,sCAAA,EAAyC,SAAS,CAAA,CAAA,CAAG,EACpD,GAAa,CAAC,OAAO,CACvB;AACH,QAAA,CAAC,CAAC;AAEF,QAAA,OAAO,SAAS;IAClB;AAEA;;;;;;AAMG;AACH,IAAA,MAAM,KAAK,CAAC,SAAiB,EAAE,OAAe,EAAA;;AAE5C,QAAA,MAAM,IAAI,CAAC,YAAY,CAAC,SAAS,CAAC;;QAGlC,MAAM,KAAK,CAAC,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC;;QAGrC,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAI;AACrC,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,4CAAA,EAA+C,SAAS,CAAA,CAAA,CAAG,EAC1D,GAAa,CAAC,OAAO,CACvB;AACH,QAAA,CAAC,CAAC;IACJ;AAEA;;;;;;;AAOG;AACH,IAAA,MAAM,UAAU,CACd,SAAiB,EACjB,MAAc,EACd,MAAc,EAAA;;AAGd,QAAA,MAAM,IAAI,CAAC,YAAY,CAAC,SAAS,CAAC;;AAGlC,QAAA,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,SAAS,EAAE,MAAM,EAAE,MAAM,CAAC;AAEhE,QAAA,IAAI,MAAM,CAAC,OAAO,EAAE;;YAElB,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAI;AACrC,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,2CAAA,EAA8C,SAAS,CAAA,CAAA,CAAG,EACzD,GAAa,CAAC,OAAO,CACvB;AACH,YAAA,CAAC,CAAC;QACJ;AAEA,QAAA,OAAO,MAAM;IACf;AAEA;;;;;;;AAOG;AACH,IAAA,MAAM,SAAS,CACb,SAAiB,EACjB,SAAkB,EAClB,OAAgB,EAAA;;AAGhB,QAAA,IAAI,MAAM,GAAG,MAAM,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,EAAE,OAAO,CAAC;QACjE,IAAI,MAAM,EAAE;AACV,YAAA,OAAO,MAAM;QACf;;QAGA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,IAAI;QACb;;AAGA,QAAA,MAAM,GAAG,MAAM,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE
,SAAS,EAAE,OAAO,CAAC;AAC7D,QAAA,OAAO,MAAM;IACf;AAEA;;;;;;AAMG;IACH,MAAM,OAAO,CAAC,SAAiB,EAAA;;QAE7B,IAAI,MAAM,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC;QAC3C,IAAI,MAAM,EAAE;AACV,YAAA,OAAO,MAAM;QACf;;QAGA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,IAAI;QACb;;QAGA,MAAM,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC;AACvC,QAAA,OAAO,MAAM;IACf;AAEA;;;;;;;AAOG;AACH,IAAA,MAAM,MAAM,CACV,SAAiB,EACjB,OAAe,EACf,UAAmB,EAAA;;AAGnB,QAAA,IAAI,MAAM,GAAG,MAAM,KAAK,CAAC,MAAM,CAAC,SAAS,EAAE,OAAO,EAAE,UAAU,CAAC;QAC/D,IAAI,MAAM,EAAE;AACV,YAAA,OAAO,MAAM;QACf;;QAGA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,IAAI;QACb;AAEA,QAAA,MAAM,GAAG,MAAM,KAAK,CAAC,MAAM,CAAC,SAAS,EAAE,OAAO,EAAE,UAAU,CAAC;AAC3D,QAAA,OAAO,MAAM;IACf;AAEA;;;;;AAKG;IACH,MAAM,IAAI,CAAC,SAAiB,EAAA;;QAE1B,IAAI,MAAM,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,MAAM,EAAE;AACV,YAAA,OAAO,MAAM;QACf;;QAGA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,IAAI;QACb;QAEA,MAAM,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC;AACpC,QAAA,OAAO,MAAM;IACf;AAEA;;;;AAIG;IACH,MAAM,UAAU,CAAC,SAAiB,EAAA;;AAEhC,QAAA,MAAM,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC;;QAG7B,MAAM,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC;AAC1C,QAAA,IAAI;YACF,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;AAC/C,gBAAA,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE,IAAI,CAAC,MAAM;AAClB,aAAA,CAAC;AAEF,YAAA,IAAI,UAAU,IAAI,UAAU,CAAC,QAAQ,EAAE;;AAErC,gBAAA,IAAI;AACF,oBAAA,MAAM,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,QAAkB,CAAC;gBACtE;gBAAE,OAAO,GAAG,EAAE;AACZ,oBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,qCAAA,EAAwC,SAAS,CAAA,CAAA,CAAG,EACnD,GAAa,CAAC,OAAO,CACvB;gBACH;;gBAGA,MAAM,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;YACzC;QACF;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,2CAAA,EAA8C,SAAS,CAAA,CAAA,CAAG,EACzD,GAAa,CAAC,OAAO,CACvB;QACH;IACF;AAEA;;;;;;;;;;;;;;;;;;AAkBG;AACH,IAAA,MAAM,SAAS,GAAA;;AAEb,QAAA,MAAM,YAAY,GAAG,MAAM,KAAK,CAAC,IAAI,EAAE;A
ACvC,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,CAAS,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;;;QAI3D,IAAI,YAAY,GAAa,EAAE;AAC/B,QAAA,IAAI;AACF,YAAA,YAAY,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,sBAAsB,CACxD,IAAI,CAAC,cAAc,CACpB;QACH;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,kDAAA,EAAqD,IAAI,CAAC,cAAc,GAAG,EAC1E,GAAa,CAAC,OAAO,CACvB;QACH;AACA,QAAA,MAAM,cAAc,GAAG,IAAI,GAAG,CAAC,YAAY,CAAC;;;;QAK5C,IAAI,eAAe,GAAa,EAAE;AAClC,QAAA,IAAI;YACF,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;gBACjD,cAAc,EAAE,IAAI,CAAC,cAAc;gBACnC,IAAI,EAAE,IAAI,CAAC,MAAM;AAClB,aAAA,CAAC;AACF,YAAA,eAAe,GAAG;iBACf,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,OAAiB;AAC9B,iBAAA,MAAM,CAAC,CAAC,GAAG,KAAK,GAAG,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrD;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,yDAAA,EAA4D,IAAI,CAAC,cAAc,GAAG,EACjF,GAAa,CAAC,OAAO,CACvB;QACH;;;AAIA,QAAA,MAAM,UAAU,GAAG,CAAC,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,YAAY,EAAE,GAAG,eAAe,CAAC,CAAC,CAAC;AACtE,QAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAC3B,YAAA,OAAO,YAAY;QACrB;AAEA,QAAA,IAAI;YACF,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;AAC3C,gBAAA,OAAO,EAAE,EAAE,GAAG,EAAE,UAAU,EAAa;gBACvC,IAAI,EAAE,IAAI,CAAC,MAAM;AAClB,aAAA,CAAC;AAEF,YAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;AACxB,gBAAA,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAC7B,sBAAI,IAAI,CAAC,QAAoC,CAAC;sBAC5C,SAAS;;gBAGb,IAAI,iBAAiB,EAAE;AACrB,oBAAA,IAAI,IAAI,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;wBAC/B;oBACF;AACA,oBAAA,IAAI,CAAC,GAAG,CAAC,iBAAiB,CAAC;oBAC3B,YAAY,CAAC,IAAI,CAAC;AAChB,wBAAA,EAAE,EAAE,iBAAiB;AACrB,wBAAA,IAAI,EAAG,IAAI,CAAC,QAAmB,IAAI,SAAS;AAC5C,wBAAA,IAAI,EAAG,IAAI,CAAC,IAAe,IAAI,YAAY;wBAC3C,MAAM,EACF,IAAI,CAAC;AACJ,6BAAA,cAAyB,IAAI,SAAS;AAC3C,wBAAA,UAAU,EAAE,CAAC;AACb,wBAAA,UAAU,EAAG,IAAI,CAAC,KAAgB,IAAI,CAAC;wBACvC,SAAS,EAAE,IAAI,CAAC;8BACZ,IAAI,IAAI,CAAC,IAAI,CAAC,SAAmB,CAAC,CAAC,OAAO;AAC5C,8BAAE,IAAI,CAAC,GAAG,EAAE;wBACd,MAAM,EAAE,IAAI,CAAC,OAAiB;wBAC9B,MAAM,EAAE,IAAI,CAAC,MAAM;wBACnB,cAAc,EAAE,IAAI,CAAC,cAAc;AACpC,qBAAA,CAAC;oBACF;gBACF;;AAGA,gBAAA,MAAM
,MAAM,GAAG,IAAI,CAAC,OAAiB;gBACrC,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,MAAM,CAAA,CAAE,CAAC,EAAE;oBAC9B;gBACF;AACA,gBAAA,IAAI,CAAC,GAAG,CAAC,QAAQ,MAAM,CAAA,CAAE,CAAC;;AAG1B,gBAAA,MAAM,OAAO,GAAG,IAAI,CAAC,OAA6B;AAClD,gBAAA,MAAM,iBAAiB,GAAG,CAAC,CAAE,IAAI,CAAC;AAC/B,qBAAA,cAAc;gBACjB,IAAI,MAAM,GAAG,YAAY;AACzB,gBAAA,IAAI,OAAO,KAAK,UAAU,EAAE;oBAC1B,MAAM,GAAG,OAAO;gBAClB;qBAAO,IAAI,iBAAiB,EAAE;oBAC5B,MAAM,GAAG,eAAe;gBAC1B;AAEA,gBAAA,MAAM,OAAO,GAAG,CAAC,EAAE,IAAI,CAAC,IAAI,IAAK,IAAI,CAAC,IAAe,CAAC,MAAM,GAAG,CAAC,CAAC;gBAEjE,YAAY,CAAC,IAAI,CAAC;oBAChB,EAAE,EAAE,CAAA,KAAA,EAAQ,MAAM,CAAA,CAAE;AACpB,oBAAA,IAAI,EAAG,IAAI,CAAC,QAAmB,IAAI,SAAS;AAC5C,oBAAA,IAAI,EAAG,IAAI,CAAC,IAAe,IAAI,YAAY;oBAC3C,MAAM;AACN,oBAAA,UAAU,EAAE,CAAC;AACb,oBAAA,UAAU,EAAG,IAAI,CAAC,KAAgB,IAAI,CAAC;oBACvC,SAAS,EAAE,IAAI,CAAC;0BACZ,IAAI,IAAI,CAAC,IAAI,CAAC,SAAmB,CAAC,CAAC,OAAO;AAC5C,0BAAE,IAAI,CAAC,GAAG,EAAE;oBACd,MAAM;oBACN,MAAM,EAAE,IAAI,CAAC,MAAM;oBACnB,cAAc,EAAE,IAAI,CAAC,cAAc;AACnC,oBAAA,cAAc,EAAE,OAAO;AACxB,iBAAA,CAAC;YACJ;QACF;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,4CAAA,EAA+C,IAAI,CAAC,cAAc,GAAG,EACpE,GAAa,CAAC,OAAO,CACvB;QACH;AAEA,QAAA,OAAO,YAAY;IACrB;AAEA;;;;;;AAMG;AACO,IAAA,MAAM,WAAW,CACzB,SAAiB,EACjB,KAAiB,EAAA;AAEjB,QAAA,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE,KAAK,CAAC,IAAI,CAAC;AAC1D,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,EAAE;QAErC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC;YACxC,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,MAAM;YACN,QAAQ;YACR,QAAQ;AACT,SAAA,CAAC;QAEF,MAAM,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC;AAC1C,QAAA,MAAM,IAAI,CAAC,SAAS,CAAC,UAAU,CAC7B;YACE,IAAI,EAAE,IAAI,CAAC,MAAM;AACjB,YAAA,OAAO,EAAE,MAAM;YACf,cAAc,EAAE,IAAI,CAAC,cAAc;YACnC,QAAQ,EAAE,KAAK,CAAC,IAAI;YACpB,QAAQ;AACR,YAAA,IAAI,EAAE,KAAK,CAAC,IAAI,IAAI,YAAY;YAChC,KAAK,EAAE,MAAM,CAAC,MAAM;AACpB,YAAA,MAAM,EAAE,IAAI;AACZ,YAAA,OAAO,EAAE,IAAI,CAAC,eAAe,EAAE;AAC/B,YAAA,MAAM,EAAE,MAAM;AACd,YAAA,KAAK,EAAE,CAAC;AACR,YAAA,QAAQ,EAAE;gBACR,SAAS
;gBACT,cAAc,EAAE,KAAK,CAAC,MAAM;AAC7B,aAAA;AACF,SAAA;yBACgB,IAAI,CACtB;;;;AAKD,QAAA,IAAI;YACF,MAAM,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,cAAc,EAAE;gBAC/D,MAAM;AACP,aAAA,CAAC;QACJ;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,kDAAA,EAAqD,MAAM,CAAA,CAAA,CAAG,EAC7D,GAAa,CAAC,OAAO,CACvB;QACH;;QAGA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;QAC9C,IAAI,MAAM,EAAE;AACV,YAAA,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,MAAM;AAC/B,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;;AAGvD,YAAA,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE;AACnC,YAAA,IAAI,KAAK,CAAC,SAAS,CAAC,EAAE;AACpB,gBAAA,KAAK,CAAC,SAAS,CAAC,GAAG,MAAM,CAAC,QAAQ;AAClC,gBAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YAC5D;QACF;IACF;AAEA;;;;;AAKG;IACO,MAAM,QAAQ,CAAC,SAAiB,EAAA;QACxC,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;QAC9C,IAAI,CAAC,MAAM,EAAE;YACX;QACF;AAEA,QAAA,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,CAAC;AACnD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;AACpE,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,EAAE;QAErC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC;YACxC,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,MAAM;YACN,QAAQ;YACR,QAAQ;AACT,SAAA,CAAC;;QAGF,MAAM,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC;AAC1C,QAAA,IAAI;AACF,YAAA,MAAM,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC;AAC9B,gBAAA,OAAO,EAAE,MAAM;gBACf,KAAK,EAAE,MAAM,CAAC,MAAM;gBACpB,QAAQ;AACT,aAAA,CAAC;QACJ;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,0CAAA,EAA6C,SAAS,CAAA,CAAA,CAAG,EACxD,GAAa,CAAC,OAAO,CACvB;QACH;IACF;AAEA;;;;;;;AAOG;IACO,MAAM,aAAa,CAC3B,SAAiB,EAAA;QAEjB,MAAM,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC;AAE1C,QAAA,IAAI;;YAEF,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;AAC/C,gBAAA,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE,IAAI,CAAC,MAAM;AAClB,aAAA,CAAC;YAEF,IAAI,CAAC,UAAU,IAAI,CAAC,UAAU,CAAC,QAAQ,EAAE;AACvC,gBAAA,OAAO,IAAI;YACb;;AAGA,YAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,EAAE,CAAC,aAAa,CAAC,UAAU,CAAC,QAAkB,
CAAC;YACzE,MAAM,MAAM,GAAa,EAAE;AAC3B,YAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;gBAChC,MAAM,CAAC,IAAI,CACT,MAAM,CAAC,QAAQ,CAAC,KAAK;AACnB,sBAAE;sBACA,MAAM,CAAC,IAAI,CAAC,KAA8B,CAAC,CAChD;YACH;AACA,YAAA,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC;;AAGvD,YAAA,MAAM,QAAQ,GAAoB;AAChC,gBAAA,EAAE,EAAE,SAAS;AACb,gBAAA,IAAI,EAAG,UAAU,CAAC,QAAmB,IAAI,SAAS;AAClD,gBAAA,IAAI,EAAG,UAAU,CAAC,IAAe,IAAI,YAAY;gBACjD,MAAM,EACF,UAAU,CAAC;AACV,qBAAA,cAAyB,IAAI,SAAS;gBAC3C,UAAU,EAAE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM;gBACtC,UAAU,EAAE,OAAO,CAAC,MAAM;gBAC1B,SAAS,EAAE,UAAU,CAAC;sBAClB,IAAI,IAAI,CAAC,UAAU,CAAC,SAAmB,CAAC,CAAC,OAAO;AAClD,sBAAE,IAAI,CAAC,GAAG,EAAE;gBACd,MAAM;gBACN,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,cAAc,EAAE,IAAI,CAAC,cAAc;aACpC;;AAGD,YAAA,MAAM,MAAM,GAAgB,EAAE,OAAO,EAAE,QAAQ,EAAE;AACjD,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;;AAGvD,YAAA,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE;AACnC,YAAA,KAAK,CAAC,SAAS,CAAC,GAAG,QAAQ;AAC3B,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;AAE1D,YAAA,OAAO,MAAM;QACf;QAAE,OAAO,GAAG,EAAE;AACZ,YAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CACd,CAAA,sCAAA,EAAyC,SAAS,CAAA,CAAA,CAAG,EACpD,GAAa,CAAC,OAAO,CACvB;AACD,YAAA,OAAO,IAAI;QACb;IACF;AAEA;;;;;;AAMG;IACO,MAAM,YAAY,CAAC,SAAiB,EAAA;QAC5C,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;QAChD,IAAI,QAAQ,EAAE;YACZ;QACF;;QAGA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,MAAM,IAAI,KAAK,CAAC,YAAY,SAAS,CAAA,0BAAA,CAA4B,CAAC;QACpE;IACF;AACD;;;;;"}