@aj-archipelago/cortex 1.4.21 → 1.4.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/FILE_SYSTEM_DOCUMENTATION.md +116 -48
  2. package/config.js +27 -0
  3. package/lib/fileUtils.js +226 -201
  4. package/lib/requestExecutor.js +3 -2
  5. package/lib/util.js +71 -1
  6. package/package.json +1 -1
  7. package/pathways/image_flux.js +8 -2
  8. package/pathways/image_qwen.js +1 -1
  9. package/pathways/system/entity/files/sys_read_file_collection.js +13 -11
  10. package/pathways/system/entity/files/sys_update_file_metadata.js +16 -7
  11. package/pathways/system/entity/sys_entity_agent.js +8 -6
  12. package/pathways/system/entity/tools/sys_tool_codingagent.js +4 -4
  13. package/pathways/system/entity/tools/sys_tool_editfile.js +27 -22
  14. package/pathways/system/entity/tools/sys_tool_file_collection.js +15 -10
  15. package/pathways/system/entity/tools/sys_tool_image.js +5 -5
  16. package/pathways/system/entity/tools/sys_tool_image_gemini.js +1 -1
  17. package/pathways/system/entity/tools/sys_tool_readfile.js +4 -4
  18. package/pathways/system/entity/tools/sys_tool_slides_gemini.js +1 -1
  19. package/pathways/system/entity/tools/sys_tool_video_veo.js +1 -1
  20. package/pathways/system/entity/tools/sys_tool_view_image.js +10 -5
  21. package/pathways/system/workspaces/run_workspace_agent.js +4 -1
  22. package/pathways/video_seedance.js +2 -0
  23. package/server/executeWorkspace.js +45 -2
  24. package/server/pathwayResolver.js +18 -0
  25. package/server/plugins/claude3VertexPlugin.js +2 -6
  26. package/server/plugins/claude4VertexPlugin.js +5 -10
  27. package/server/plugins/gemini3ReasoningVisionPlugin.js +0 -2
  28. package/server/plugins/grokResponsesPlugin.js +3 -19
  29. package/server/plugins/grokVisionPlugin.js +3 -18
  30. package/server/plugins/modelPlugin.js +3 -0
  31. package/server/plugins/openAiVisionPlugin.js +3 -18
  32. package/server/plugins/replicateApiPlugin.js +182 -101
  33. package/server/resolver.js +32 -3
  34. package/server/typeDef.js +10 -1
  35. package/test.log +39427 -0
  36. package/tests/integration/features/tools/fileCollection.test.js +254 -248
  37. package/tests/integration/features/tools/fileOperations.test.js +131 -81
  38. package/tests/integration/graphql/async/stream/agentic.test.js +1 -1
  39. package/tests/integration/graphql/async/stream/vendors/claude_streaming.test.js +3 -4
  40. package/tests/integration/graphql/async/stream/vendors/gemini_streaming.test.js +3 -4
  41. package/tests/integration/graphql/async/stream/vendors/grok_streaming.test.js +3 -4
  42. package/tests/integration/graphql/async/stream/vendors/openai_streaming.test.js +5 -5
  43. package/tests/unit/core/fileCollection.test.js +86 -25
  44. package/pathways/system/workspaces/run_workspace_research_agent.js +0 -27
package/lib/util.js CHANGED
@@ -294,6 +294,75 @@ function removeImageAndFileFromMessage(message) {
  return modifiedMessage;
  }
 
+ /**
+ * Recursively sanitizes base64 data in objects/arrays to prevent logging large base64 strings
+ * Replaces base64 data with a placeholder string
+ */
+ function sanitizeBase64(obj) {
+ if (obj === null || obj === undefined) {
+ return obj;
+ }
+
+ // Handle strings - check for base64 data URLs or long base64 strings
+ if (typeof obj === 'string') {
+ // Check if it's a data URL with base64
+ if (obj.startsWith('data:') && obj.includes('base64,')) {
+ return '* base64 data truncated for log *';
+ }
+ // Check if it's a long base64 string (likely base64 if > 100 chars and matches base64 pattern)
+ if (obj.length > 100 && /^[A-Za-z0-9+/=]+$/.test(obj) && obj.length % 4 === 0) {
+ return '* base64 data truncated for log *';
+ }
+ return obj;
+ }
+
+ // Handle arrays
+ if (Array.isArray(obj)) {
+ return obj.map(item => sanitizeBase64(item));
+ }
+
+ // Handle objects
+ if (typeof obj === 'object') {
+ const sanitized = {};
+ for (const [key, value] of Object.entries(obj)) {
+ // Special handling for known base64 fields
+ if (key === 'data' && typeof value === 'string' && value.length > 50) {
+ // Check if it looks like base64
+ if (/^[A-Za-z0-9+/=]+$/.test(value) && value.length % 4 === 0) {
+ sanitized[key] = '* base64 data truncated for log *';
+ continue;
+ }
+ }
+ // Handle image_url.url with base64
+ if (key === 'url' && typeof value === 'string' && value.startsWith('data:') && value.includes('base64,')) {
+ sanitized[key] = '* base64 data truncated for log *';
+ continue;
+ }
+ // Handle source.data (Claude format)
+ if (key === 'source' && typeof value === 'object' && value?.type === 'base64' && value?.data) {
+ sanitized[key] = {
+ ...value,
+ data: '* base64 data truncated for log *'
+ };
+ continue;
+ }
+ // Handle inlineData.data (Gemini format)
+ if (key === 'inlineData' && typeof value === 'object' && value?.data) {
+ sanitized[key] = {
+ ...value,
+ data: '* base64 data truncated for log *'
+ };
+ continue;
+ }
+ // Recursively sanitize nested objects
+ sanitized[key] = sanitizeBase64(value);
+ }
+ return sanitized;
+ }
+
+ return obj;
+ }
+
  export {
  getUniqueId,
  getSearchResultId,
@@ -303,5 +372,6 @@ export {
  chatArgsHasType,
  convertSrtToText,
  alignSubtitles,
- removeOldImageAndFileContent
+ removeOldImageAndFileContent,
+ sanitizeBase64
  };
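A minimal usage sketch for the new sanitizeBase64 export (the logger call, import paths, and message shape below are illustrative assumptions, not taken from this diff):

import logger from './logger.js';
import { sanitizeBase64 } from './util.js';

// OpenAI-style chat message carrying an inline base64 image
const message = {
    role: 'user',
    content: [
        { type: 'text', text: 'What is in this image?' },
        { type: 'image_url', image_url: { url: 'data:image/png;base64,iVBORw0KGgoAAA...' } }
    ]
};

// Sanitize before logging so large base64 payloads never reach the log output
logger.debug(JSON.stringify(sanitizeBase64(message)));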
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aj-archipelago/cortex",
- "version": "1.4.21",
+ "version": "1.4.23",
  "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
  "private": false,
  "repository": {
package/pathways/image_flux.js CHANGED
@@ -13,7 +13,13 @@ export default {
  output_format: "webp",
  output_quality: 80,
  steps: 4,
- input_image: "", // URL to input image for models that support it
- input_image_2: "", // URL to second input image for models that support it
+ input_image: "", // URL to a single input image (primary field for models that support image input)
+ input_image_1: "", // URL to the first input image when providing multiple input images
+ input_image_2: "", // URL to the second input image when providing multiple input images
+ input_image_3: "", // URL to the third input image when providing multiple input images
+ input_images: { type: "array", items: { type: "string" } }, // Array of input image URLs (alternative to input_image_*, max 8 for flux-2-pro)
+ // Flux 2 Pro specific parameters
+ resolution: "1 MP", // Options: "match_input_image", "0.5 MP", "1 MP", "2 MP", "4 MP" (flux-2-pro only)
+ seed: { type: "integer" }, // Optional seed for reproducible results
  },
  };
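An illustrative argument set exercising the new multi-image and Flux 2 Pro fields (only the parameter names and option values come from the config above; the model identifier and prompt field name are assumptions):

const imageArgs = {
    model: 'replicate-flux-2-pro',   // assumed model name for the Flux 2 Pro route
    text: 'Blend the two product shots into a single studio scene',   // prompt field name is an assumption
    input_images: [   // up to 8 URLs for flux-2-pro, per the comment above
        'https://example.com/shot-front.png',
        'https://example.com/shot-side.png'
    ],
    resolution: '2 MP',   // one of: "match_input_image", "0.5 MP", "1 MP", "2 MP", "4 MP"
    seed: 42   // fixed seed for reproducible output
};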
package/pathways/image_qwen.js CHANGED
@@ -3,7 +3,7 @@ export default {
 
  enableDuplicateRequests: false,
  inputParameters: {
- model: "replicate-qwen-image", // Options: "replicate-qwen-image" or "replicate-qwen-image-edit-plus"
+ model: "replicate-qwen-image", // Options: "replicate-qwen-image", "replicate-qwen-image-edit-plus", or "replicate-qwen-image-edit-2511"
  negativePrompt: "",
  width: 1024,
  height: 1024,
package/pathways/system/entity/files/sys_read_file_collection.js CHANGED
@@ -3,39 +3,41 @@
  // File collections are stored in Redis hash maps (FileStoreMap:ctx:<contextId>
  // Returns file collection as JSON array string for backward compatibility with Labeeb
 
- import { loadFileCollection } from '../../../../lib/fileUtils.js';
+ import { loadMergedFileCollection } from '../../../../lib/fileUtils.js';
 
  export default {
  inputParameters: {
- contextId: ``,
- contextKey: ``,
+ agentContext: [
+ { contextId: ``, contextKey: ``, default: true }
+ ],
  useCache: true
  },
  // No format field - returns String directly (like sys_read_memory)
  model: 'oai-gpt4o',
 
  resolver: async (_parent, args, _contextValue, _info) => {
- const { contextId, contextKey = null, useCache = true } = args;
+ const { agentContext } = args;
 
- // Validate that contextId is provided
- if (!contextId) {
+ // Validate that agentContext is provided
+ if (!agentContext || !Array.isArray(agentContext) || agentContext.length === 0) {
  return JSON.stringify({ error: 'Context error' }, null, 2);
  }
 
  try {
- // Load file collection from Redis hash maps
- const collection = await loadFileCollection(contextId, contextKey, useCache);
+ // Load file collection from Redis hash maps (merged from all agentContext contexts)
+ const collection = await loadMergedFileCollection(agentContext);
 
  // Return as JSON array string for backward compatibility with Labeeb
  // Labeeb expects either: [] or { version: "...", files: [...] }
  // Since we removed versioning, we just return the array directly
- // Ensure we always return a valid JSON array (empty if no files)
- const result = Array.isArray(collection) ? collection : [];
+ // Strip internal _contextId before returning
+ const result = (Array.isArray(collection) ? collection : [])
+ .map(({ _contextId, ...file }) => file);
  return JSON.stringify(result);
  } catch (e) {
  // Log error for debugging
  const logger = (await import('../../../../lib/logger.js')).default;
- logger.warn(`Error loading file collection for contextId ${contextId}: ${e.message}`);
+ logger.warn(`Error loading file collection: ${e.message}`);
  // Return empty array on error for backward compatibility
  return "[]";
  }
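For reference, a hypothetical caller-side value for the new agentContext parameter shared by these pathways (IDs and keys are placeholders; only the field names and the merge/default semantics come from this diff):

const agentContext = [
    // the entry marked default is used as the single write target by pathways that need one context
    { contextId: 'user-1234', contextKey: 'optional-encryption-key', default: true },
    // additional entries are included when loadMergedFileCollection merges collections for reads
    { contextId: 'workspace-5678', contextKey: '', default: false }
];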
package/pathways/system/entity/files/sys_update_file_metadata.js CHANGED
@@ -2,31 +2,40 @@
  // GraphQL pathway for updating file metadata (replaces sys_save_memory for renames and metadata updates)
  // Only updates Cortex-managed fields (displayFilename, tags, notes, etc.), not CFH fields (url, gcs, hash, filename)
 
- import { updateFileMetadata } from '../../../../lib/fileUtils.js';
+ import { updateFileMetadata, getDefaultContext } from '../../../../lib/fileUtils.js';
 
  export default {
  inputParameters: {
- contextId: ``,
+ agentContext: [{ contextId: ``, contextKey: ``, default: true }],
  hash: ``,
  displayFilename: { type: 'string' }, // Optional - no default
  tags: { type: 'array', items: { type: 'string' } }, // Optional - no default
  notes: { type: 'string' }, // Optional - no default
  mimeType: { type: 'string' }, // Optional - no default
  permanent: { type: 'boolean' }, // Optional - no default
- inCollection: { type: 'array', items: { type: 'string' } }, // Optional - array of chat IDs, or can be boolean true/false (normalized to ['*'] or removed)
- contextKey: `` // Optional - context key for encryption
+ inCollection: { type: 'array', items: { type: 'string' } } // Optional - array of chat IDs, or can be boolean true/false (normalized to ['*'] or removed)
  },
  model: 'oai-gpt4o',
  isMutation: true, // Declaratively mark this as a Mutation
 
  resolver: async (_parent, args, _contextValue, _info) => {
- const { contextId, hash, displayFilename, tags, notes, mimeType, permanent, inCollection, contextKey } = args;
+ const { agentContext, hash, displayFilename, tags, notes, mimeType, permanent, inCollection } = args;
+
+ const defaultCtx = getDefaultContext(agentContext);
+ if (!defaultCtx) {
+ return JSON.stringify({
+ success: false,
+ error: 'agentContext with at least one default context is required'
+ });
+ }
+ const contextId = defaultCtx.contextId;
+ const contextKey = defaultCtx.contextKey || null;
 
  // Validate required parameters
- if (!contextId || !hash) {
+ if (!hash) {
  return JSON.stringify({
  success: false,
- error: 'contextId and hash are required'
+ error: 'hash is required'
  });
  }
 
package/pathways/system/entity/sys_entity_agent.js CHANGED
@@ -63,7 +63,9 @@ export default {
  inputParameters: {
  privateData: false,
  chatHistory: [{role: '', content: []}],
- contextId: ``,
+ agentContext: [
+ { contextId: ``, contextKey: ``, default: true }
+ ],
  chatId: ``,
  language: "English",
  aiName: "Jarvis",
@@ -76,8 +78,7 @@
  entityId: ``,
  researchMode: false,
  userInfo: '',
- model: 'oai-gpt41',
- contextKey: ``
+ model: 'oai-gpt41'
  },
  timeout: 600,
 
@@ -547,10 +548,11 @@
  args.chatHistory = args.chatHistory.slice(-20);
  }
 
- // Sync files from chat history to collection and strip file content
- // Files are accessible via tools (AnalyzeFile, ReadTextFile, etc.)
+ // Process files in chat history:
+ // - Files in collection (all agentContext contexts): stripped, accessible via tools
+ // - Files not in collection: left in message for model to see directly
  const { chatHistory: strippedHistory, availableFiles } = await syncAndStripFilesFromChatHistory(
- args.chatHistory, args.contextId, args.contextKey
+ args.chatHistory, args.agentContext
  );
  args.chatHistory = strippedHistory;
 
package/pathways/system/entity/tools/sys_tool_codingagent.js CHANGED
@@ -81,12 +81,12 @@
 
  executePathway: async ({args, resolver}) => {
  try {
- const { codingTask, userMessage, inputFiles, codingTaskKeywords, contextId, contextKey } = args;
+ const { codingTask, userMessage, inputFiles, codingTaskKeywords } = args;
 
  let taskSuffix = "";
  if (inputFiles) {
- if (!contextId) {
- throw new Error("contextId is required when using the 'inputFiles' parameter. Use ListFileCollection or SearchFileCollection to find available files.");
+ if (!args.agentContext || !Array.isArray(args.agentContext) || args.agentContext.length === 0) {
+ throw new Error("agentContext is required when using the 'inputFiles' parameter. Use ListFileCollection or SearchFileCollection to find available files.");
  }
 
  // Resolve file parameters to URLs
@@ -100,7 +100,7 @@
 
  for (const fileRef of fileReferences) {
  // Try to resolve each file reference
- const resolvedUrl = await resolveFileParameter(fileRef, contextId, contextKey);
+ const resolvedUrl = await resolveFileParameter(fileRef, args.agentContext);
  if (resolvedUrl) {
  resolvedUrls.push(resolvedUrl);
  } else {
package/pathways/system/entity/tools/sys_tool_editfile.js CHANGED
@@ -2,7 +2,7 @@
  // Entity tool that modifies existing files by replacing line ranges or exact string matches
  import logger from '../../../../lib/logger.js';
  import { axios } from '../../../../lib/requestExecutor.js';
- import { uploadFileToCloud, findFileInCollection, loadFileCollection, getMimeTypeFromFilename, deleteFileByHash, isTextMimeType, updateFileMetadata, writeFileDataToRedis, invalidateFileCollectionCache, getActualContentMimeType } from '../../../../lib/fileUtils.js';
+ import { uploadFileToCloud, findFileInCollection, loadMergedFileCollection, getDefaultContext, getMimeTypeFromFilename, deleteFileByHash, isTextMimeType, updateFileMetadata, writeFileDataToRedis, invalidateFileCollectionCache, getActualContentMimeType } from '../../../../lib/fileUtils.js';
 
  // Maximum file size for editing (50MB) - prevents memory blowup on huge files
  const MAX_EDITABLE_FILE_SIZE = 50 * 1024 * 1024;
@@ -145,7 +145,19 @@
  ],
 
  executePathway: async ({args, runAllPrompts, resolver}) => {
- const { file, startLine, endLine, content, oldString, newString, replaceAll = false, contextId, contextKey } = args;
+ const { file, startLine, endLine, content, oldString, newString, replaceAll = false, agentContext } = args;
+
+ const defaultCtx = getDefaultContext(agentContext);
+ if (!defaultCtx) {
+ const errorResult = {
+ success: false,
+ error: "agentContext with at least one default context is required"
+ };
+ resolver.tool = JSON.stringify({ toolUsed: "EditFile" });
+ return JSON.stringify(errorResult);
+ }
+ const contextId = defaultCtx.contextId;
+ const contextKey = defaultCtx.contextKey || null;
 
  // Determine which tool was called based on parameters
  const isSearchReplace = oldString !== undefined && newString !== undefined;
@@ -162,14 +174,6 @@
  return JSON.stringify(errorResult);
  }
 
- if (!contextId) {
- const errorResult = {
- success: false,
- error: "contextId is required for file modification"
- };
- resolver.tool = JSON.stringify({ toolUsed: toolName });
- return JSON.stringify(errorResult);
- }
 
  // Validate that we have the right parameters for the tool being used
  if (!isSearchReplace && !isEditByLine) {
@@ -243,7 +247,7 @@
 
  try {
  // Resolve file ID first (needed for serialization)
- const collection = await loadFileCollection(contextId, contextKey, false);
+ const collection = await loadMergedFileCollection(agentContext);
  const foundFile = findFileInCollection(file, collection);
 
  if (!foundFile) {
@@ -281,7 +285,7 @@
  logger.info(`Using cached content for: ${currentFile.displayFilename || file}`);
  } else {
  // First edit in session: load collection and download file
- const currentCollection = await loadFileCollection(contextId, contextKey, false);
+ const currentCollection = await loadMergedFileCollection(agentContext);
  currentFile = findFileInCollection(file, currentCollection);
 
  if (!currentFile) {
@@ -467,7 +471,7 @@
  if (editResult._isLastOperation) {
  // Flush: upload the final content and update metadata
  const { modifiedContent, currentFile, fileIdToUpdate: initialFileId, filename, mimeType,
- modificationInfo, message, contextId: ctxId, contextKey: ctxKey, resolver: res,
+ modificationInfo, message, resolver: res,
  file: fileParam, isEditByLine: isByLine, isSearchReplace: isSR, replaceAll: repAll,
  startLine: sLine, endLine: eLine } = editResult;
 
@@ -482,7 +486,7 @@
  mimeType,
  filename,
  res,
- ctxId
+ contextId
  );
 
  if (!uploadResult || !uploadResult.url) {
@@ -490,7 +494,8 @@
  }
 
  // Update the file collection entry directly (atomic operation)
- const latestCollection = await loadFileCollection(ctxId, ctxKey, false);
+ // Use default context from agentContext for consistency
+ const latestCollection = await loadMergedFileCollection(agentContext);
  let fileToUpdate = latestCollection.find(f => f.id === fileIdToUpdate);
 
  // If not found by ID, try to find by the original file parameter
@@ -512,7 +517,7 @@
  const { getRedisClient } = await import('../../../../lib/fileUtils.js');
  const redisClient = await getRedisClient();
  if (redisClient) {
- const contextMapKey = `FileStoreMap:ctx:${ctxId}`;
+ const contextMapKey = `FileStoreMap:ctx:${contextId}`;
 
  const existingDataStr = await redisClient.hget(contextMapKey, uploadResult.hash);
  let existingData = {};
@@ -541,21 +546,21 @@
  permanent: fileToUpdate.permanent || false
  };
 
- await writeFileDataToRedis(redisClient, contextMapKey, uploadResult.hash, fileData, ctxKey);
+ await writeFileDataToRedis(redisClient, contextMapKey, uploadResult.hash, fileData, contextKey);
 
  if (oldHashToDelete && oldHashToDelete !== uploadResult.hash) {
  await redisClient.hdel(contextMapKey, oldHashToDelete);
  }
 
- invalidateFileCollectionCache(ctxId, ctxKey);
+ invalidateFileCollectionCache(contextId, contextKey);
  }
  } else if (fileToUpdate.hash) {
- await updateFileMetadata(ctxId, fileToUpdate.hash, {
+ await updateFileMetadata(contextId, fileToUpdate.hash, {
  filename: filename,
  lastAccessed: new Date().toISOString()
- }, ctxKey);
+ }, contextKey);
 
- invalidateFileCollectionCache(ctxId, ctxKey);
+ invalidateFileCollectionCache(contextId, contextKey);
  }
 
  // Delete old file version (fire-and-forget)
@@ -563,7 +568,7 @@
  (async () => {
  try {
  logger.info(`Deleting old file version with hash ${oldHashToDelete} (background task)`);
- await deleteFileByHash(oldHashToDelete, res, ctxId);
+ await deleteFileByHash(oldHashToDelete, res, contextId);
  } catch (cleanupError) {
  logger.warn(`Failed to cleanup old file version: ${cleanupError.message}`);
  }
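The EditFile changes above, like several other pathways in this diff, resolve a single write-target context via getDefaultContext while reading through loadMergedFileCollection. The helper's implementation is not shown here; a minimal sketch consistent with its call sites (falsy result when nothing usable is passed, otherwise the entry flagged as default) might look like this, purely as an assumption:

// Hypothetical shape of getDefaultContext, inferred from how callers use it in this diff
function getDefaultContext(agentContext) {
    if (!agentContext || !Array.isArray(agentContext) || agentContext.length === 0) {
        return null;
    }
    // prefer the explicitly flagged default; the real helper may fall back differently
    return agentContext.find(ctx => ctx && ctx.default) || agentContext[0] || null;
}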
package/pathways/system/entity/tools/sys_tool_file_collection.js CHANGED
@@ -3,7 +3,7 @@
  // Uses Redis hash maps (FileStoreMap:ctx:<contextId>) for storage
  // Supports atomic rename/tag/notes updates via UpdateFileMetadata
  import logger from '../../../../lib/logger.js';
- import { addFileToCollection, loadFileCollection, findFileInCollection, deleteFileByHash, updateFileMetadata, invalidateFileCollectionCache } from '../../../../lib/fileUtils.js';
+ import { addFileToCollection, loadFileCollection, loadMergedFileCollection, findFileInCollection, deleteFileByHash, updateFileMetadata, invalidateFileCollectionCache, getDefaultContext } from '../../../../lib/fileUtils.js';
 
  export default {
  prompt: [],
@@ -198,7 +198,12 @@
  ],
 
  executePathway: async ({args, runAllPrompts, resolver}) => {
- const { contextId, contextKey } = args;
+ const defaultCtx = getDefaultContext(args.agentContext);
+ if (!defaultCtx) {
+ throw new Error("agentContext with at least one default context is required");
+ }
+ const contextId = defaultCtx.contextId;
+ const contextKey = defaultCtx.contextKey || null;
 
  // Determine which function was called based on which parameters are present
  // Order matters: check most specific operations first
@@ -359,12 +364,12 @@
  const safeFilterTags = Array.isArray(filterTags) ? filterTags : [];
  const queryLower = query.toLowerCase();
 
- // Update lastAccessed for matching files directly (atomic operations)
- const allFiles = await loadFileCollection(contextId, contextKey, false);
+ // Load primary collection for lastAccessed updates (only update files in primary context)
+ const primaryFiles = await loadFileCollection(contextId, contextKey, false);
  const now = new Date().toISOString();
 
- // Find matching files and update lastAccessed directly
- for (const file of allFiles) {
+ // Find matching files in primary collection and update lastAccessed directly
+ for (const file of primaryFiles) {
  if (!file.hash) continue;
 
  // Fallback to filename if displayFilename is not set (for files uploaded before displayFilename was added)
@@ -387,8 +392,8 @@
  }
  }
 
- // Reload collection to get results (after update)
- const updatedFiles = await loadFileCollection(contextId, contextKey, false);
+ // Load merged collection for search results (includes all agentContext files)
+ const updatedFiles = await loadMergedFileCollection(args.agentContext);
 
  // Filter and sort results (for display only, not modifying)
  let results = updatedFiles.filter(file => {
@@ -560,8 +565,8 @@
  // List collection (read-only, no locking needed)
  const { tags: filterTags = [], sortBy = 'date', limit = 50 } = args;
 
- // Use useCache: false to ensure we get the latest file data (important after edits)
- const collection = await loadFileCollection(contextId, contextKey, false);
+ // Use merged collection to include files from all agentContext contexts
+ const collection = await loadMergedFileCollection(args.agentContext);
  let results = collection;
 
  // Filter by tags if provided
package/pathways/system/entity/tools/sys_tool_image.js CHANGED
@@ -93,9 +93,9 @@
  let model = "replicate-seedream-4";
  let prompt = args.detailedInstructions || "";
 
- // If we have input images, use the flux-kontext-max model
+ // If we have input images, use the qwen-image-edit-2511 model
  if (args.inputImages && Array.isArray(args.inputImages) && args.inputImages.length > 0) {
- model = "replicate-qwen-image-edit-plus";
+ model = "replicate-qwen-image-edit-2511";
  }
 
  pathwayResolver.tool = JSON.stringify({ toolUsed: "image" });
@@ -113,7 +113,7 @@
 
  for (let i = 0; i < imagesToProcess.length; i++) {
  const imageRef = imagesToProcess[i];
- const resolved = await resolveFileParameter(imageRef, args.contextId, args.contextKey);
+ const resolved = await resolveFileParameter(imageRef, args.agentContext);
  if (!resolved) {
  throw new Error(`File not found: "${imageRef}". Use ListFileCollection or SearchFileCollection to find available files.`);
  }
@@ -139,8 +139,8 @@
  params.input_image_3 = resolvedInputImages[2];
  }
 
- // Set default aspectRatio for qwen-image-edit-plus model
- if (model === "replicate-qwen-image-edit-plus") {
+ // Set default aspectRatio for qwen-image-edit-2511 model
+ if (model === "replicate-qwen-image-edit-2511") {
  params.aspectRatio = "match_input_image";
  }
 
package/pathways/system/entity/tools/sys_tool_image_gemini.js CHANGED
@@ -109,7 +109,7 @@
 
  for (let i = 0; i < imagesToProcess.length; i++) {
  const imageRef = imagesToProcess[i];
- const resolved = await resolveFileParameter(imageRef, args.contextId, args.contextKey, { preferGcs: true });
+ const resolved = await resolveFileParameter(imageRef, args.agentContext, { preferGcs: true });
  if (!resolved) {
  throw new Error(`File not found: "${imageRef}". Use ListFileCollection or SearchFileCollection to find available files.`);
  }
package/pathways/system/entity/tools/sys_tool_readfile.js CHANGED
@@ -135,20 +135,20 @@
  const MAX_CHARS = 100000;
  const MAX_LINES = 1000;
 
- let { cloudUrl, file, startChar, endChar, startLine, endLine, contextId, contextKey } = args;
+ let { cloudUrl, file, startChar, endChar, startLine, endLine } = args;
 
  // If file parameter is provided, resolve it to a URL using the common utility
  if (file) {
- if (!contextId) {
+ if (!args.agentContext || !Array.isArray(args.agentContext) || args.agentContext.length === 0) {
  const errorResult = {
  success: false,
- error: "contextId is required when using the 'file' parameter. Use ListFileCollection or SearchFileCollection to find available files."
+ error: "agentContext is required when using the 'file' parameter. Use ListFileCollection or SearchFileCollection to find available files."
  };
  resolver.tool = JSON.stringify({ toolUsed: "ReadFile" });
  return JSON.stringify(errorResult);
  }
  // Use useCache: false to ensure we get the latest file data (important after edits)
- const resolvedUrl = await resolveFileParameter(file, contextId, contextKey, { useCache: false });
+ const resolvedUrl = await resolveFileParameter(file, args.agentContext, { useCache: false });
  if (!resolvedUrl) {
  const errorResult = {
  success: false,
package/pathways/system/entity/tools/sys_tool_slides_gemini.js CHANGED
@@ -80,7 +80,7 @@
 
  for (let i = 0; i < imagesToProcess.length; i++) {
  const imageRef = imagesToProcess[i];
- const resolved = await resolveFileParameter(imageRef, args.contextId, args.contextKey, { preferGcs: true });
+ const resolved = await resolveFileParameter(imageRef, args.agentContext, { preferGcs: true });
  if (!resolved) {
  throw new Error(`File not found: "${imageRef}". Use ListFileCollection or SearchFileCollection to find available files.`);
  }
package/pathways/system/entity/tools/sys_tool_video_veo.js CHANGED
@@ -128,7 +128,7 @@
  throw new Error("contextId is required when using the 'inputImage' parameter. Use ListFileCollection or SearchFileCollection to find available files.");
  }
 
- const resolved = await resolveFileParameter(args.inputImage, args.contextId, args.contextKey, { preferGcs: true });
+ const resolved = await resolveFileParameter(args.inputImage, args.agentContext, { preferGcs: true });
  if (!resolved) {
  throw new Error(`File not found: "${args.inputImage}". Use ListFileCollection or SearchFileCollection to find available files.`);
  }
package/pathways/system/entity/tools/sys_tool_view_image.js CHANGED
@@ -1,7 +1,7 @@
  // sys_tool_view_image.js
  // Tool pathway that allows agents to view image files from the file collection
  import logger from '../../../../lib/logger.js';
- import { loadFileCollection, findFileInCollection, ensureShortLivedUrl } from '../../../../lib/fileUtils.js';
+ import { loadMergedFileCollection, findFileInCollection, ensureShortLivedUrl, getDefaultContext } from '../../../../lib/fileUtils.js';
  import { config } from '../../../../config.js';
 
  export default {
@@ -34,15 +34,19 @@
  },
 
  executePathway: async ({args, runAllPrompts, resolver}) => {
- const { files, contextId, contextKey } = args;
+ const { files } = args;
 
  if (!files || !Array.isArray(files) || files.length === 0) {
  throw new Error("Files parameter is required and must be a non-empty array");
  }
 
+ if (!args.agentContext || !Array.isArray(args.agentContext) || args.agentContext.length === 0) {
+ throw new Error("agentContext is required");
+ }
+
  try {
- // Load the file collection
- const collection = await loadFileCollection(contextId, contextKey, true);
+ // Load the file collection (merged from all agentContext contexts)
+ const collection = await loadMergedFileCollection(args.agentContext);
 
  const imageUrls = [];
  const errors = [];
@@ -70,7 +74,8 @@
 
  // Resolve to short-lived URL if possible
  const fileHandlerUrl = config.get('whisperMediaApiUrl');
- const fileWithShortLivedUrl = await ensureShortLivedUrl(foundFile, fileHandlerUrl, contextId);
+ const defaultCtx = getDefaultContext(args.agentContext);
+ const fileWithShortLivedUrl = await ensureShortLivedUrl(foundFile, fileHandlerUrl, defaultCtx?.contextId || null);
 
  // Add to imageUrls array
  imageUrls.push({
package/pathways/system/workspaces/run_workspace_agent.js CHANGED
@@ -6,8 +6,11 @@ export default {
 
  inputParameters: {
  model: "oai-gpt41",
- aiStyle: "OpenAI",
  chatHistory: [{role: '', content: []}],
+ researchMode: false,
+ agentContext: [
+ { contextId: "", contextKey: "", default: true }
+ ]
  },
  timeout: 600,
 
package/pathways/video_seedance.js CHANGED
@@ -11,6 +11,8 @@
  image: "",
  camera_fixed: false,
  seed: -1,
+ generate_audio: false,
+ last_frame_image: "",
  },
 
  timeout: 60 * 30, // 30 minutes
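An illustrative input for the updated video_seedance pathway using the two new parameters (the prompt field name is an assumption; the other field names come from the hunk above):

const seedanceArgs = {
    prompt: 'Slow dolly-in on a rainy neon street',              // prompt field name is an assumption
    image: 'https://example.com/first-frame.png',                // existing start-frame parameter
    last_frame_image: 'https://example.com/last-frame.png',      // new: optional end-frame reference
    generate_audio: true,                                        // new: request generated audio with the clip
    camera_fixed: false,
    seed: -1                                                     // -1 presumably leaves the seed random
};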