@aj-archipelago/cortex 1.4.22 → 1.4.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/FILE_SYSTEM_DOCUMENTATION.md +116 -48
  2. package/config.js +9 -0
  3. package/lib/fileUtils.js +329 -214
  4. package/package.json +1 -1
  5. package/pathways/system/entity/files/sys_read_file_collection.js +22 -11
  6. package/pathways/system/entity/files/sys_update_file_metadata.js +18 -8
  7. package/pathways/system/entity/sys_entity_agent.js +8 -6
  8. package/pathways/system/entity/tools/sys_tool_codingagent.js +4 -4
  9. package/pathways/system/entity/tools/sys_tool_editfile.js +35 -24
  10. package/pathways/system/entity/tools/sys_tool_file_collection.js +93 -36
  11. package/pathways/system/entity/tools/sys_tool_image.js +1 -1
  12. package/pathways/system/entity/tools/sys_tool_image_gemini.js +1 -1
  13. package/pathways/system/entity/tools/sys_tool_readfile.js +4 -4
  14. package/pathways/system/entity/tools/sys_tool_slides_gemini.js +1 -1
  15. package/pathways/system/entity/tools/sys_tool_video_veo.js +1 -1
  16. package/pathways/system/entity/tools/sys_tool_view_image.js +10 -5
  17. package/pathways/system/workspaces/run_workspace_agent.js +4 -1
  18. package/pathways/video_seedance.js +2 -0
  19. package/server/executeWorkspace.js +45 -2
  20. package/server/pathwayResolver.js +18 -0
  21. package/server/plugins/replicateApiPlugin.js +18 -0
  22. package/server/typeDef.js +10 -1
  23. package/tests/integration/features/tools/fileCollection.test.js +254 -248
  24. package/tests/integration/features/tools/fileOperations.test.js +131 -81
  25. package/tests/integration/graphql/async/stream/vendors/claude_streaming.test.js +3 -4
  26. package/tests/integration/graphql/async/stream/vendors/gemini_streaming.test.js +3 -4
  27. package/tests/integration/graphql/async/stream/vendors/grok_streaming.test.js +3 -4
  28. package/tests/integration/graphql/async/stream/vendors/openai_streaming.test.js +5 -5
  29. package/tests/unit/core/fileCollection.test.js +86 -25
  30. package/pathways/system/workspaces/run_workspace_research_agent.js +0 -27
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aj-archipelago/cortex",
3
- "version": "1.4.22",
3
+ "version": "1.4.24",
4
4
  "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
5
5
  "private": false,
6
6
  "repository": {
@@ -3,39 +3,50 @@
3
3
  // File collections are stored in Redis hash maps (FileStoreMap:ctx:<contextId>
4
4
  // Returns file collection as JSON array string for backward compatibility with Labeeb
5
5
 
6
- import { loadFileCollection } from '../../../../lib/fileUtils.js';
6
+ import { loadMergedFileCollection } from '../../../../lib/fileUtils.js';
7
7
 
8
8
  export default {
9
9
  inputParameters: {
10
- contextId: ``,
11
- contextKey: ``,
10
+ agentContext: [
11
+ { contextId: ``, contextKey: ``, default: true }
12
+ ],
12
13
  useCache: true
13
14
  },
14
15
  // No format field - returns String directly (like sys_read_memory)
15
16
  model: 'oai-gpt4o',
16
17
 
17
18
  resolver: async (_parent, args, _contextValue, _info) => {
18
- const { contextId, contextKey = null, useCache = true } = args;
19
+ let { agentContext } = args;
19
20
 
20
- // Validate that contextId is provided
21
- if (!contextId) {
21
+ // Backward compatibility: if contextId is provided without agentContext, create agentContext
22
+ if ((!agentContext || !Array.isArray(agentContext) || agentContext.length === 0) && args.contextId) {
23
+ agentContext = [{
24
+ contextId: args.contextId,
25
+ contextKey: args.contextKey || null,
26
+ default: true
27
+ }];
28
+ }
29
+
30
+ // Validate that agentContext is provided
31
+ if (!agentContext || !Array.isArray(agentContext) || agentContext.length === 0) {
22
32
  return JSON.stringify({ error: 'Context error' }, null, 2);
23
33
  }
24
34
 
25
35
  try {
26
- // Load file collection from Redis hash maps
27
- const collection = await loadFileCollection(contextId, contextKey, useCache);
36
+ // Load file collection from Redis hash maps (merged from all agentContext contexts)
37
+ const collection = await loadMergedFileCollection(agentContext);
28
38
 
29
39
  // Return as JSON array string for backward compatibility with Labeeb
30
40
  // Labeeb expects either: [] or { version: "...", files: [...] }
31
41
  // Since we removed versioning, we just return the array directly
32
- // Ensure we always return a valid JSON array (empty if no files)
33
- const result = Array.isArray(collection) ? collection : [];
42
+ // Strip internal _contextId before returning
43
+ const result = (Array.isArray(collection) ? collection : [])
44
+ .map(({ _contextId, ...file }) => file);
34
45
  return JSON.stringify(result);
35
46
  } catch (e) {
36
47
  // Log error for debugging
37
48
  const logger = (await import('../../../../lib/logger.js')).default;
38
- logger.warn(`Error loading file collection for contextId ${contextId}: ${e.message}`);
49
+ logger.warn(`Error loading file collection: ${e.message}`);
39
50
  // Return empty array on error for backward compatibility
40
51
  return "[]";
41
52
  }
@@ -2,31 +2,40 @@
2
2
  // GraphQL pathway for updating file metadata (replaces sys_save_memory for renames and metadata updates)
3
3
  // Only updates Cortex-managed fields (displayFilename, tags, notes, etc.), not CFH fields (url, gcs, hash, filename)
4
4
 
5
- import { updateFileMetadata } from '../../../../lib/fileUtils.js';
5
+ import { updateFileMetadata, getDefaultContext } from '../../../../lib/fileUtils.js';
6
6
 
7
7
  export default {
8
8
  inputParameters: {
9
- contextId: ``,
9
+ agentContext: [{ contextId: ``, contextKey: ``, default: true }],
10
10
  hash: ``,
11
11
  displayFilename: { type: 'string' }, // Optional - no default
12
12
  tags: { type: 'array', items: { type: 'string' } }, // Optional - no default
13
13
  notes: { type: 'string' }, // Optional - no default
14
14
  mimeType: { type: 'string' }, // Optional - no default
15
15
  permanent: { type: 'boolean' }, // Optional - no default
16
- inCollection: { type: 'array', items: { type: 'string' } }, // Optional - array of chat IDs, or can be boolean true/false (normalized to ['*'] or removed)
17
- contextKey: `` // Optional - context key for encryption
16
+ inCollection: { type: 'array', items: { type: 'string' } } // Optional - array of chat IDs, or can be boolean true/false (normalized to ['*'] or removed)
18
17
  },
19
18
  model: 'oai-gpt4o',
20
19
  isMutation: true, // Declaratively mark this as a Mutation
21
20
 
22
21
  resolver: async (_parent, args, _contextValue, _info) => {
23
- const { contextId, hash, displayFilename, tags, notes, mimeType, permanent, inCollection, contextKey } = args;
22
+ const { agentContext, hash, displayFilename, tags, notes, mimeType, permanent, inCollection, chatId } = args;
23
+
24
+ const defaultCtx = getDefaultContext(agentContext);
25
+ if (!defaultCtx) {
26
+ return JSON.stringify({
27
+ success: false,
28
+ error: 'agentContext with at least one default context is required'
29
+ });
30
+ }
31
+ const contextId = defaultCtx.contextId;
32
+ const contextKey = defaultCtx.contextKey || null;
24
33
 
25
34
  // Validate required parameters
26
- if (!contextId || !hash) {
35
+ if (!hash) {
27
36
  return JSON.stringify({
28
37
  success: false,
29
- error: 'contextId and hash are required'
38
+ error: 'hash is required'
30
39
  });
31
40
  }
32
41
 
@@ -50,12 +59,13 @@ export default {
50
59
  }
51
60
  // inCollection can be: boolean true/false, or array of chat IDs (e.g., ['*'] for global, ['chat-123'] for specific chat)
52
61
  // Will be normalized by updateFileMetadata: true -> ['*'], false -> undefined (removed), array -> as-is
62
+ // If not provided, will default based on chatId
53
63
  if (inCollection !== undefined && inCollection !== null) {
54
64
  metadata.inCollection = inCollection;
55
65
  }
56
66
 
57
67
  // Update metadata (only Cortex-managed fields)
58
- const success = await updateFileMetadata(contextId, hash, metadata, contextKey);
68
+ const success = await updateFileMetadata(contextId, hash, metadata, contextKey, chatId);
59
69
 
60
70
  if (success) {
61
71
  return JSON.stringify({
@@ -63,7 +63,9 @@ export default {
63
63
  inputParameters: {
64
64
  privateData: false,
65
65
  chatHistory: [{role: '', content: []}],
66
- contextId: ``,
66
+ agentContext: [
67
+ { contextId: ``, contextKey: ``, default: true }
68
+ ],
67
69
  chatId: ``,
68
70
  language: "English",
69
71
  aiName: "Jarvis",
@@ -76,8 +78,7 @@ export default {
76
78
  entityId: ``,
77
79
  researchMode: false,
78
80
  userInfo: '',
79
- model: 'oai-gpt41',
80
- contextKey: ``
81
+ model: 'oai-gpt41'
81
82
  },
82
83
  timeout: 600,
83
84
 
@@ -547,10 +548,11 @@ export default {
547
548
  args.chatHistory = args.chatHistory.slice(-20);
548
549
  }
549
550
 
550
- // Sync files from chat history to collection and strip file content
551
- // Files are accessible via tools (AnalyzeFile, ReadTextFile, etc.)
551
+ // Process files in chat history:
552
+ // - Files in collection (all agentContext contexts): stripped, accessible via tools
553
+ // - Files not in collection: left in message for model to see directly
552
554
  const { chatHistory: strippedHistory, availableFiles } = await syncAndStripFilesFromChatHistory(
553
- args.chatHistory, args.contextId, args.contextKey
555
+ args.chatHistory, args.agentContext, chatId
554
556
  );
555
557
  args.chatHistory = strippedHistory;
556
558
 
@@ -81,12 +81,12 @@ export default {
81
81
 
82
82
  executePathway: async ({args, resolver}) => {
83
83
  try {
84
- const { codingTask, userMessage, inputFiles, codingTaskKeywords, contextId, contextKey } = args;
84
+ const { codingTask, userMessage, inputFiles, codingTaskKeywords } = args;
85
85
 
86
86
  let taskSuffix = "";
87
87
  if (inputFiles) {
88
- if (!contextId) {
89
- throw new Error("contextId is required when using the 'inputFiles' parameter. Use ListFileCollection or SearchFileCollection to find available files.");
88
+ if (!args.agentContext || !Array.isArray(args.agentContext) || args.agentContext.length === 0) {
89
+ throw new Error("agentContext is required when using the 'inputFiles' parameter. Use ListFileCollection or SearchFileCollection to find available files.");
90
90
  }
91
91
 
92
92
  // Resolve file parameters to URLs
@@ -100,7 +100,7 @@ export default {
100
100
 
101
101
  for (const fileRef of fileReferences) {
102
102
  // Try to resolve each file reference
103
- const resolvedUrl = await resolveFileParameter(fileRef, contextId, contextKey);
103
+ const resolvedUrl = await resolveFileParameter(fileRef, args.agentContext);
104
104
  if (resolvedUrl) {
105
105
  resolvedUrls.push(resolvedUrl);
106
106
  } else {
@@ -2,7 +2,7 @@
2
2
  // Entity tool that modifies existing files by replacing line ranges or exact string matches
3
3
  import logger from '../../../../lib/logger.js';
4
4
  import { axios } from '../../../../lib/requestExecutor.js';
5
- import { uploadFileToCloud, findFileInCollection, loadFileCollection, getMimeTypeFromFilename, deleteFileByHash, isTextMimeType, updateFileMetadata, writeFileDataToRedis, invalidateFileCollectionCache, getActualContentMimeType } from '../../../../lib/fileUtils.js';
5
+ import { uploadFileToCloud, findFileInCollection, loadMergedFileCollection, getDefaultContext, getMimeTypeFromFilename, deleteFileByHash, isTextMimeType, updateFileMetadata, writeFileDataToRedis, invalidateFileCollectionCache, getActualContentMimeType } from '../../../../lib/fileUtils.js';
6
6
 
7
7
  // Maximum file size for editing (50MB) - prevents memory blowup on huge files
8
8
  const MAX_EDITABLE_FILE_SIZE = 50 * 1024 * 1024;
@@ -145,7 +145,19 @@ export default {
145
145
  ],
146
146
 
147
147
  executePathway: async ({args, runAllPrompts, resolver}) => {
148
- const { file, startLine, endLine, content, oldString, newString, replaceAll = false, contextId, contextKey } = args;
148
+ const { file, startLine, endLine, content, oldString, newString, replaceAll = false, agentContext, chatId } = args;
149
+
150
+ const defaultCtx = getDefaultContext(agentContext);
151
+ if (!defaultCtx) {
152
+ const errorResult = {
153
+ success: false,
154
+ error: "agentContext with at least one default context is required"
155
+ };
156
+ resolver.tool = JSON.stringify({ toolUsed: "EditFile" });
157
+ return JSON.stringify(errorResult);
158
+ }
159
+ const contextId = defaultCtx.contextId;
160
+ const contextKey = defaultCtx.contextKey || null;
149
161
 
150
162
  // Determine which tool was called based on parameters
151
163
  const isSearchReplace = oldString !== undefined && newString !== undefined;
@@ -162,14 +174,6 @@ export default {
162
174
  return JSON.stringify(errorResult);
163
175
  }
164
176
 
165
- if (!contextId) {
166
- const errorResult = {
167
- success: false,
168
- error: "contextId is required for file modification"
169
- };
170
- resolver.tool = JSON.stringify({ toolUsed: toolName });
171
- return JSON.stringify(errorResult);
172
- }
173
177
 
174
178
  // Validate that we have the right parameters for the tool being used
175
179
  if (!isSearchReplace && !isEditByLine) {
@@ -243,7 +247,7 @@ export default {
243
247
 
244
248
  try {
245
249
  // Resolve file ID first (needed for serialization)
246
- const collection = await loadFileCollection(contextId, contextKey, false);
250
+ const collection = await loadMergedFileCollection(agentContext);
247
251
  const foundFile = findFileInCollection(file, collection);
248
252
 
249
253
  if (!foundFile) {
@@ -281,7 +285,7 @@ export default {
281
285
  logger.info(`Using cached content for: ${currentFile.displayFilename || file}`);
282
286
  } else {
283
287
  // First edit in session: load collection and download file
284
- const currentCollection = await loadFileCollection(contextId, contextKey, false);
288
+ const currentCollection = await loadMergedFileCollection(agentContext);
285
289
  currentFile = findFileInCollection(file, currentCollection);
286
290
 
287
291
  if (!currentFile) {
@@ -467,7 +471,7 @@ export default {
467
471
  if (editResult._isLastOperation) {
468
472
  // Flush: upload the final content and update metadata
469
473
  const { modifiedContent, currentFile, fileIdToUpdate: initialFileId, filename, mimeType,
470
- modificationInfo, message, contextId: ctxId, contextKey: ctxKey, resolver: res,
474
+ modificationInfo, message, resolver: res,
471
475
  file: fileParam, isEditByLine: isByLine, isSearchReplace: isSR, replaceAll: repAll,
472
476
  startLine: sLine, endLine: eLine } = editResult;
473
477
 
@@ -482,7 +486,7 @@ export default {
482
486
  mimeType,
483
487
  filename,
484
488
  res,
485
- ctxId
489
+ contextId
486
490
  );
487
491
 
488
492
  if (!uploadResult || !uploadResult.url) {
@@ -490,7 +494,8 @@ export default {
490
494
  }
491
495
 
492
496
  // Update the file collection entry directly (atomic operation)
493
- const latestCollection = await loadFileCollection(ctxId, ctxKey, false);
497
+ // Use default context from agentContext for consistency
498
+ const latestCollection = await loadMergedFileCollection(agentContext);
494
499
  let fileToUpdate = latestCollection.find(f => f.id === fileIdToUpdate);
495
500
 
496
501
  // If not found by ID, try to find by the original file parameter
@@ -509,10 +514,10 @@ export default {
509
514
 
510
515
  // Write new entry with CFH data (url, gcs, hash) + Cortex metadata
511
516
  if (uploadResult.hash) {
512
- const { getRedisClient } = await import('../../../../lib/fileUtils.js');
517
+ const { getRedisClient, addChatIdToInCollection, getInCollectionValue } = await import('../../../../lib/fileUtils.js');
513
518
  const redisClient = await getRedisClient();
514
519
  if (redisClient) {
515
- const contextMapKey = `FileStoreMap:ctx:${ctxId}`;
520
+ const contextMapKey = `FileStoreMap:ctx:${contextId}`;
516
521
 
517
522
  const existingDataStr = await redisClient.hget(contextMapKey, uploadResult.hash);
518
523
  let existingData = {};
@@ -524,6 +529,12 @@ export default {
524
529
  }
525
530
  }
526
531
 
532
+ // Merge chatId into existing inCollection (reference counting)
533
+ const existingInCollection = fileToUpdate.inCollection || existingData.inCollection;
534
+ const updatedInCollection = existingInCollection
535
+ ? addChatIdToInCollection(existingInCollection, chatId)
536
+ : getInCollectionValue(chatId);
537
+
527
538
  const fileData = {
528
539
  ...existingData,
529
540
  url: uploadResult.url,
@@ -535,27 +546,27 @@ export default {
535
546
  tags: fileToUpdate.tags || [],
536
547
  notes: fileToUpdate.notes || '',
537
548
  mimeType: fileToUpdate.mimeType || mimeType || null,
538
- inCollection: ['*'],
549
+ inCollection: updatedInCollection,
539
550
  addedDate: fileToUpdate.addedDate,
540
551
  lastAccessed: new Date().toISOString(),
541
552
  permanent: fileToUpdate.permanent || false
542
553
  };
543
554
 
544
- await writeFileDataToRedis(redisClient, contextMapKey, uploadResult.hash, fileData, ctxKey);
555
+ await writeFileDataToRedis(redisClient, contextMapKey, uploadResult.hash, fileData, contextKey);
545
556
 
546
557
  if (oldHashToDelete && oldHashToDelete !== uploadResult.hash) {
547
558
  await redisClient.hdel(contextMapKey, oldHashToDelete);
548
559
  }
549
560
 
550
- invalidateFileCollectionCache(ctxId, ctxKey);
561
+ invalidateFileCollectionCache(contextId, contextKey);
551
562
  }
552
563
  } else if (fileToUpdate.hash) {
553
- await updateFileMetadata(ctxId, fileToUpdate.hash, {
564
+ await updateFileMetadata(contextId, fileToUpdate.hash, {
554
565
  filename: filename,
555
566
  lastAccessed: new Date().toISOString()
556
- }, ctxKey);
567
+ }, contextKey);
557
568
 
558
- invalidateFileCollectionCache(ctxId, ctxKey);
569
+ invalidateFileCollectionCache(contextId, contextKey);
559
570
  }
560
571
 
561
572
  // Delete old file version (fire-and-forget)
@@ -563,7 +574,7 @@ export default {
563
574
  (async () => {
564
575
  try {
565
576
  logger.info(`Deleting old file version with hash ${oldHashToDelete} (background task)`);
566
- await deleteFileByHash(oldHashToDelete, res, ctxId);
577
+ await deleteFileByHash(oldHashToDelete, res, contextId);
567
578
  } catch (cleanupError) {
568
579
  logger.warn(`Failed to cleanup old file version: ${cleanupError.message}`);
569
580
  }
@@ -3,7 +3,7 @@
3
3
  // Uses Redis hash maps (FileStoreMap:ctx:<contextId>) for storage
4
4
  // Supports atomic rename/tag/notes updates via UpdateFileMetadata
5
5
  import logger from '../../../../lib/logger.js';
6
- import { addFileToCollection, loadFileCollection, findFileInCollection, deleteFileByHash, updateFileMetadata, invalidateFileCollectionCache } from '../../../../lib/fileUtils.js';
6
+ import { addFileToCollection, loadFileCollection, loadMergedFileCollection, findFileInCollection, deleteFileByHash, updateFileMetadata, invalidateFileCollectionCache, getDefaultContext } from '../../../../lib/fileUtils.js';
7
7
 
8
8
  export default {
9
9
  prompt: [],
@@ -198,7 +198,13 @@ export default {
198
198
  ],
199
199
 
200
200
  executePathway: async ({args, runAllPrompts, resolver}) => {
201
- const { contextId, contextKey } = args;
201
+ const defaultCtx = getDefaultContext(args.agentContext);
202
+ if (!defaultCtx) {
203
+ throw new Error("agentContext with at least one default context is required");
204
+ }
205
+ const contextId = defaultCtx.contextId;
206
+ const contextKey = defaultCtx.contextKey || null;
207
+ const chatId = args.chatId || null;
202
208
 
203
209
  // Determine which function was called based on which parameters are present
204
210
  // Order matters: check most specific operations first
@@ -288,7 +294,7 @@ export default {
288
294
  metadataUpdate.lastAccessed = new Date().toISOString();
289
295
 
290
296
  // Perform the atomic update
291
- const success = await updateFileMetadata(contextId, foundFile.hash, metadataUpdate, contextKey);
297
+ const success = await updateFileMetadata(contextId, foundFile.hash, metadataUpdate, contextKey, chatId);
292
298
 
293
299
  if (!success) {
294
300
  throw new Error(`Failed to update file metadata for "${file}"`);
@@ -337,7 +343,8 @@ export default {
337
343
  hash,
338
344
  fileUrl,
339
345
  resolver,
340
- permanent
346
+ permanent,
347
+ chatId
341
348
  );
342
349
 
343
350
  resolver.tool = JSON.stringify({ toolUsed: "AddFileToCollection" });
@@ -359,12 +366,12 @@ export default {
359
366
  const safeFilterTags = Array.isArray(filterTags) ? filterTags : [];
360
367
  const queryLower = query.toLowerCase();
361
368
 
362
- // Update lastAccessed for matching files directly (atomic operations)
363
- const allFiles = await loadFileCollection(contextId, contextKey, false);
369
+ // Load primary collection for lastAccessed updates (only update files in primary context)
370
+ const primaryFiles = await loadFileCollection(contextId, contextKey, false);
364
371
  const now = new Date().toISOString();
365
372
 
366
- // Find matching files and update lastAccessed directly
367
- for (const file of allFiles) {
373
+ // Find matching files in primary collection and update lastAccessed directly
374
+ for (const file of primaryFiles) {
368
375
  if (!file.hash) continue;
369
376
 
370
377
  // Fallback to filename if displayFilename is not set (for files uploaded before displayFilename was added)
@@ -381,14 +388,15 @@ export default {
381
388
 
382
389
  if (matchesQuery && matchesTags) {
383
390
  // Update lastAccessed directly (atomic operation)
391
+ // Don't pass chatId - we're only updating access time, not changing inCollection
384
392
  await updateFileMetadata(contextId, file.hash, {
385
393
  lastAccessed: now
386
394
  }, contextKey);
387
395
  }
388
396
  }
389
397
 
390
- // Reload collection to get results (after update)
391
- const updatedFiles = await loadFileCollection(contextId, contextKey, false);
398
+ // Load merged collection for search results (includes all agentContext files)
399
+ const updatedFiles = await loadMergedFileCollection(args.agentContext);
392
400
 
393
401
  // Filter and sort results (for display only, not modifying)
394
402
  let results = updatedFiles.filter(file => {
@@ -444,7 +452,8 @@ export default {
444
452
  });
445
453
 
446
454
  } else if (isRemove) {
447
- // Remove file(s) from collection and delete from cloud storage
455
+ // Remove file(s) from this chat's collection (reference counting)
456
+ // Only delete from cloud if no other chats reference the file
448
457
  const { fileIds, fileId } = args;
449
458
 
450
459
  // Normalize input to array
@@ -460,9 +469,9 @@ export default {
460
469
  }
461
470
 
462
471
  let notFoundFiles = [];
463
- let filesToRemove = [];
472
+ let filesToProcess = [];
464
473
 
465
- // Load collection ONCE to find all files and their hashes
474
+ // Load collection ONCE to find all files and their data
466
475
  // Use useCache: false to get fresh data
467
476
  const collection = await loadFileCollection(contextId, contextKey, false);
468
477
 
@@ -474,12 +483,13 @@ export default {
474
483
 
475
484
  if (foundFile) {
476
485
  // Avoid duplicates (by hash since that's the unique key in Redis)
477
- if (!filesToRemove.some(f => f.hash === foundFile.hash)) {
478
- filesToRemove.push({
486
+ if (!filesToProcess.some(f => f.hash === foundFile.hash)) {
487
+ filesToProcess.push({
479
488
  id: foundFile.id,
480
489
  displayFilename: foundFile.displayFilename || foundFile.filename || null,
481
490
  hash: foundFile.hash || null,
482
- permanent: foundFile.permanent ?? false
491
+ permanent: foundFile.permanent ?? false,
492
+ inCollection: foundFile.inCollection || []
483
493
  });
484
494
  }
485
495
  } else {
@@ -487,34 +497,76 @@ export default {
487
497
  }
488
498
  }
489
499
 
490
- if (filesToRemove.length === 0 && notFoundFiles.length > 0) {
500
+ if (filesToProcess.length === 0 && notFoundFiles.length > 0) {
491
501
  throw new Error(`No files found matching: ${notFoundFiles.join(', ')}`);
492
502
  }
493
503
 
494
- // Use the hashes collected from the single collection load
495
- // No need to reload - we already have all the info we need
496
- const hashesToDelete = filesToRemove.filter(f => f.hash);
497
-
498
- // Delete entries directly from hash map (atomic operations)
499
- const { getRedisClient } = await import('../../../../lib/fileUtils.js');
504
+ // Import helpers for reference counting
505
+ const { getRedisClient, removeChatIdFromInCollection } = await import('../../../../lib/fileUtils.js');
500
506
  const redisClient = await getRedisClient();
507
+ const contextMapKey = `FileStoreMap:ctx:${contextId}`;
508
+
509
+ // Track files that will be fully deleted vs just updated
510
+ const filesToFullyDelete = [];
511
+ const filesToUpdate = [];
512
+
513
+ for (const fileInfo of filesToProcess) {
514
+ if (!fileInfo.hash) continue;
515
+
516
+ // Check if file is global ('*') - global files can't be removed per-chat
517
+ const isGlobal = Array.isArray(fileInfo.inCollection) && fileInfo.inCollection.includes('*');
518
+
519
+ if (isGlobal) {
520
+ // Global file - fully remove it (no reference counting for global files)
521
+ filesToFullyDelete.push(fileInfo);
522
+ } else if (!chatId) {
523
+ // No chatId context - fully remove
524
+ filesToFullyDelete.push(fileInfo);
525
+ } else {
526
+ // Remove this chatId from inCollection
527
+ const updatedInCollection = removeChatIdFromInCollection(fileInfo.inCollection, chatId);
528
+
529
+ if (updatedInCollection.length === 0) {
530
+ // No more references - fully delete
531
+ filesToFullyDelete.push(fileInfo);
532
+ } else {
533
+ // Still has references from other chats - just update inCollection
534
+ filesToUpdate.push({ ...fileInfo, updatedInCollection });
535
+ }
536
+ }
537
+ }
538
+
539
+ // Update files that still have references (remove this chatId only)
540
+ for (const fileInfo of filesToUpdate) {
541
+ if (redisClient) {
542
+ try {
543
+ const existingDataStr = await redisClient.hget(contextMapKey, fileInfo.hash);
544
+ if (existingDataStr) {
545
+ const existingData = JSON.parse(existingDataStr);
546
+ existingData.inCollection = fileInfo.updatedInCollection;
547
+ await redisClient.hset(contextMapKey, fileInfo.hash, JSON.stringify(existingData));
548
+ logger.info(`Removed chatId ${chatId} from file: ${fileInfo.displayFilename} (still referenced by: ${fileInfo.updatedInCollection.join(', ')})`);
549
+ }
550
+ } catch (e) {
551
+ logger.warn(`Failed to update inCollection for file ${fileInfo.hash}: ${e.message}`);
552
+ }
553
+ }
554
+ }
555
+
556
+ // Fully delete files with no remaining references
501
557
  if (redisClient) {
502
- const contextMapKey = `FileStoreMap:ctx:${contextId}`;
503
- for (const fileInfo of hashesToDelete) {
558
+ for (const fileInfo of filesToFullyDelete) {
504
559
  await redisClient.hdel(contextMapKey, fileInfo.hash);
505
560
  }
506
561
  }
507
562
 
508
- // Always invalidate cache immediately so list operations reflect removals
509
- // (even if Redis operations failed, cache might be stale)
563
+ // Always invalidate cache immediately so list operations reflect changes
510
564
  invalidateFileCollectionCache(contextId, contextKey);
511
565
 
512
- // Delete files from cloud storage ASYNC (fire and forget, but log errors)
513
- // We do this after updating collection so user gets fast response and files are "gone" from UI immediately
514
- // Use hashes captured inside the lock to ensure we delete the correct files
566
+ // Delete files from cloud storage ASYNC (only for files with no remaining references)
515
567
  // IMPORTANT: Don't delete permanent files from cloud storage - they should persist
516
568
  (async () => {
517
- for (const fileInfo of hashesToDelete) {
569
+ for (const fileInfo of filesToFullyDelete) {
518
570
  // Skip deletion if file is marked as permanent
519
571
  if (fileInfo.permanent) {
520
572
  logger.info(`Skipping cloud deletion for permanent file: ${fileInfo.displayFilename} (hash: ${fileInfo.hash})`);
@@ -522,7 +574,7 @@ export default {
522
574
  }
523
575
 
524
576
  try {
525
- logger.info(`Deleting file from cloud storage: ${fileInfo.displayFilename} (hash: ${fileInfo.hash})`);
577
+ logger.info(`Deleting file from cloud storage (no remaining references): ${fileInfo.displayFilename} (hash: ${fileInfo.hash})`);
526
578
  await deleteFileByHash(fileInfo.hash, resolver, contextId);
527
579
  } catch (error) {
528
580
  logger.warn(`Failed to delete file ${fileInfo.displayFilename} (hash: ${fileInfo.hash}) from cloud storage: ${error?.message || String(error)}`);
@@ -530,8 +582,13 @@ export default {
530
582
  }
531
583
  })().catch(err => logger.error(`Async cloud deletion error: ${err}`));
532
584
 
533
- const removedCount = filesToRemove.length;
534
- const removedFiles = filesToRemove;
585
+ const removedCount = filesToProcess.length;
586
+ const removedFiles = filesToProcess.map(f => ({
587
+ id: f.id,
588
+ displayFilename: f.displayFilename,
589
+ hash: f.hash,
590
+ fullyDeleted: filesToFullyDelete.some(fd => fd.hash === f.hash)
591
+ }));
535
592
 
536
593
  // Get remaining files count after deletion
537
594
  const remainingCollection = await loadFileCollection(contextId, contextKey, false);
@@ -560,8 +617,8 @@ export default {
560
617
  // List collection (read-only, no locking needed)
561
618
  const { tags: filterTags = [], sortBy = 'date', limit = 50 } = args;
562
619
 
563
- // Use useCache: false to ensure we get the latest file data (important after edits)
564
- const collection = await loadFileCollection(contextId, contextKey, false);
620
+ // Use merged collection to include files from all agentContext contexts
621
+ const collection = await loadMergedFileCollection(args.agentContext);
565
622
  let results = collection;
566
623
 
567
624
  // Filter by tags if provided
@@ -113,7 +113,7 @@ export default {
113
113
 
114
114
  for (let i = 0; i < imagesToProcess.length; i++) {
115
115
  const imageRef = imagesToProcess[i];
116
- const resolved = await resolveFileParameter(imageRef, args.contextId, args.contextKey);
116
+ const resolved = await resolveFileParameter(imageRef, args.agentContext);
117
117
  if (!resolved) {
118
118
  throw new Error(`File not found: "${imageRef}". Use ListFileCollection or SearchFileCollection to find available files.`);
119
119
  }
@@ -109,7 +109,7 @@ export default {
109
109
 
110
110
  for (let i = 0; i < imagesToProcess.length; i++) {
111
111
  const imageRef = imagesToProcess[i];
112
- const resolved = await resolveFileParameter(imageRef, args.contextId, args.contextKey, { preferGcs: true });
112
+ const resolved = await resolveFileParameter(imageRef, args.agentContext, { preferGcs: true });
113
113
  if (!resolved) {
114
114
  throw new Error(`File not found: "${imageRef}". Use ListFileCollection or SearchFileCollection to find available files.`);
115
115
  }
@@ -135,20 +135,20 @@ export default {
135
135
  const MAX_CHARS = 100000;
136
136
  const MAX_LINES = 1000;
137
137
 
138
- let { cloudUrl, file, startChar, endChar, startLine, endLine, contextId, contextKey } = args;
138
+ let { cloudUrl, file, startChar, endChar, startLine, endLine } = args;
139
139
 
140
140
  // If file parameter is provided, resolve it to a URL using the common utility
141
141
  if (file) {
142
- if (!contextId) {
142
+ if (!args.agentContext || !Array.isArray(args.agentContext) || args.agentContext.length === 0) {
143
143
  const errorResult = {
144
144
  success: false,
145
- error: "contextId is required when using the 'file' parameter. Use ListFileCollection or SearchFileCollection to find available files."
145
+ error: "agentContext is required when using the 'file' parameter. Use ListFileCollection or SearchFileCollection to find available files."
146
146
  };
147
147
  resolver.tool = JSON.stringify({ toolUsed: "ReadFile" });
148
148
  return JSON.stringify(errorResult);
149
149
  }
150
150
  // Use useCache: false to ensure we get the latest file data (important after edits)
151
- const resolvedUrl = await resolveFileParameter(file, contextId, contextKey, { useCache: false });
151
+ const resolvedUrl = await resolveFileParameter(file, args.agentContext, { useCache: false });
152
152
  if (!resolvedUrl) {
153
153
  const errorResult = {
154
154
  success: false,