@aj-archipelago/cortex 1.4.22 → 1.4.24

This diff shows the changes between publicly available package versions as they appear in their public registries. It is provided for informational purposes only.
Files changed (30)
  1. package/FILE_SYSTEM_DOCUMENTATION.md +116 -48
  2. package/config.js +9 -0
  3. package/lib/fileUtils.js +329 -214
  4. package/package.json +1 -1
  5. package/pathways/system/entity/files/sys_read_file_collection.js +22 -11
  6. package/pathways/system/entity/files/sys_update_file_metadata.js +18 -8
  7. package/pathways/system/entity/sys_entity_agent.js +8 -6
  8. package/pathways/system/entity/tools/sys_tool_codingagent.js +4 -4
  9. package/pathways/system/entity/tools/sys_tool_editfile.js +35 -24
  10. package/pathways/system/entity/tools/sys_tool_file_collection.js +93 -36
  11. package/pathways/system/entity/tools/sys_tool_image.js +1 -1
  12. package/pathways/system/entity/tools/sys_tool_image_gemini.js +1 -1
  13. package/pathways/system/entity/tools/sys_tool_readfile.js +4 -4
  14. package/pathways/system/entity/tools/sys_tool_slides_gemini.js +1 -1
  15. package/pathways/system/entity/tools/sys_tool_video_veo.js +1 -1
  16. package/pathways/system/entity/tools/sys_tool_view_image.js +10 -5
  17. package/pathways/system/workspaces/run_workspace_agent.js +4 -1
  18. package/pathways/video_seedance.js +2 -0
  19. package/server/executeWorkspace.js +45 -2
  20. package/server/pathwayResolver.js +18 -0
  21. package/server/plugins/replicateApiPlugin.js +18 -0
  22. package/server/typeDef.js +10 -1
  23. package/tests/integration/features/tools/fileCollection.test.js +254 -248
  24. package/tests/integration/features/tools/fileOperations.test.js +131 -81
  25. package/tests/integration/graphql/async/stream/vendors/claude_streaming.test.js +3 -4
  26. package/tests/integration/graphql/async/stream/vendors/gemini_streaming.test.js +3 -4
  27. package/tests/integration/graphql/async/stream/vendors/grok_streaming.test.js +3 -4
  28. package/tests/integration/graphql/async/stream/vendors/openai_streaming.test.js +5 -5
  29. package/tests/unit/core/fileCollection.test.js +86 -25
  30. package/pathways/system/workspaces/run_workspace_research_agent.js +0 -27
@@ -80,7 +80,7 @@ export default {
 
  for (let i = 0; i < imagesToProcess.length; i++) {
  const imageRef = imagesToProcess[i];
- const resolved = await resolveFileParameter(imageRef, args.contextId, args.contextKey, { preferGcs: true });
+ const resolved = await resolveFileParameter(imageRef, args.agentContext, { preferGcs: true });
  if (!resolved) {
  throw new Error(`File not found: "${imageRef}". Use ListFileCollection or SearchFileCollection to find available files.`);
  }
@@ -128,7 +128,7 @@ export default {
  throw new Error("contextId is required when using the 'inputImage' parameter. Use ListFileCollection or SearchFileCollection to find available files.");
  }
 
- const resolved = await resolveFileParameter(args.inputImage, args.contextId, args.contextKey, { preferGcs: true });
+ const resolved = await resolveFileParameter(args.inputImage, args.agentContext, { preferGcs: true });
  if (!resolved) {
  throw new Error(`File not found: "${args.inputImage}". Use ListFileCollection or SearchFileCollection to find available files.`);
  }
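The two hunks above change resolveFileParameter from separate contextId/contextKey arguments to a single agentContext array. A minimal sketch of the new call shape; the entry format is taken from the agentContext parameter introduced elsewhere in this diff, and the values and wrapper function are placeholders:

```js
// Sketch: resolving a file reference after this change. resolveFileParameter is the
// package helper shown in the hunks above; it is taken as a parameter here so the
// sketch stays self-contained. Context values are placeholders.
async function resolveImage(resolveFileParameter, imageRef) {
  const agentContext = [
    { contextId: "workspace-main", contextKey: "wk-key", default: true },
    { contextId: "workspace-shared", contextKey: null, default: false },
  ];
  // 1.4.22 call shape: resolveFileParameter(imageRef, contextId, contextKey, { preferGcs: true })
  // 1.4.24 call shape:
  return await resolveFileParameter(imageRef, agentContext, { preferGcs: true });
}
```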
@@ -1,7 +1,7 @@
  // sys_tool_view_image.js
  // Tool pathway that allows agents to view image files from the file collection
  import logger from '../../../../lib/logger.js';
- import { loadFileCollection, findFileInCollection, ensureShortLivedUrl } from '../../../../lib/fileUtils.js';
+ import { loadMergedFileCollection, findFileInCollection, ensureShortLivedUrl, getDefaultContext } from '../../../../lib/fileUtils.js';
  import { config } from '../../../../config.js';
 
  export default {
@@ -34,15 +34,19 @@ export default {
  },
 
  executePathway: async ({args, runAllPrompts, resolver}) => {
- const { files, contextId, contextKey } = args;
+ const { files } = args;
 
  if (!files || !Array.isArray(files) || files.length === 0) {
  throw new Error("Files parameter is required and must be a non-empty array");
  }
 
+ if (!args.agentContext || !Array.isArray(args.agentContext) || args.agentContext.length === 0) {
+ throw new Error("agentContext is required");
+ }
+
  try {
- // Load the file collection
- const collection = await loadFileCollection(contextId, contextKey, true);
+ // Load the file collection (merged from all agentContext contexts)
+ const collection = await loadMergedFileCollection(args.agentContext);
 
  const imageUrls = [];
  const errors = [];
@@ -70,7 +74,8 @@ export default {
 
  // Resolve to short-lived URL if possible
  const fileHandlerUrl = config.get('whisperMediaApiUrl');
- const fileWithShortLivedUrl = await ensureShortLivedUrl(foundFile, fileHandlerUrl, contextId);
+ const defaultCtx = getDefaultContext(args.agentContext);
+ const fileWithShortLivedUrl = await ensureShortLivedUrl(foundFile, fileHandlerUrl, defaultCtx?.contextId || null);
 
  // Add to imageUrls array
  imageUrls.push({
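sys_tool_view_image.js now requires args.agentContext, loads a merged collection via loadMergedFileCollection, and passes the default context's contextId to ensureShortLivedUrl. The helper bodies live in lib/fileUtils.js and are not shown in this diff, so the sketch below only illustrates behavior consistent with how they are called above:

```js
// Illustrative sketches only -- not the fileUtils.js implementation.

// Pick the context entry flagged default, falling back to the first one.
function getDefaultContext(agentContext) {
  if (!Array.isArray(agentContext) || agentContext.length === 0) return null;
  return agentContext.find((ctx) => ctx.default) || agentContext[0];
}

// Hypothetical merge: load each context's collection and concatenate the results.
// loadFileCollection is passed in here to keep the sketch self-contained; the real
// helper takes only the agentContext array.
async function loadMergedFileCollection(agentContext, loadFileCollection) {
  const collections = await Promise.all(
    agentContext.map((ctx) => loadFileCollection(ctx.contextId, ctx.contextKey, true))
  );
  return collections.flat();
}
```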
@@ -6,8 +6,11 @@ export default {
 
  inputParameters: {
  model: "oai-gpt41",
- aiStyle: "OpenAI",
  chatHistory: [{role: '', content: []}],
+ researchMode: false,
+ agentContext: [
+ { contextId: "", contextKey: "", default: true }
+ ]
  },
  timeout: 600,
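This parameter block appears to be run_workspace_agent.js (the only file in the list with a matching +4/-1 change): aiStyle is gone, and callers now supply researchMode plus an agentContext array. A sketch of the arguments a caller might pass, with placeholder values and the field shape taken from the inputParameters above:

```js
// Placeholder values; the shape mirrors the inputParameters above.
const workspaceAgentArgs = {
  model: "oai-gpt41",
  chatHistory: [{ role: "user", content: ["Summarize the files in this workspace"] }],
  researchMode: false,
  agentContext: [
    { contextId: "workspace-main", contextKey: "wk-key", default: true },
    { contextId: "workspace-shared", contextKey: null, default: false },
  ],
};
```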
 
@@ -11,6 +11,8 @@ export default {
  image: "",
  camera_fixed: false,
  seed: -1,
+ generate_audio: false,
+ last_frame_image: "",
  },
 
  timeout: 60 * 30, // 30 minutes
@@ -87,14 +87,57 @@ const executePathwayWithFallback = async (pathway, pathwayArgs, contextValue, in
  if (cortexPathwayName) {
  // Use the specific cortex pathway
  // Transform parameters for cortex pathway
- // Spread all pathway args first (including contextId, contextKey, etc.), then override specific fields
+ // Spread all pathway args first, then override specific fields
  const cortexArgs = {
- ...pathwayArgs, // Spread all pathway args (including contextId, contextKey, etc.)
+ ...pathwayArgs, // Spread all pathway args
  model: pathway.model || pathwayArgs.model || "labeeb-agent", // Use pathway model or default
  chatHistory: pathwayArgs.chatHistory ? JSON.parse(JSON.stringify(pathwayArgs.chatHistory)) : [],
  systemPrompt: pathway.systemPrompt || pathwayArgs.systemPrompt
  };
 
+ // Transform old parameters to new format for run_workspace_agent
+ if (cortexPathwayName === 'run_workspace_agent') {
+ // Remove old aiStyle parameter (no longer used)
+ delete cortexArgs.aiStyle;
+
+ // Transform context parameters to agentContext array format (only if agentContext not already provided)
+ if (!cortexArgs.agentContext && (cortexArgs.contextId || cortexArgs.contextKey || cortexArgs.altContextId || cortexArgs.altContextKey)) {
+ const agentContext = [];
+
+ // Add primary context if present
+ if (cortexArgs.contextId) {
+ agentContext.push({
+ contextId: cortexArgs.contextId,
+ contextKey: cortexArgs.contextKey || null,
+ default: true
+ });
+ }
+
+ // Add alternate context if present
+ if (cortexArgs.altContextId) {
+ agentContext.push({
+ contextId: cortexArgs.altContextId,
+ contextKey: cortexArgs.altContextKey || null,
+ default: false
+ });
+ }
+
+ // If we have at least one context, set agentContext and remove old params
+ if (agentContext.length > 0) {
+ cortexArgs.agentContext = agentContext;
+ delete cortexArgs.contextId;
+ delete cortexArgs.contextKey;
+ delete cortexArgs.altContextId;
+ delete cortexArgs.altContextKey;
+ }
+ }
+
+ // Ensure researchMode defaults to false if not provided
+ if (cortexArgs.researchMode === undefined) {
+ cortexArgs.researchMode = false;
+ }
+ }
+
  // If we have text parameter, we need to add it to the chatHistory
  if (pathwayArgs.text) {
  // Find the last user message or create a new one
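In server/executeWorkspace.js (the +45/-2 entry above), this block rewrites legacy workspace arguments into the agentContext format before dispatching to run_workspace_agent, and only when the caller has not already supplied agentContext. A worked example of the mapping, with placeholder values and the model/chatHistory/systemPrompt handling omitted for brevity:

```js
// Legacy workspace args (placeholder values)
const legacyArgs = {
  contextId: "ctx-primary",
  contextKey: "key-1",
  altContextId: "ctx-alt",
  altContextKey: null,
  aiStyle: "OpenAI",
};

// Per the logic above, cortexArgs for run_workspace_agent would end up carrying:
// {
//   researchMode: false,
//   agentContext: [
//     { contextId: "ctx-primary", contextKey: "key-1", default: true },
//     { contextId: "ctx-alt", contextKey: null, default: false },
//   ],
// }
// with contextId, contextKey, altContextId, altContextKey, and aiStyle removed.
```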
@@ -367,6 +367,24 @@ class PathwayResolver {
  }
 
  async executePathway(args) {
+ // Bidirectional context transformation for backward compatibility:
+ // 1. If agentContext provided: extract contextId/contextKey for legacy pathways
+ // 2. If contextId provided without agentContext: create agentContext for new pathways
+ if (args.agentContext && Array.isArray(args.agentContext) && args.agentContext.length > 0) {
+ const defaultCtx = args.agentContext.find(ctx => ctx.default) || args.agentContext[0];
+ if (defaultCtx) {
+ args.contextId = defaultCtx.contextId;
+ args.contextKey = defaultCtx.contextKey || null;
+ }
+ } else if (args.contextId && !args.agentContext) {
+ // Backward compat: create agentContext from legacy contextId/contextKey
+ args.agentContext = [{
+ contextId: args.contextId,
+ contextKey: args.contextKey || null,
+ default: true
+ }];
+ }
+
  if (this.pathway.executePathway && typeof this.pathway.executePathway === 'function') {
  return await this.pathway.executePathway({ args, runAllPrompts: this.promptAndParse.bind(this), resolver: this });
  }
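PathwayResolver.executePathway now normalizes both directions, so legacy pathways that read args.contextId and new pathways that read args.agentContext can coexist. Two small worked cases derived from the branch logic above (values are placeholders):

```js
// Direction 1: agentContext in, legacy fields derived from the default entry
const a = { agentContext: [{ contextId: "ctx-a", contextKey: "k", default: true }] };
// after the shim runs: a.contextId === "ctx-a" and a.contextKey === "k"

// Direction 2: legacy fields in, agentContext synthesized
const b = { contextId: "ctx-b" };
// after the shim runs: b.agentContext deep-equals
//   [{ contextId: "ctx-b", contextKey: null, default: true }]
```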
@@ -321,6 +321,24 @@ class ReplicateApiPlugin extends ModelPlugin {
  };
  break;
  }
+ case "replicate-seedance-1.5-pro": {
+ const validRatios = ["16:9", "4:3", "1:1", "3:4", "9:16", "21:9", "9:21"];
+
+ requestParameters = {
+ input: {
+ prompt: modelPromptText,
+ aspect_ratio: validRatios.includes(combinedParameters.aspectRatio) ? combinedParameters.aspectRatio : "16:9",
+ duration: Math.min(12, Math.max(2, combinedParameters.duration || 5)),
+ fps: 24,
+ camera_fixed: combinedParameters.camera_fixed || false,
+ generate_audio: combinedParameters.generate_audio || false,
+ ...(combinedParameters.seed && Number.isInteger(combinedParameters.seed) && combinedParameters.seed > 0 ? { seed: combinedParameters.seed } : {}),
+ ...(combinedParameters.image ? { image: combinedParameters.image } : {}),
+ ...(combinedParameters.image && combinedParameters.last_frame_image ? { last_frame_image: combinedParameters.last_frame_image } : {}),
+ },
+ };
+ break;
+ }
  case "replicate-seedream-4": {
  const validSizes = ["1K", "2K", "4K", "custom"];
  const validRatios = ["1:1", "4:3", "3:4", "16:9", "9:16", "match_input_image"];
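The new replicate-seedance-1.5-pro case in ReplicateApiPlugin validates the aspect ratio against a whitelist, clamps duration to 2-12 seconds at a fixed 24 fps, and forwards seed, image, and last_frame_image only when they are usable (last_frame_image additionally requires image). A worked example with hypothetical parameter values:

```js
// Hypothetical combinedParameters for the seedance 1.5 pro case above
const combinedParameters = {
  aspectRatio: "9:16",
  duration: 20,          // clamped to 12 by Math.min(12, Math.max(2, 20))
  camera_fixed: false,
  generate_audio: true,
  seed: 0,               // not > 0, so omitted from the request
  image: "https://example.com/start.png",
  last_frame_image: "",  // falsy, so omitted even though image is set
};

// Request input the plugin logic above would build:
// {
//   prompt: modelPromptText,
//   aspect_ratio: "9:16",
//   duration: 12,
//   fps: 24,
//   camera_fixed: false,
//   generate_audio: true,
//   image: "https://example.com/start.png",
// }
```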
package/server/typeDef.js
@@ -58,6 +58,10 @@ const getGraphQlType = (value) => {
  const items = schema.items || {};
  const def = schema.default;
  const defaultArray = Array.isArray(def) ? JSON.stringify(def) : '[]';
+ // Support explicit object type name (e.g., items: { objType: 'AgentContextInput' })
+ if (items.objType) {
+ return { type: `[${items.objType}]`, defaultValue: `"${defaultArray.replace(/"/g, '\\"')}"` };
+ }
  if (items.type === 'string') {
  return { type: '[String]', defaultValue: defaultArray };
  }
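The objType hook lets an array parameter name its GraphQL element type explicitly instead of having it inferred from a sample value. The pathway-side declaration that produces such a schema is outside this hunk, so the shape below is an assumption used only to show the return value of the new branch:

```js
// Assumed schema shape reaching the branch above (illustrative only)
const schema = { items: { objType: "AgentContextInput" }, default: [] };
// For this schema, the branch returns:
//   { type: "[AgentContextInput]", defaultValue: '"[]"' }
```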
@@ -103,6 +107,10 @@ const getGraphQlType = (value) => {
  if (Array.isArray(value[0]?.content)) {
  return {type: '[MultiMessage]', defaultValue: `"${JSON.stringify(value).replace(/"/g, '\\"')}"`};
  }
+ // Check if it's AgentContextInput (has contextId and default properties)
+ else if (value[0] && typeof value[0] === 'object' && 'contextId' in value[0] && 'default' in value[0]) {
+ return {type: '[AgentContextInput]', defaultValue: `"${JSON.stringify(value).replace(/"/g, '\\"')}"`};
+ }
  else {
  return {type: '[Message]', defaultValue: `"${JSON.stringify(value).replace(/"/g, '\\"')}"`};
  }
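The sample-value path gets a matching rule: an array whose first element carries contextId and default keys is now typed as [AgentContextInput] rather than falling through to [Message]. This matches the default value run_workspace_agent declares for agentContext, for example:

```js
// The agentContext default declared in run_workspace_agent.js in this release
const sample = [{ contextId: "", contextKey: "", default: true }];
// getGraphQlType(sample) now yields type '[AgentContextInput]', with the sample
// JSON-stringified and quote-escaped as the defaultValue (previously this array
// would have fallen through to '[Message]').
```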
@@ -123,8 +131,9 @@ const getGraphQlType = (value) => {
  const getMessageTypeDefs = () => {
  const messageType = `input Message { role: String, content: String, name: String }`;
  const multiMessageType = `input MultiMessage { role: String, content: [String], name: String, tool_calls: [String], tool_call_id: String }`;
+ const agentContextType = `input AgentContextInput { contextId: String, contextKey: String, default: Boolean }`;
 
- return `${messageType}\n\n${multiMessageType}`;
+ return `${messageType}\n\n${multiMessageType}\n\n${agentContextType}`;
  };
 
  const getPathwayTypeDef = (name, returnType) => {
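After this change the shared message type definitions also carry the new input type; the SDL below is reproduced directly from the three template literals above:

```js
// The string getMessageTypeDefs() now returns, joined with blank lines
const expectedTypeDefs = [
  "input Message { role: String, content: String, name: String }",
  "input MultiMessage { role: String, content: [String], name: String, tool_calls: [String], tool_call_id: String }",
  "input AgentContextInput { contextId: String, contextKey: String, default: Boolean }",
].join("\n\n");
```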