@aj-archipelago/cortex 1.4.21 → 1.4.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/FILE_SYSTEM_DOCUMENTATION.md +116 -48
  2. package/config.js +27 -0
  3. package/lib/fileUtils.js +226 -201
  4. package/lib/requestExecutor.js +3 -2
  5. package/lib/util.js +71 -1
  6. package/package.json +1 -1
  7. package/pathways/image_flux.js +8 -2
  8. package/pathways/image_qwen.js +1 -1
  9. package/pathways/system/entity/files/sys_read_file_collection.js +13 -11
  10. package/pathways/system/entity/files/sys_update_file_metadata.js +16 -7
  11. package/pathways/system/entity/sys_entity_agent.js +8 -6
  12. package/pathways/system/entity/tools/sys_tool_codingagent.js +4 -4
  13. package/pathways/system/entity/tools/sys_tool_editfile.js +27 -22
  14. package/pathways/system/entity/tools/sys_tool_file_collection.js +15 -10
  15. package/pathways/system/entity/tools/sys_tool_image.js +5 -5
  16. package/pathways/system/entity/tools/sys_tool_image_gemini.js +1 -1
  17. package/pathways/system/entity/tools/sys_tool_readfile.js +4 -4
  18. package/pathways/system/entity/tools/sys_tool_slides_gemini.js +1 -1
  19. package/pathways/system/entity/tools/sys_tool_video_veo.js +1 -1
  20. package/pathways/system/entity/tools/sys_tool_view_image.js +10 -5
  21. package/pathways/system/workspaces/run_workspace_agent.js +4 -1
  22. package/pathways/video_seedance.js +2 -0
  23. package/server/executeWorkspace.js +45 -2
  24. package/server/pathwayResolver.js +18 -0
  25. package/server/plugins/claude3VertexPlugin.js +2 -6
  26. package/server/plugins/claude4VertexPlugin.js +5 -10
  27. package/server/plugins/gemini3ReasoningVisionPlugin.js +0 -2
  28. package/server/plugins/grokResponsesPlugin.js +3 -19
  29. package/server/plugins/grokVisionPlugin.js +3 -18
  30. package/server/plugins/modelPlugin.js +3 -0
  31. package/server/plugins/openAiVisionPlugin.js +3 -18
  32. package/server/plugins/replicateApiPlugin.js +182 -101
  33. package/server/resolver.js +32 -3
  34. package/server/typeDef.js +10 -1
  35. package/test.log +39427 -0
  36. package/tests/integration/features/tools/fileCollection.test.js +254 -248
  37. package/tests/integration/features/tools/fileOperations.test.js +131 -81
  38. package/tests/integration/graphql/async/stream/agentic.test.js +1 -1
  39. package/tests/integration/graphql/async/stream/vendors/claude_streaming.test.js +3 -4
  40. package/tests/integration/graphql/async/stream/vendors/gemini_streaming.test.js +3 -4
  41. package/tests/integration/graphql/async/stream/vendors/grok_streaming.test.js +3 -4
  42. package/tests/integration/graphql/async/stream/vendors/openai_streaming.test.js +5 -5
  43. package/tests/unit/core/fileCollection.test.js +86 -25
  44. package/pathways/system/workspaces/run_workspace_research_agent.js +0 -27
@@ -87,14 +87,57 @@ const executePathwayWithFallback = async (pathway, pathwayArgs, contextValue, in
87
87
  if (cortexPathwayName) {
88
88
  // Use the specific cortex pathway
89
89
  // Transform parameters for cortex pathway
90
- // Spread all pathway args first (including contextId, contextKey, etc.), then override specific fields
90
+ // Spread all pathway args first, then override specific fields
91
91
  const cortexArgs = {
92
- ...pathwayArgs, // Spread all pathway args (including contextId, contextKey, etc.)
92
+ ...pathwayArgs, // Spread all pathway args
93
93
  model: pathway.model || pathwayArgs.model || "labeeb-agent", // Use pathway model or default
94
94
  chatHistory: pathwayArgs.chatHistory ? JSON.parse(JSON.stringify(pathwayArgs.chatHistory)) : [],
95
95
  systemPrompt: pathway.systemPrompt || pathwayArgs.systemPrompt
96
96
  };
97
97
 
98
+ // Transform old parameters to new format for run_workspace_agent
99
+ if (cortexPathwayName === 'run_workspace_agent') {
100
+ // Remove old aiStyle parameter (no longer used)
101
+ delete cortexArgs.aiStyle;
102
+
103
+ // Transform context parameters to agentContext array format (only if agentContext not already provided)
104
+ if (!cortexArgs.agentContext && (cortexArgs.contextId || cortexArgs.contextKey || cortexArgs.altContextId || cortexArgs.altContextKey)) {
105
+ const agentContext = [];
106
+
107
+ // Add primary context if present
108
+ if (cortexArgs.contextId) {
109
+ agentContext.push({
110
+ contextId: cortexArgs.contextId,
111
+ contextKey: cortexArgs.contextKey || null,
112
+ default: true
113
+ });
114
+ }
115
+
116
+ // Add alternate context if present
117
+ if (cortexArgs.altContextId) {
118
+ agentContext.push({
119
+ contextId: cortexArgs.altContextId,
120
+ contextKey: cortexArgs.altContextKey || null,
121
+ default: false
122
+ });
123
+ }
124
+
125
+ // If we have at least one context, set agentContext and remove old params
126
+ if (agentContext.length > 0) {
127
+ cortexArgs.agentContext = agentContext;
128
+ delete cortexArgs.contextId;
129
+ delete cortexArgs.contextKey;
130
+ delete cortexArgs.altContextId;
131
+ delete cortexArgs.altContextKey;
132
+ }
133
+ }
134
+
135
+ // Ensure researchMode defaults to false if not provided
136
+ if (cortexArgs.researchMode === undefined) {
137
+ cortexArgs.researchMode = false;
138
+ }
139
+ }
140
+
98
141
  // If we have text parameter, we need to add it to the chatHistory
99
142
  if (pathwayArgs.text) {
100
143
  // Find the last user message or create a new one
@@ -367,6 +367,24 @@ class PathwayResolver {
367
367
  }
368
368
 
369
369
  async executePathway(args) {
370
+ // Bidirectional context transformation for backward compatibility:
371
+ // 1. If agentContext provided: extract contextId/contextKey for legacy pathways
372
+ // 2. If contextId provided without agentContext: create agentContext for new pathways
373
+ if (args.agentContext && Array.isArray(args.agentContext) && args.agentContext.length > 0) {
374
+ const defaultCtx = args.agentContext.find(ctx => ctx.default) || args.agentContext[0];
375
+ if (defaultCtx) {
376
+ args.contextId = defaultCtx.contextId;
377
+ args.contextKey = defaultCtx.contextKey || null;
378
+ }
379
+ } else if (args.contextId && !args.agentContext) {
380
+ // Backward compat: create agentContext from legacy contextId/contextKey
381
+ args.agentContext = [{
382
+ contextId: args.contextId,
383
+ contextKey: args.contextKey || null,
384
+ default: true
385
+ }];
386
+ }
387
+
370
388
  if (this.pathway.executePathway && typeof this.pathway.executePathway === 'function') {
371
389
  return await this.pathway.executePathway({ args, runAllPrompts: this.promptAndParse.bind(this), resolver: this });
372
390
  }
@@ -4,6 +4,7 @@ import { requestState } from '../requestState.js';
4
4
  import { addCitationsToResolver } from '../../lib/pathwayTools.js';
5
5
  import CortexResponse from '../../lib/cortexResponse.js';
6
6
  import axios from 'axios';
7
+ import { sanitizeBase64 } from "../../lib/util.js";
7
8
 
8
9
  async function convertContentItem(item, maxImageSize, plugin) {
9
10
  let imageUrl = "";
@@ -576,12 +577,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
576
577
  let totalUnits;
577
578
  messages.forEach((message, index) => {
578
579
  const content = Array.isArray(message.content)
579
- ? message.content.map((item) => {
580
- if (item.source && item.source.type === 'base64') {
581
- item.source.data = '* base64 data truncated for log *';
582
- }
583
- return JSON.stringify(item);
584
- }).join(", ")
580
+ ? message.content.map((item) => JSON.stringify(sanitizeBase64(item))).join(", ")
585
581
  : message.content;
586
582
  const { length, units } = this.getLength(content);
587
583
  const preview = this.shortenContent(content);
@@ -1,6 +1,7 @@
1
1
  import Claude3VertexPlugin from "./claude3VertexPlugin.js";
2
2
  import logger from "../../lib/logger.js";
3
3
  import axios from 'axios';
4
+ import { sanitizeBase64 } from "../../lib/util.js";
4
5
 
5
6
  // Claude 4 default maximum file size limit (30MB) for both images and PDFs
6
7
  const CLAUDE4_DEFAULT_MAX_FILE_SIZE = 30 * 1024 * 1024; // 30MB
@@ -475,13 +476,10 @@ class Claude4VertexPlugin extends Claude3VertexPlugin {
475
476
  messages.forEach((message, index) => {
476
477
  const content = Array.isArray(message.content)
477
478
  ? message.content.map((item) => {
478
- if (item.source && item.source.type === 'base64') {
479
- item.source.data = '* base64 data truncated for log *';
480
- }
481
479
  if (item.type === 'document') {
482
- return `{type: document, source: ${JSON.stringify(item.source)}}`;
480
+ return `{type: document, source: ${JSON.stringify(sanitizeBase64(item.source))}}`;
483
481
  }
484
- return JSON.stringify(item);
482
+ return JSON.stringify(sanitizeBase64(item));
485
483
  }).join(", ")
486
484
  : message.content;
487
485
  const { length, units } = this.getLength(content);
@@ -500,13 +498,10 @@ class Claude4VertexPlugin extends Claude3VertexPlugin {
500
498
  const message = messages[0];
501
499
  const content = Array.isArray(message.content)
502
500
  ? message.content.map((item) => {
503
- if (item.source && item.source.type === 'base64') {
504
- item.source.data = '* base64 data truncated for log *';
505
- }
506
501
  if (item.type === 'document') {
507
- return `{type: document, source: ${JSON.stringify(item.source)}}`;
502
+ return `{type: document, source: ${JSON.stringify(sanitizeBase64(item.source))}}`;
508
503
  }
509
- return JSON.stringify(item);
504
+ return JSON.stringify(sanitizeBase64(item));
510
505
  }).join(", ")
511
506
  : message.content;
512
507
  const { length, units } = this.getLength(content);
@@ -24,8 +24,6 @@ class Gemini3ReasoningVisionPlugin extends Gemini3ImagePlugin {
24
24
  } else {
25
25
  // Fallback: use documented dummy signature to prevent 400 errors
26
26
  // This allows the request to proceed but may affect reasoning quality
27
- const toolName = toolCall?.function?.name || 'unknown';
28
- logger.warn(`Missing thoughtSignature for tool "${toolName}"; using fallback. This may indicate thoughtSignatures were lost during history persistence.`);
29
27
  part.thoughtSignature = "skip_thought_signature_validator";
30
28
  }
31
29
  return part;
@@ -4,7 +4,7 @@
4
4
 
5
5
  import OpenAIVisionPlugin from './openAiVisionPlugin.js';
6
6
  import logger from '../../lib/logger.js';
7
- import { extractCitationTitle } from '../../lib/util.js';
7
+ import { extractCitationTitle, sanitizeBase64 } from '../../lib/util.js';
8
8
  import CortexResponse from '../../lib/cortexResponse.js';
9
9
  import { requestState } from '../requestState.js';
10
10
  import { addCitationsToResolver } from '../../lib/pathwayTools.js';
@@ -37,15 +37,7 @@ class GrokResponsesPlugin extends OpenAIVisionPlugin {
37
37
  let totalLength = 0;
38
38
  let totalUnits;
39
39
  messages.forEach((message, index) => {
40
- const content = message.content === undefined ? JSON.stringify(message) : (Array.isArray(message.content) ? message.content.map(item => {
41
- if (item.type === 'image_url' && item.image_url?.url?.startsWith('data:')) {
42
- return JSON.stringify({
43
- type: 'image_url',
44
- image_url: { url: '* base64 data truncated for log *' }
45
- });
46
- }
47
- return JSON.stringify(item);
48
- }).join(', ') : message.content);
40
+ const content = message.content === undefined ? JSON.stringify(sanitizeBase64(message)) : (Array.isArray(message.content) ? message.content.map(item => JSON.stringify(sanitizeBase64(item))).join(', ') : message.content);
49
41
  const { length, units } = this.getLength(content);
50
42
  const displayContent = this.shortenContent(content);
51
43
 
@@ -62,15 +54,7 @@ class GrokResponsesPlugin extends OpenAIVisionPlugin {
62
54
  logger.info(`[grok responses request contained ${totalLength} ${totalUnits}]`);
63
55
  } else if (messages && messages.length === 1) {
64
56
  const message = messages[0];
65
- const content = Array.isArray(message.content) ? message.content.map(item => {
66
- if (item.type === 'image_url' && item.image_url?.url?.startsWith('data:')) {
67
- return JSON.stringify({
68
- type: 'image_url',
69
- image_url: { url: '* base64 data truncated for log *' }
70
- });
71
- }
72
- return JSON.stringify(item);
73
- }).join(', ') : message.content;
57
+ const content = Array.isArray(message.content) ? message.content.map(item => JSON.stringify(sanitizeBase64(item))).join(', ') : message.content;
74
58
  const { length, units } = this.getLength(content);
75
59
  logger.info(`[grok responses request sent containing ${length} ${units}]`);
76
60
  logger.verbose(`${this.shortenContent(content)}`);
@@ -1,5 +1,6 @@
1
1
  import OpenAIVisionPlugin from './openAiVisionPlugin.js';
2
2
  import logger from '../../lib/logger.js';
3
+ import { sanitizeBase64 } from '../../lib/util.js';
3
4
  import { extractCitationTitle } from '../../lib/util.js';
4
5
  import CortexResponse from '../../lib/cortexResponse.js';
5
6
 
@@ -28,15 +29,7 @@ class GrokVisionPlugin extends OpenAIVisionPlugin {
28
29
  let totalUnits;
29
30
  messages.forEach((message, index) => {
30
31
  //message.content string or array
31
- const content = message.content === undefined ? JSON.stringify(message) : (Array.isArray(message.content) ? message.content.map(item => {
32
- if (item.type === 'image_url' && item.image_url?.url?.startsWith('data:')) {
33
- return JSON.stringify({
34
- type: 'image_url',
35
- image_url: { url: '* base64 data truncated for log *' }
36
- });
37
- }
38
- return JSON.stringify(item);
39
- }).join(', ') : message.content);
32
+ const content = message.content === undefined ? JSON.stringify(sanitizeBase64(message)) : (Array.isArray(message.content) ? message.content.map(item => JSON.stringify(sanitizeBase64(item))).join(', ') : message.content);
40
33
  const { length, units } = this.getLength(content);
41
34
  const displayContent = this.shortenContent(content);
42
35
 
@@ -54,15 +47,7 @@ class GrokVisionPlugin extends OpenAIVisionPlugin {
54
47
  logger.info(`[grok request contained ${totalLength} ${totalUnits}]`);
55
48
  } else {
56
49
  const message = messages[0];
57
- const content = Array.isArray(message.content) ? message.content.map(item => {
58
- if (item.type === 'image_url' && item.image_url?.url?.startsWith('data:')) {
59
- return JSON.stringify({
60
- type: 'image_url',
61
- image_url: { url: '* base64 data truncated for log *' }
62
- });
63
- }
64
- return JSON.stringify(item);
65
- }).join(', ') : message.content;
50
+ const content = Array.isArray(message.content) ? message.content.map(item => JSON.stringify(sanitizeBase64(item))).join(', ') : message.content;
66
51
  const { length, units } = this.getLength(content);
67
52
  logger.info(`[grok request sent containing ${length} ${units}]`);
68
53
  logger.verbose(`${this.shortenContent(content)}`);
@@ -594,6 +594,9 @@ class ModelPlugin {
594
594
  if (error.response) {
595
595
  logger.error(`Response status: ${error.response.status}`);
596
596
  logger.error(`Response headers: ${JSON.stringify(error.response.headers)}`);
597
+ if (error.response.data) {
598
+ logger.error(`Response data: ${JSON.stringify(error.response.data)}`);
599
+ }
597
600
  }
598
601
  if (error.data) {
599
602
  logger.error(`Additional error data: ${JSON.stringify(error.data)}`);
@@ -3,6 +3,7 @@ import logger from '../../lib/logger.js';
3
3
  import { requestState } from '../requestState.js';
4
4
  import { addCitationsToResolver } from '../../lib/pathwayTools.js';
5
5
  import CortexResponse from '../../lib/cortexResponse.js';
6
+ import { sanitizeBase64 } from '../../lib/util.js';
6
7
  function safeJsonParse(content) {
7
8
  try {
8
9
  const parsedContent = JSON.parse(content);
@@ -158,15 +159,7 @@ class OpenAIVisionPlugin extends OpenAIChatPlugin {
158
159
  let totalUnits;
159
160
  messages.forEach((message, index) => {
160
161
  //message.content string or array
161
- const content = message.content === undefined ? JSON.stringify(message) : (Array.isArray(message.content) ? message.content.map(item => {
162
- if (item.type === 'image_url' && item.image_url?.url?.startsWith('data:')) {
163
- return JSON.stringify({
164
- type: 'image_url',
165
- image_url: { url: '* base64 data truncated for log *' }
166
- });
167
- }
168
- return JSON.stringify(item);
169
- }).join(', ') : message.content);
162
+ const content = message.content === undefined ? JSON.stringify(sanitizeBase64(message)) : (Array.isArray(message.content) ? message.content.map(item => JSON.stringify(sanitizeBase64(item))).join(', ') : message.content);
170
163
  const { length, units } = this.getLength(content);
171
164
  const displayContent = this.shortenContent(content);
172
165
 
@@ -184,15 +177,7 @@ class OpenAIVisionPlugin extends OpenAIChatPlugin {
184
177
  logger.info(`[chat request contained ${totalLength} ${totalUnits}]`);
185
178
  } else {
186
179
  const message = messages[0];
187
- const content = Array.isArray(message.content) ? message.content.map(item => {
188
- if (item.type === 'image_url' && item.image_url?.url?.startsWith('data:')) {
189
- return JSON.stringify({
190
- type: 'image_url',
191
- image_url: { url: '* base64 data truncated for log *' }
192
- });
193
- }
194
- return JSON.stringify(item);
195
- }).join(', ') : message.content;
180
+ const content = Array.isArray(message.content) ? message.content.map(item => JSON.stringify(sanitizeBase64(item))).join(', ') : message.content;
196
181
  const { length, units } = this.getLength(content);
197
182
  logger.info(`[request sent containing ${length} ${units}]`);
198
183
  logger.verbose(`${this.shortenContent(content)}`);
@@ -5,6 +5,64 @@ import logger from "../../lib/logger.js";
5
5
  import axios from "axios";
6
6
  import mime from "mime-types";
7
7
 
8
+ // Helper function to collect images from various parameter sources
9
+ const collectImages = (candidate, accumulator) => {
10
+ if (!candidate) return;
11
+ if (Array.isArray(candidate)) {
12
+ candidate.forEach((item) => collectImages(item, accumulator));
13
+ return;
14
+ }
15
+ accumulator.push(candidate);
16
+ };
17
+
18
+ // Helper function to normalize image entries to strings
19
+ const normalizeImageEntry = (entry) => {
20
+ if (!entry) return null;
21
+ if (typeof entry === "string") {
22
+ return entry;
23
+ }
24
+ if (typeof entry === "object") {
25
+ if (Array.isArray(entry)) {
26
+ return null;
27
+ }
28
+ if (entry.value) {
29
+ return entry.value;
30
+ }
31
+ if (entry.url) {
32
+ return entry.url;
33
+ }
34
+ if (entry.path) {
35
+ return entry.path;
36
+ }
37
+ }
38
+ return null;
39
+ };
40
+
41
+ // Helper function to omit undefined/null values from an object
42
+ const omitUndefined = (obj) =>
43
+ Object.fromEntries(
44
+ Object.entries(obj).filter(([, value]) => value !== undefined && value !== null),
45
+ );
46
+
47
+ // Helper function to collect and normalize images from combined parameters
48
+ const collectNormalizedImages = (combinedParameters, additionalFields = []) => {
49
+ const imageCandidates = [];
50
+ const defaultFields = [
51
+ 'image', 'images', 'input_image', 'input_images',
52
+ 'input_image_1', 'input_image_2', 'input_image_3',
53
+ 'image_1', 'image_2'
54
+ ];
55
+ const allFields = [...defaultFields, ...additionalFields];
56
+
57
+ allFields.forEach(field => {
58
+ collectImages(combinedParameters[field], imageCandidates);
59
+ });
60
+
61
+ return imageCandidates
62
+ .map((candidate) => normalizeImageEntry(candidate))
63
+ .filter((candidate) => candidate && typeof candidate === 'string');
64
+ };
65
+
8
66
  class ReplicateApiPlugin extends ModelPlugin {
9
67
  constructor(pathway, model) {
10
68
  super(pathway, model);
@@ -139,67 +197,55 @@ class ReplicateApiPlugin extends ModelPlugin {
139
197
  const goFast = combinedParameters.go_fast ?? combinedParameters.goFast ?? true;
140
198
  const disableSafetyChecker = combinedParameters.disable_safety_checker ?? combinedParameters.disableSafetyChecker ?? false;
141
199
 
142
- const collectImages = (candidate, accumulator) => {
143
- if (!candidate) return;
144
- if (Array.isArray(candidate)) {
145
- candidate.forEach((item) => collectImages(item, accumulator));
146
- return;
147
- }
148
- accumulator.push(candidate);
149
- };
200
+ const normalizedImages = collectNormalizedImages(combinedParameters);
201
+
202
+ const basePayload = omitUndefined({
203
+ prompt: modelPromptText,
204
+ go_fast: goFast,
205
+ aspect_ratio: aspectRatio,
206
+ output_format: outputFormat,
207
+ output_quality: outputQuality,
208
+ disable_safety_checker: disableSafetyChecker,
209
+ });
150
210
 
151
- const imageCandidates = [];
152
- collectImages(combinedParameters.image, imageCandidates);
153
- collectImages(combinedParameters.images, imageCandidates);
154
- collectImages(combinedParameters.input_image, imageCandidates);
155
- collectImages(combinedParameters.input_images, imageCandidates);
156
- collectImages(combinedParameters.input_image_1, imageCandidates);
157
- collectImages(combinedParameters.input_image_2, imageCandidates);
158
- collectImages(combinedParameters.input_image_3, imageCandidates);
159
- collectImages(combinedParameters.image_1, imageCandidates);
160
- collectImages(combinedParameters.image_2, imageCandidates);
161
-
162
- const normalizeImageEntry = (entry) => {
163
- if (!entry) return null;
164
- if (typeof entry === "string") {
165
- return entry; // Return the URL string directly
166
- }
167
- if (typeof entry === "object") {
168
- if (Array.isArray(entry)) {
169
- return null;
170
- }
171
- if (entry.value) {
172
- return entry.value; // Return the value as a string
173
- }
174
- if (entry.url) {
175
- return entry.url; // Return the URL as a string
176
- }
177
- if (entry.path) {
178
- return entry.path; // Return the path as a string
179
- }
180
- }
181
- return null;
211
+ // For qwen-image-edit-plus, always include the image array if we have images
212
+ const inputPayload = {
213
+ ...basePayload,
214
+ ...(normalizedImages.length > 0 ? { image: normalizedImages } : {})
182
215
  };
183
216
 
184
- const normalizedImages = imageCandidates
185
- .map((candidate) => normalizeImageEntry(candidate))
186
- .filter((candidate) => candidate && typeof candidate === 'string');
217
+ requestParameters = {
218
+ input: inputPayload,
219
+ };
220
+ break;
221
+ }
222
+ case "replicate-qwen-image-edit-2511": {
223
+ const validRatios = ["1:1", "16:9", "9:16", "4:3", "3:4", "match_input_image"];
224
+ const validOutputFormats = ["webp", "jpg", "png"];
225
+
226
+ const aspectRatio = validRatios.includes(combinedParameters.aspect_ratio ?? combinedParameters.aspectRatio)
227
+ ? (combinedParameters.aspect_ratio ?? combinedParameters.aspectRatio)
228
+ : "match_input_image";
229
+ const outputFormat = validOutputFormats.includes(combinedParameters.output_format ?? combinedParameters.outputFormat)
230
+ ? (combinedParameters.output_format ?? combinedParameters.outputFormat)
231
+ : "webp";
232
+ const outputQuality = combinedParameters.output_quality ?? combinedParameters.outputQuality ?? 95;
233
+ const goFast = combinedParameters.go_fast ?? combinedParameters.goFast ?? true;
234
+ const disableSafetyChecker = combinedParameters.disable_safety_checker ?? combinedParameters.disableSafetyChecker ?? false;
187
235
 
188
- const omitUndefined = (obj) =>
189
- Object.fromEntries(
190
- Object.entries(obj).filter(([, value]) => value !== undefined && value !== null),
191
- );
236
+ const normalizedImages = collectNormalizedImages(combinedParameters);
192
237
 
193
238
  const basePayload = omitUndefined({
194
239
  prompt: modelPromptText,
195
240
  go_fast: goFast,
196
241
  aspect_ratio: aspectRatio,
197
242
  output_format: outputFormat,
198
- output_quality: outputQuality,
243
+ output_quality: Math.max(0, Math.min(100, outputQuality)),
199
244
  disable_safety_checker: disableSafetyChecker,
245
+ ...(Number.isInteger(combinedParameters.seed) && combinedParameters.seed > 0 ? { seed: combinedParameters.seed } : {}),
200
246
  });
201
247
 
202
- // For qwen-image-edit-plus, always include the image array if we have images
248
+ // For qwen-image-edit-2511, format images as array of strings (not objects)
203
249
  const inputPayload = {
204
250
  ...basePayload,
205
251
  ...(normalizedImages.length > 0 ? { image: normalizedImages } : {})
@@ -275,63 +321,30 @@ class ReplicateApiPlugin extends ModelPlugin {
275
321
  };
276
322
  break;
277
323
  }
324
+ case "replicate-seedance-1.5-pro": {
325
+ const validRatios = ["16:9", "4:3", "1:1", "3:4", "9:16", "21:9", "9:21"];
326
+
327
+ requestParameters = {
328
+ input: {
329
+ prompt: modelPromptText,
330
+ aspect_ratio: validRatios.includes(combinedParameters.aspectRatio) ? combinedParameters.aspectRatio : "16:9",
331
+ duration: Math.min(12, Math.max(2, combinedParameters.duration || 5)),
332
+ fps: 24,
333
+ camera_fixed: combinedParameters.camera_fixed || false,
334
+ generate_audio: combinedParameters.generate_audio || false,
335
+ ...(combinedParameters.seed && Number.isInteger(combinedParameters.seed) && combinedParameters.seed > 0 ? { seed: combinedParameters.seed } : {}),
336
+ ...(combinedParameters.image ? { image: combinedParameters.image } : {}),
337
+ ...(combinedParameters.image && combinedParameters.last_frame_image ? { last_frame_image: combinedParameters.last_frame_image } : {}),
338
+ },
339
+ };
340
+ break;
341
+ }
278
342
  case "replicate-seedream-4": {
279
343
  const validSizes = ["1K", "2K", "4K", "custom"];
280
344
  const validRatios = ["1:1", "4:3", "3:4", "16:9", "9:16", "match_input_image"];
281
345
  const validSequentialModes = ["disabled", "auto"];
282
346
 
283
- // Collect input images from multiple parameter sources (same pattern as qwen-image-edit-plus)
284
- const collectImages = (candidate, accumulator) => {
285
- if (!candidate) return;
286
- if (Array.isArray(candidate)) {
287
- candidate.forEach((item) => collectImages(item, accumulator));
288
- return;
289
- }
290
- accumulator.push(candidate);
291
- };
292
-
293
- const imageCandidates = [];
294
- collectImages(combinedParameters.image, imageCandidates);
295
- collectImages(combinedParameters.images, imageCandidates);
296
- collectImages(combinedParameters.input_image, imageCandidates);
297
- collectImages(combinedParameters.input_images, imageCandidates);
298
- collectImages(combinedParameters.input_image_1, imageCandidates);
299
- collectImages(combinedParameters.input_image_2, imageCandidates);
300
- collectImages(combinedParameters.input_image_3, imageCandidates);
301
- collectImages(combinedParameters.image_1, imageCandidates);
302
- collectImages(combinedParameters.image_2, imageCandidates);
303
- collectImages(combinedParameters.imageInput, imageCandidates);
304
-
305
- const normalizeImageEntry = (entry) => {
306
- if (!entry) return null;
307
- if (typeof entry === "string") {
308
- return entry; // Return the URL string directly
309
- }
310
- if (typeof entry === "object") {
311
- if (Array.isArray(entry)) {
312
- return null;
313
- }
314
- if (entry.value) {
315
- return entry.value; // Return the value as a string
316
- }
317
- if (entry.url) {
318
- return entry.url; // Return the URL as a string
319
- }
320
- if (entry.path) {
321
- return entry.path; // Return the path as a string
322
- }
323
- }
324
- return null;
325
- };
326
-
327
- const normalizedImages = imageCandidates
328
- .map((candidate) => normalizeImageEntry(candidate))
329
- .filter((candidate) => candidate && typeof candidate === 'string');
330
-
331
- const omitUndefined = (obj) =>
332
- Object.fromEntries(
333
- Object.entries(obj).filter(([, value]) => value !== undefined && value !== null),
334
- );
347
+ const normalizedImages = collectNormalizedImages(combinedParameters, ['imageInput']);
335
348
 
336
349
  const basePayload = omitUndefined({
337
350
  prompt: modelPromptText,
@@ -341,7 +354,7 @@ class ReplicateApiPlugin extends ModelPlugin {
341
354
  max_images: combinedParameters.maxImages || combinedParameters.numberResults || 1,
342
355
  aspect_ratio: validRatios.includes(combinedParameters.aspectRatio) ? combinedParameters.aspectRatio : "4:3",
343
356
  sequential_image_generation: validSequentialModes.includes(combinedParameters.sequentialImageGeneration) ? combinedParameters.sequentialImageGeneration : "disabled",
344
- ...(combinedParameters.seed && Number.isInteger(combinedParameters.seed) && combinedParameters.seed > 0 ? { seed: combinedParameters.seed } : {}),
357
+ ...(Number.isInteger(combinedParameters.seed) && combinedParameters.seed > 0 ? { seed: combinedParameters.seed } : {}),
345
358
  });
346
359
 
347
360
  // For seedream-4, include the image_input array if we have images
@@ -350,6 +363,74 @@ class ReplicateApiPlugin extends ModelPlugin {
350
363
  ...(normalizedImages.length > 0 ? { image_input: normalizedImages } : {})
351
364
  };
352
365
 
366
+ requestParameters = {
367
+ input: inputPayload,
368
+ };
369
+ break;
370
+ }
371
+ case "replicate-flux-2-pro": {
372
+ const validResolutions = ["match_input_image", "0.5 MP", "1 MP", "2 MP", "4 MP"];
373
+ const validRatios = [
374
+ "match_input_image",
375
+ "custom",
376
+ "1:1",
377
+ "16:9",
378
+ "3:2",
379
+ "2:3",
380
+ "4:5",
381
+ "5:4",
382
+ "9:16",
383
+ "3:4",
384
+ "4:3"
385
+ ];
386
+ const validOutputFormats = ["webp", "jpg", "png"];
387
+
388
+ const normalizedImages = collectNormalizedImages(combinedParameters).slice(0, 8); // Maximum 8 images
389
+
390
+ const aspectRatio = validRatios.includes(combinedParameters.aspect_ratio ?? combinedParameters.aspectRatio)
391
+ ? (combinedParameters.aspect_ratio ?? combinedParameters.aspectRatio)
392
+ : "1:1";
393
+
394
+ const resolution = validResolutions.includes(combinedParameters.resolution)
395
+ ? combinedParameters.resolution
396
+ : "1 MP";
397
+
398
+ const outputFormat = validOutputFormats.includes(combinedParameters.output_format ?? combinedParameters.outputFormat)
399
+ ? (combinedParameters.output_format ?? combinedParameters.outputFormat)
400
+ : "webp";
401
+
402
+ const outputQuality = combinedParameters.output_quality ?? combinedParameters.outputQuality ?? 80;
403
+ const safetyTolerance = combinedParameters.safety_tolerance ?? combinedParameters.safetyTolerance ?? 2;
404
+
405
+ // Validate and round width/height to multiples of 32 if provided
406
+ let width = combinedParameters.width;
407
+ let height = combinedParameters.height;
408
+
409
+ if (width !== undefined && width !== null) {
410
+ width = Math.max(256, Math.min(2048, Math.round(width / 32) * 32));
411
+ }
412
+ if (height !== undefined && height !== null) {
413
+ height = Math.max(256, Math.min(2048, Math.round(height / 32) * 32));
414
+ }
415
+
416
+ const basePayload = omitUndefined({
417
+ prompt: modelPromptText,
418
+ aspect_ratio: aspectRatio,
419
+ resolution: resolution,
420
+ output_format: outputFormat,
421
+ output_quality: Math.max(0, Math.min(100, outputQuality)),
422
+ safety_tolerance: Math.max(1, Math.min(5, safetyTolerance)),
423
+ ...(width !== undefined && width !== null ? { width } : {}),
424
+ ...(height !== undefined && height !== null ? { height } : {}),
425
+ ...(Number.isInteger(combinedParameters.seed) && combinedParameters.seed > 0 ? { seed: combinedParameters.seed } : {}),
426
+ });
427
+
428
+ // Include input_images array if we have images
429
+ const inputPayload = {
430
+ ...basePayload,
431
+ ...(normalizedImages.length > 0 ? { input_images: normalizedImages } : {})
432
+ };
433
+
353
434
  requestParameters = {
354
435
  input: inputPayload,
355
436
  };