@alpic80/rivet-core 1.24.0-aidon.5 → 1.24.2-aidon.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -6
- package/dist/cjs/bundle.cjs +1382 -262
- package/dist/cjs/bundle.cjs.map +4 -4
- package/dist/esm/api/createProcessor.js +2 -0
- package/dist/esm/api/streaming.js +27 -0
- package/dist/esm/exports.js +1 -0
- package/dist/esm/integrations/CodeRunner.js +10 -2
- package/dist/esm/integrations/mcp/MCPBase.js +87 -0
- package/dist/esm/integrations/mcp/MCPProvider.js +23 -0
- package/dist/esm/integrations/mcp/MCPUtils.js +33 -0
- package/dist/esm/model/GraphProcessor.js +3 -0
- package/dist/esm/model/NodeRegistration.js +0 -1
- package/dist/esm/model/Nodes.js +9 -0
- package/dist/esm/model/nodes/ChatNodeBase.js +1 -1
- package/dist/esm/model/nodes/CodeNode.js +1 -1
- package/dist/esm/model/nodes/GetAllDatasetsNode.js +1 -1
- package/dist/esm/model/nodes/GraphInputNode.js +2 -0
- package/dist/esm/model/nodes/MCPDiscoveryNode.js +210 -0
- package/dist/esm/model/nodes/MCPGetPromptNode.js +233 -0
- package/dist/esm/model/nodes/MCPToolCallNode.js +261 -0
- package/dist/esm/model/nodes/ObjectNode.js +42 -21
- package/dist/esm/model/nodes/PromptNode.js +1 -1
- package/dist/esm/model/nodes/TextNode.js +13 -2
- package/dist/esm/plugins/anthropic/anthropic.js +22 -3
- package/dist/esm/plugins/anthropic/nodes/ChatAnthropicNode.js +33 -3
- package/dist/esm/plugins/google/google.js +29 -14
- package/dist/esm/plugins/google/nodes/ChatGoogleNode.js +70 -5
- package/dist/esm/utils/interpolation.js +155 -17
- package/dist/esm/utils/openai.js +24 -0
- package/dist/types/api/createProcessor.d.ts +3 -2
- package/dist/types/api/streaming.d.ts +8 -1
- package/dist/types/exports.d.ts +1 -0
- package/dist/types/integrations/CodeRunner.d.ts +4 -3
- package/dist/types/integrations/mcp/MCPBase.d.ts +18 -0
- package/dist/types/integrations/mcp/MCPProvider.d.ts +153 -0
- package/dist/types/integrations/mcp/MCPUtils.d.ts +9 -0
- package/dist/types/model/GraphProcessor.d.ts +1 -1
- package/dist/types/model/Nodes.d.ts +13 -2
- package/dist/types/model/ProcessContext.d.ts +5 -1
- package/dist/types/model/Project.d.ts +2 -0
- package/dist/types/model/nodes/GetAllDatasetsNode.d.ts +2 -2
- package/dist/types/model/nodes/MCPDiscoveryNode.d.ts +9 -0
- package/dist/types/model/nodes/MCPGetPromptNode.d.ts +23 -0
- package/dist/types/model/nodes/MCPToolCallNode.d.ts +26 -0
- package/dist/types/model/nodes/ObjectNode.d.ts +3 -2
- package/dist/types/model/nodes/TextNode.d.ts +2 -1
- package/dist/types/plugins/anthropic/anthropic.d.ts +21 -3
- package/dist/types/plugins/anthropic/nodes/ChatAnthropicNode.d.ts +5 -0
- package/dist/types/plugins/google/google.d.ts +12 -2
- package/dist/types/plugins/google/nodes/ChatGoogleNode.d.ts +7 -0
- package/dist/types/utils/interpolation.d.ts +6 -1
- package/dist/types/utils/openai.d.ts +24 -0
- package/package.json +3 -3
package/dist/cjs/bundle.cjs
CHANGED
@@ -89,6 +89,10 @@ __export(src_exports, {
   LoadDatasetNodeImpl: () => LoadDatasetNodeImpl,
   LoopControllerNodeImpl: () => LoopControllerNodeImpl,
   LoopUntilNodeImpl: () => LoopUntilNodeImpl,
+  MCPError: () => MCPError,
+  MCPErrorType: () => MCPErrorType,
+  MCPGetPromptNodeImpl: () => MCPGetPromptNodeImpl,
+  MCPToolCallNodeImpl: () => MCPToolCallNodeImpl,
   MatchNodeImpl: () => MatchNodeImpl,
   NodeImpl: () => NodeImpl,
   NodeRegistration: () => NodeRegistration,
@@ -161,6 +165,7 @@ __export(src_exports, {
   coreCreateProcessor: () => coreCreateProcessor,
   coreRunGraph: () => coreRunGraph,
   createDatasetNode: () => createDatasetNode,
+  createOnStreamUserEvents: () => createOnStreamUserEvents,
   cronNode: () => cronNode,
   dataTypeDisplayNames: () => dataTypeDisplayNames,
   dataTypes: () => dataTypes,
@@ -238,6 +243,9 @@ __export(src_exports, {
   looseDataValueToDataValue: () => looseDataValueToDataValue,
   looseDataValuesToDataValues: () => looseDataValuesToDataValues,
   matchNode: () => matchNode,
+  mcpDiscoveryNode: () => mcpDiscoveryNode,
+  mcpGetPromptNode: () => mcpGetPromptNode,
+  mcpToolCallNode: () => mcpToolCallNode,
   newId: () => newId,
   nodeDefinition: () => nodeDefinition,
   numberNode: () => numberNode,
@@ -352,6 +360,10 @@ __export(exports_exports, {
   LoadDatasetNodeImpl: () => LoadDatasetNodeImpl,
   LoopControllerNodeImpl: () => LoopControllerNodeImpl,
   LoopUntilNodeImpl: () => LoopUntilNodeImpl,
+  MCPError: () => MCPError,
+  MCPErrorType: () => MCPErrorType,
+  MCPGetPromptNodeImpl: () => MCPGetPromptNodeImpl,
+  MCPToolCallNodeImpl: () => MCPToolCallNodeImpl,
   MatchNodeImpl: () => MatchNodeImpl,
   NodeImpl: () => NodeImpl,
   NodeRegistration: () => NodeRegistration,
@@ -423,6 +435,7 @@ __export(exports_exports, {
   coreCreateProcessor: () => coreCreateProcessor,
   coreRunGraph: () => coreRunGraph,
   createDatasetNode: () => createDatasetNode,
+  createOnStreamUserEvents: () => createOnStreamUserEvents,
   cronNode: () => cronNode,
   dataTypeDisplayNames: () => dataTypeDisplayNames,
   dataTypes: () => dataTypes,
@@ -500,6 +513,9 @@ __export(exports_exports, {
   looseDataValueToDataValue: () => looseDataValueToDataValue,
   looseDataValuesToDataValues: () => looseDataValuesToDataValues,
   matchNode: () => matchNode,
+  mcpDiscoveryNode: () => mcpDiscoveryNode,
+  mcpGetPromptNode: () => mcpGetPromptNode,
+  mcpToolCallNode: () => mcpToolCallNode,
   newId: () => newId,
   nodeDefinition: () => nodeDefinition,
   numberNode: () => numberNode,
@@ -1782,10 +1798,10 @@ var DEFAULT_CHAT_ENDPOINT = "https://api.openai.com/v1/chat/completions";
 var DEFAULT_CHAT_NODE_TIMEOUT = 3e4;

 // src/model/GraphProcessor.ts
- var …
+ var import_lodash_es16 = require("lodash");
 var import_p_queue = __toESM(require("p-queue-6"), 1);
 var import_emittery2 = __toESM(require("emittery-0-13"), 1);
- var …
+ var import_non_secure78 = require("nanoid/non-secure");
 var import_ts_pattern9 = require("ts-pattern");

 // src/model/NodeImpl.ts
@@ -2114,8 +2130,10 @@ var import_non_secure4 = require("nanoid/non-secure");
 var import_ts_dedent3 = require("ts-dedent");

 // src/utils/interpolation.ts
- var …
- var …
+ var import_lodash_es4 = require("lodash");
+ var TOKEN_MATCH_REGEX = /\{\{([^}]+?)\}\}/g;
+ var ESCAPED_TOKEN_REGEX = /\{\{\{([^}]+?)\}\}\}/g;
+ var ESCAPED_ESCAPED_TOKEN_REGEX = /\\\{\\\{([^}]+?)\\\}\\\}/g;
 var processingFunctions = {
   indent: (input, spaces = 0) => {
     const indent = " ".repeat(spaces);
@@ -2167,6 +2185,68 @@ var processingFunctions = {
     return lines.join("\n");
   }
 };
+ function unwrapPotentialDataValue(value) {
+   if (typeof value === "object" && value !== null && typeof value.type === "string" && Object.prototype.hasOwnProperty.call(value, "value")) {
+     return value.value;
+   }
+   return value;
+ }
+ function resolveExpressionRawValue(source, expression, sourceType) {
+   if (!source) {
+     return void 0;
+   }
+   const match13 = expression.trim().match(/^([^[.\s]+)\s*(.*)$/);
+   let key;
+   let path;
+   if (match13 && typeof match13[1] === "string") {
+     key = match13[1];
+     const rawPath = match13[2];
+     if (rawPath) {
+       path = rawPath.trim().replace(/\s*(\.|\[|\])\s*/g, "$1");
+     } else {
+       path = void 0;
+     }
+   } else {
+     key = expression.trim();
+     path = void 0;
+   }
+   if (!key) {
+     return void 0;
+   }
+   const topLevelValue = source[key];
+   if (topLevelValue === void 0) {
+     return void 0;
+   }
+   const baseValue = unwrapPotentialDataValue(topLevelValue);
+   let finalValue;
+   if (path) {
+     try {
+       finalValue = (0, import_lodash_es4.get)(baseValue, path);
+       finalValue = unwrapPotentialDataValue(finalValue);
+     } catch (error) {
+       console.warn(`Error accessing path "${path}" in ${sourceType} value for key "${key}":`, error);
+       return void 0;
+     }
+   } else {
+     finalValue = baseValue;
+   }
+   return finalValue;
+ }
+ function resolveExpressionToString(source, expression, sourceType) {
+   const finalValue = resolveExpressionRawValue(source, expression, sourceType);
+   if (finalValue === void 0) {
+     return void 0;
+   }
+   if (typeof finalValue === "object" && finalValue !== null) {
+     try {
+       return JSON.stringify(finalValue);
+     } catch (error) {
+       console.warn(`Error stringifying object/array in ${sourceType} for expression "${expression}":`, error);
+       return "[object Object]";
+     }
+   }
+   return String(finalValue);
+ }
 function parseProcessing(instruction) {
   const parts = instruction.trim().split(/\s+/);
   return {
@@ -2186,26 +2266,57 @@ function applyProcessing(value, processingChain) {
     return processingFunc(result, param);
   }, value);
 }
- function interpolate( …
- return …
-
-
-
+ function interpolate(template, variables, graphInputValues, contextValues) {
+   return template.replace(ESCAPED_TOKEN_REGEX, (_match, expression) => {
+     return `\\{\\{${expression}\\}\\}`;
+   }).replace(/\{\{((?:@graphInputs|@context)\..*?|[^}]+?)\}\}/g, (_match, expressionWithMaybeProcessing) => {
+     const parts = expressionWithMaybeProcessing.split("|").map((s) => s.trim());
+     const expression = parts[0];
+     const processingChain = parts.slice(1).join("|");
+     let resolvedValue;
+     if (expression.startsWith("@graphInputs.")) {
+       resolvedValue = resolveExpressionToString(
+         graphInputValues,
+         expression.substring("@graphInputs.".length),
+         "graphInputs"
+       );
+     } else if (expression.startsWith("@context.")) {
+       resolvedValue = resolveExpressionToString(contextValues, expression.substring("@context.".length), "context");
+     } else {
+       const simpleVar = variables[expression];
+       if (simpleVar !== void 0) {
+         resolvedValue = String(unwrapPotentialDataValue(simpleVar) ?? "");
+       } else {
+         resolvedValue = void 0;
+       }
+     }
+     if (resolvedValue === void 0) {
+       console.warn(`Interpolation variable or path "${expression}" not found or resolved to undefined.`);
       return "";
-     if (processing.length > 0) {
-       return applyProcessing(value, p1);
     }
-
-
-
+     if (processingChain) {
+       return applyProcessing(resolvedValue, processingChain);
+     }
+     return resolvedValue;
+   }).replace(ESCAPED_ESCAPED_TOKEN_REGEX, (_match, expression) => {
+     return `{{${expression}}}`;
   });
 }
 function extractInterpolationVariables(template) {
-  const matches = template. …
+  const matches = template.replace(ESCAPED_TOKEN_REGEX, (_match, content) => {
+    return `\\{\\{${content}\\}\\}`;
+  }).matchAll(TOKEN_MATCH_REGEX);
   const variables = /* @__PURE__ */ new Set();
   for (const match13 of matches) {
-
-
+    if (match13[1]) {
+      const [tokenPart] = match13[1].split("|");
+      if (tokenPart) {
+        const token = tokenPart.trim();
+        if (!token.startsWith("@graphInputs.") && !token.startsWith("@context.")) {
+          variables.add(token);
+        }
+      }
+    }
   }
   return Array.from(variables);
 }
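The interpolation changes above add two prefixed token forms, {{@graphInputs.<id>}} and {{@context.<key>}}, next to plain {{variable}} tokens, plus a triple-brace escape {{{...}}} that renders the double braces literally. A standalone TypeScript sketch of the intended behavior (the template, names, and values below are illustrative, not taken from the package):

// Illustrative only: mirrors the token forms handled by interpolate() in the hunk above.
// A DataValue-shaped object ({ type, value }) is unwrapped before use, as
// unwrapPotentialDataValue does; all names and values here are made up.
const template =
  "Hello {{name}}! Input: {{@graphInputs.userName}}, env: {{@context.env}}, literal: {{{name}}}";
const variables = { name: "Ada" };
const graphInputNodeValues = { userName: { type: "string", value: "Grace" } };
const contextValues = { env: { type: "string", value: "prod" } };
// Expected result, following the resolution rules in the diff:
// "Hello Ada! Input: Grace, env: prod, literal: {{name}}"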
@@ -2223,7 +2334,9 @@ var TextNodeImpl = class extends NodeImpl {
       width: 300
     },
     data: {
-      text: "{{input}}"
+      text: "{{input}}",
+      normalizeLineEndings: true
+      // Default to true for better compatibility
     }
   };
   return chartNode;
@@ -2263,6 +2376,12 @@ var TextNodeImpl = class extends NodeImpl {
       dataKey: "text",
       language: "prompt-interpolation-markdown",
       theme: "prompt-interpolation"
+    },
+    {
+      type: "toggle",
+      label: "Normalize Line Endings",
+      dataKey: "normalizeLineEndings",
+      helperMessage: "Normalize line endings to use only LF (\\n) instead of CRLF (\\r\\n)."
     }
   ];
 }
@@ -2275,7 +2394,7 @@ var TextNodeImpl = class extends NodeImpl {
     text: truncated
   };
 }
- async process(inputs) {
+ async process(inputs, context) {
   const inputMap = Object.keys(inputs).reduce(
     (acc, key) => {
       const stringValue = coerceTypeOptional(inputs[key], "string") ?? "";
@@ -2284,7 +2403,17 @@ var TextNodeImpl = class extends NodeImpl {
     },
     {}
   );
-
+  let outputValue = interpolate(
+    this.chartNode.data.text,
+    inputMap,
+    context.graphInputNodeValues,
+    // Pass graph inputs
+    context.contextValues
+    // Pass context values
+  );
+  if (this.chartNode.data.normalizeLineEndings) {
+    outputValue = outputValue.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+  }
   return {
     output: {
       type: "string",
@@ -2326,7 +2455,7 @@ __export(openai_exports, {
   openaiModels: () => openaiModels,
   streamChatCompletions: () => streamChatCompletions
 });
- var …
+ var import_lodash_es5 = require("lodash");

 // src/utils/fetchEventSource.ts
 var EventSourceResponse = class extends Response {
@@ -2629,6 +2758,30 @@ var openaiModels = {
   },
   displayName: "GPT-4o Audio (Preview)"
 },
+ "gpt-4.1": {
+   maxTokens: 1047576,
+   cost: {
+     prompt: 2e-6,
+     completion: 8e-6
+   },
+   displayName: "GPT-4.1"
+ },
+ o3: {
+   maxTokens: 2e5,
+   cost: {
+     prompt: 1e-5,
+     completion: 4e-5
+   },
+   displayName: "o3"
+ },
+ "o4-mini": {
+   maxTokens: 2e5,
+   cost: {
+     prompt: 11e-7,
+     completion: 44e-7
+   },
+   displayName: "o4-mini"
+ },
 "local-model": {
   maxTokens: Number.MAX_SAFE_INTEGER,
   cost: {
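The cost figures for the three new models are USD per token, so gpt-4.1 works out to $2 per million prompt tokens and $8 per million completion tokens. A quick arithmetic sketch with the numbers copied from the entries above (the helper object below is illustrative, not part of the package):

// Per-token USD costs copied from the new openaiModels entries above.
const newModelCosts = {
  "gpt-4.1": { prompt: 2e-6, completion: 8e-6 },
  "o3": { prompt: 1e-5, completion: 4e-5 },
  "o4-mini": { prompt: 11e-7, completion: 44e-7 },
} as const;

// Example: 10,000 prompt tokens + 1,000 completion tokens on gpt-4.1:
// 10_000 * 2e-6 + 1_000 * 8e-6 = 0.02 + 0.008 = $0.028
const estimateUsd =
  10_000 * newModelCosts["gpt-4.1"].prompt + 1_000 * newModelCosts["gpt-4.1"].completion;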
@@ -2638,7 +2791,7 @@ var openaiModels = {
   displayName: "Local Model"
 }
 };
- var openAiModelOptions = (0, …
+ var openAiModelOptions = (0, import_lodash_es5.orderBy)(
 Object.entries(openaiModels).map(([id, { displayName }]) => ({
   value: id,
   label: displayName
@@ -3529,7 +3682,7 @@ var ChatNodeBase = {
   })
 );
 const { messages } = getChatNodeMessages(inputs);
- const isReasoningModel = finalModel.startsWith("o1") || finalModel.startsWith("o3");
+ const isReasoningModel = finalModel.startsWith("o1") || finalModel.startsWith("o3") || finalModel.startsWith("o4");
 const completionMessages = await Promise.all(
   messages.map((message) => chatMessageToOpenAIChatCompletionMessage(message, { isReasoningModel }))
 );
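The added startsWith("o4") branch means the new o4-mini model from the table above also takes the reasoning-model code path. Restated as a standalone predicate (a sketch; finalModel is whatever model ID the Chat node resolved):

// Same check as the bundled line above, written as a helper for clarity.
const isReasoningModel = (finalModel: string): boolean =>
  finalModel.startsWith("o1") || finalModel.startsWith("o3") || finalModel.startsWith("o4");

// isReasoningModel("o4-mini") === true, isReasoningModel("gpt-4.1") === false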
@@ -4079,7 +4232,7 @@ var chatNode = nodeDefinition(ChatNodeImpl, "Chat");

 // src/model/nodes/PromptNode.ts
 var import_non_secure6 = require("nanoid/non-secure");
- var …
+ var import_lodash_es6 = require("lodash");
 var import_ts_dedent5 = require("ts-dedent");
 var import_ts_pattern4 = require("ts-pattern");
 var PromptNodeImpl = class extends NodeImpl {
@@ -4248,8 +4401,13 @@ var PromptNodeImpl = class extends NodeImpl {
   };
 }
 async process(inputs, context) {
-  const inputMap = (0, …
-  const outputValue = interpolate( …
+  const inputMap = (0, import_lodash_es6.mapValues)(inputs, (input) => coerceType(input, "string"));
+  const outputValue = interpolate(
+    this.chartNode.data.promptText,
+    inputMap,
+    context.graphInputNodeValues,
+    context.contextValues
+  );
   const type = getInputOrData(this.data, inputs, "type", "string");
   const isCacheBreakpoint = getInputOrData(this.data, inputs, "isCacheBreakpoint", "boolean");
   if (["assistant", "system", "user", "function"].includes(type) === false) {
@@ -4650,13 +4808,19 @@ var CodeNodeImpl = class extends NodeImpl {
   };
 }
 async process(inputs, context) {
-  const outputs = await context.codeRunner.runCode( …
-
-
-
-
-
+  const outputs = await context.codeRunner.runCode(
+    this.data.code,
+    inputs,
+    {
+      includeFetch: this.data.allowFetch ?? false,
+      includeRequire: this.data.allowRequire ?? false,
+      includeRivet: this.data.allowRivet ?? false,
+      includeProcess: this.data.allowProcess ?? false,
+      includeConsole: this.data.allowConsole ?? false
+    },
+    context.graphInputNodeValues,
+    context.contextValues
+  );
   if (outputs == null || typeof outputs !== "object" || "then" in outputs && typeof outputs.then === "function") {
     throw new Error("Code node must return an object with output values.");
   }
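The Code node now forwards five permission flags plus the graph input values and context values to codeRunner.runCode. A sketch of the shapes implied by the call above (the interface name and the spelled-out signature are assumptions; the real declarations live in dist/types/integrations/CodeRunner.d.ts):

// Illustrative shape only; the flag names mirror the bundled call above,
// the interface name itself is hypothetical.
interface CodeRunnerPermissionsSketch {
  includeFetch: boolean;
  includeRequire: boolean;
  includeRivet: boolean;
  includeProcess: boolean;
  includeConsole: boolean;
}

// Hypothetical signature implied by the call site:
// runCode(code, inputs, permissions, graphInputNodeValues, contextValues)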
@@ -5820,6 +5984,7 @@ var GraphInputNodeImpl = class extends NodeImpl {
     type: this.data.dataType,
     value: inputValue
   };
+  context.graphInputNodeValues[this.data.id] = value;
   return { ["data"]: value };
 }
 };
@@ -6104,7 +6269,7 @@ var subGraphNode = nodeDefinition(SubGraphNodeImpl, "Subgraph");

 // src/model/nodes/ArrayNode.ts
 var import_non_secure19 = require("nanoid/non-secure");
- var …
+ var import_lodash_es7 = require("lodash");
 var import_ts_dedent18 = require("ts-dedent");
 var ArrayNodeImpl = class extends NodeImpl {
   static create() {
@@ -6210,7 +6375,7 @@ var ArrayNodeImpl = class extends NodeImpl {
 if (Array.isArray(input == null ? void 0 : input.value)) {
   for (const value of (input == null ? void 0 : input.value) ?? []) {
     if (this.data.flattenDeep) {
-      outputArray.push(...Array.isArray(value) ? (0, …
+      outputArray.push(...Array.isArray(value) ? (0, import_lodash_es7.flattenDeep)(value) : [value]);
     } else {
       outputArray.push(value);
     }
@@ -6350,7 +6515,7 @@ var extractJsonNode = nodeDefinition(ExtractJsonNodeImpl, "Extract JSON");

 // src/model/nodes/AssemblePromptNode.ts
 var import_non_secure21 = require("nanoid/non-secure");
- var …
+ var import_lodash_es8 = require("lodash");
 var import_ts_dedent20 = require("ts-dedent");
 var AssemblePromptNodeImpl = class extends NodeImpl {
   static create() {
@@ -6457,7 +6622,7 @@ var AssemblePromptNodeImpl = class extends NodeImpl {
 const output = {};
 const isLastMessageCacheBreakpoint = getInputOrData(this.data, inputs, "isLastMessageCacheBreakpoint", "boolean");
 const outMessages = [];
- const inputMessages = (0, …
+ const inputMessages = (0, import_lodash_es8.orderBy)(
   Object.entries(inputs).filter(([key]) => key.startsWith("message")),
   ([key]) => key,
   "asc"
@@ -7011,7 +7176,7 @@ var trimChatMessagesNode = nodeDefinition(TrimChatMessagesNodeImpl, "Trim Chat M

 // src/model/nodes/ExternalCallNode.ts
 var import_non_secure25 = require("nanoid/non-secure");
- var …
+ var import_lodash_es9 = require("lodash");
 var import_ts_dedent24 = require("ts-dedent");
 var ExternalCallNodeImpl = class extends NodeImpl {
   static create() {
@@ -7106,7 +7271,7 @@ var ExternalCallNodeImpl = class extends NodeImpl {
   }
 }
 const fn = context.externalFunctions[functionName];
- const externalContext = (0, …
+ const externalContext = (0, import_lodash_es9.omit)(context, ["setGlobal"]);
 if (!fn) {
   if (this.data.useErrorOutput) {
     return {
@@ -9110,7 +9275,7 @@ var joinNode = nodeDefinition(JoinNodeImpl, "Coalesce");

 // src/model/nodes/FilterNode.ts
 var import_non_secure45 = require("nanoid/non-secure");
- var …
+ var import_lodash_es10 = require("lodash");
 var import_ts_dedent44 = require("ts-dedent");
 var FilterNodeImpl = class extends NodeImpl {
   static create() {
@@ -9166,7 +9331,7 @@ var FilterNodeImpl = class extends NodeImpl {
 var _a;
 const array = coerceType(inputs["array"], "any[]");
 const include = coerceType(inputs["include"], "boolean[]");
- const zipped = (0, …
+ const zipped = (0, import_lodash_es10.zip)(array, include);
 const filtered = zipped.filter(([_, include2]) => include2).map(([value, _]) => value);
 return {
   ["filtered"]: {
@@ -9202,16 +9367,18 @@ var ObjectNodeImpl = class extends NodeImpl {
   return chartNode;
 }
 getInputDefinitions() {
-  const …
-
+  const jsonTemplate = this.chartNode.data.jsonTemplate ?? "";
+  const matches = jsonTemplate.match(/\{\{([^}]+?)\}\}/g);
+  const allTokens = matches ?? [];
+  const inputTokens = allTokens.map((token) => token.slice(2, -2).trim()).filter((tokenContent) => !tokenContent.startsWith("@graphInputs.") && !tokenContent.startsWith("@context.")).filter((token) => token !== "");
+  return [...new Set(inputTokens)].map((inputName) => {
   return {
-
-
-    title: inputName.slice(2, -2),
+    id: inputName,
+    title: inputName,
     dataType: "any",
     required: false
   };
-  })
+  });
 }
 getOutputDefinitions() {
   return [
@@ -9252,10 +9419,28 @@ var ObjectNodeImpl = class extends NodeImpl {
   group: ["Objects"]
 };
 }
- interpolate(baseString, values2) {
-   return baseString.replace(/("?)\{\{([^}] …
+ interpolate(baseString, values2, graphInputNodeValues, contextValues) {
+   return baseString.replace(/("?)\{\{([^}]+?)\}\}("?)/g, (_m, openQuote, key, _closeQuote) => {
   const isQuoted = Boolean(openQuote);
-   const …
+   const trimmedKey = key.trim();
+   let value;
+   const graphInputPrefix = "@graphInputs.";
+   const contextPrefix = "@context.";
+   if (trimmedKey.startsWith(graphInputPrefix) && graphInputNodeValues) {
+     value = resolveExpressionRawValue(
+       graphInputNodeValues,
+       trimmedKey.substring(graphInputPrefix.length),
+       "graphInputs"
+     );
+   } else if (trimmedKey.startsWith(contextPrefix) && contextValues) {
+     value = resolveExpressionRawValue(
+       contextValues,
+       trimmedKey.substring(contextPrefix.length),
+       "context"
+     );
+   } else {
+     value = values2[trimmedKey];
+   }
   if (value == null) {
     return "null";
   }
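The Object node's interpolate now checks the @graphInputs. and @context. prefixes before falling back to the node's own input values, and renders a missing value as null. A small sketch of a jsonTemplate that uses both token kinds (names and values are illustrative):

// Hypothetical Object node template; "user" is a node input, "maxItems" a graph input id.
const jsonTemplate = `{
  "user": "{{user}}",
  "maxItems": {{@graphInputs.maxItems}}
}`;
// With input user = "ada" and graph input maxItems = { type: "number", value: 5 },
// the interpolated string should parse to { user: "ada", maxItems: 5 };
// an unresolved token renders as null per the value == null branch above.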
@@ -9268,27 +9453,32 @@ var ObjectNodeImpl = class extends NodeImpl {
   return JSON.stringify(value);
 });
 }
- async process(inputs) {
+ async process(inputs, context) {
 const inputMap = Object.keys(inputs).reduce(
   (acc, key) => {
-
-    acc[key] = (_a = inputs[key]) == null ? void 0 : _a.value;
+    acc[key] = unwrapPotentialDataValue(inputs[key]);
     return acc;
   },
   {}
 );
- const …
-
-
-
-
-
-
-
+ const interpolatedString = this.interpolate(
+   this.chartNode.data.jsonTemplate,
+   inputMap,
+   context.graphInputNodeValues,
+   // Pass graph inputs
+   context.contextValues
+   // Pass context values
+ );
+ let outputValue;
+ try {
+   outputValue = JSON.parse(interpolatedString);
+ } catch (err) {
+   throw new Error(`Failed to parse JSON template: ${err.message}`);
 }
+ const outputType = Array.isArray(outputValue) ? "object[]" : "object";
 return {
   output: {
-    type: …
+    type: outputType,
     value: outputValue
   }
 };
@@ -9364,7 +9554,7 @@ var booleanNode = nodeDefinition(BooleanNodeImpl, "Boolean");

 // src/model/nodes/CompareNode.ts
 var import_non_secure48 = require("nanoid/non-secure");
- var …
+ var import_lodash_es11 = require("lodash");
 var import_ts_pattern6 = require("ts-pattern");
 var import_ts_dedent47 = require("ts-dedent");
 var CompareNodeImpl = class extends NodeImpl {
@@ -9471,7 +9661,7 @@ var CompareNodeImpl = class extends NodeImpl {
 return {
   ["output"]: {
     type: "boolean",
-    value: (0, import_ts_pattern6.match)(comparisonFunction).with("==", () => (0, …
+    value: (0, import_ts_pattern6.match)(comparisonFunction).with("==", () => (0, import_lodash_es11.isEqual)(value1, value2)).with("!=", () => !(0, import_lodash_es11.isEqual)(value1, value2)).with("<", () => value1 < value2).with(">", () => value1 > value2).with("<=", () => value1 <= value2).with(">=", () => value1 >= value2).with("and", () => !!(value1 && value2)).with("or", () => !!(value1 || value2)).with("xor", () => !!(value1 ? !value2 : value2)).with("nand", () => !(value1 && value2)).with("nor", () => !(value1 || value2)).with("xnor", () => !(value1 ? !value2 : value2)).exhaustive()
   }
 };
@@ -9780,7 +9970,7 @@ var randomNumberNode = nodeDefinition(RandomNumberNodeImpl, "Random Number");

 // src/model/nodes/ShuffleNode.ts
 var import_non_secure52 = require("nanoid/non-secure");
- var …
+ var import_lodash_es12 = require("lodash");
 var import_ts_dedent51 = require("ts-dedent");
 var ShuffleNodeImpl = class extends NodeImpl {
   static create() {
@@ -9829,7 +10019,7 @@ var ShuffleNodeImpl = class extends NodeImpl {
 var _a;
 const input = inputs["array"];
 const items = input ? isArrayDataValue(input) ? input.value : [input.value] : [];
- const shuffled = (0, …
+ const shuffled = (0, import_lodash_es12.shuffle)(items);
 return {
   ["shuffled"]: {
     type: ((_a = inputs["array"]) == null ? void 0 : _a.type) ?? "any[]",
@@ -10871,7 +11061,7 @@ var GetAllDatasetsNodeImpl = class extends NodeImpl {
 getEditors() {
   return [];
 }
- async process(context) {
+ async process(_inputs, context) {
 const { datasetProvider } = context;
 if (datasetProvider == null) {
   throw new Error("datasetProvider is required");
@@ -11374,7 +11564,7 @@ var extractMarkdownCodeBlocksNode = nodeDefinition(

 // src/model/nodes/AssembleMessageNode.ts
 var import_non_secure62 = require("nanoid/non-secure");
- var …
+ var import_lodash_es13 = require("lodash");
 var import_ts_pattern8 = require("ts-pattern");
 var messageTypeToTitle = {
   assistant: "Assistant",
@@ -11531,7 +11721,7 @@ var AssembleMessageNodeImpl = class extends NodeImpl {
 ).otherwise(() => {
   throw new Error(`Invalid type: ${type}`);
 });
- const inputParts = (0, …
+ const inputParts = (0, import_lodash_es13.orderBy)(
   Object.entries(inputs).filter(([key]) => key.startsWith("part")),
   ([key]) => key,
   "asc"
@@ -12042,9 +12232,9 @@ var import_non_secure67 = require("nanoid/non-secure");
 var import_ts_dedent61 = require("ts-dedent");

 // src/api/looseDataValue.ts
- var …
+ var import_lodash_es14 = require("lodash");
 function looseDataValuesToDataValues(values2) {
-  return (0, …
+  return (0, import_lodash_es14.mapValues)(values2, (val) => looseDataValueToDataValue(val));
 }
 function looseDataValueToDataValue(value) {
   if (typeof value === "string") {
@@ -13278,7 +13468,7 @@ var cronNode = nodeDefinition(CronNodeImpl, "Cron");
 // src/model/nodes/ToTreeNode.ts
 var import_non_secure73 = require("nanoid/non-secure");
 var import_ts_dedent67 = require("ts-dedent");
- var …
+ var import_lodash_es15 = require("lodash");
 var ToTreeNodeImpl = class extends NodeImpl {
   static create() {
     const chartNode = {
@@ -13364,7 +13554,7 @@ var ToTreeNodeImpl = class extends NodeImpl {
 if (!Array.isArray(objects) || objects.length === 0)
   return "";
 let result = "";
- const sortedObjects = this.data.useSortAlphabetically ? (0, …
+ const sortedObjects = this.data.useSortAlphabetically ? (0, import_lodash_es15.sortBy)(objects, (obj) => String((0, import_lodash_es15.get)(obj, "path", ""))) : objects;
 sortedObjects.forEach((obj, index) => {
   const isLastItem = index === sortedObjects.length - 1;
   const prefix = level === 0 ? "" : isLast ? "\u2514\u2500\u2500 " : "\u251C\u2500\u2500 ";
@@ -13373,14 +13563,14 @@ var ToTreeNodeImpl = class extends NodeImpl {
 const interpolationVars = matches.reduce(
   (acc, match13) => {
     const key = match13;
-    acc[key] = String((0, …
+    acc[key] = String((0, import_lodash_es15.get)(obj, key, ""));
     return acc;
   },
   {}
 );
 const formattedNode = interpolate(this.data.format, interpolationVars);
 result += indent + prefix + formattedNode + "\n";
- const children = (0, …
+ const children = (0, import_lodash_es15.get)(obj, this.data.childrenProperty);
 if (Array.isArray(children) && children.length > 0) {
   const newPath = parentPath ? `${parentPath}/${formattedNode}` : formattedNode;
   result += this.buildTree(children, newPath, level + 1, isLastItem);
@@ -13607,113 +13797,865 @@ until ${condition}${maxIterations}`;
 };
 var loopUntilNode = nodeDefinition(LoopUntilNodeImpl, "Loop Until");

- // src/model/nodes/ …
- var …
-
-
+ // src/model/nodes/MCPDiscoveryNode.ts
+ var import_nanoid3 = require("nanoid");
+
+ // src/integrations/mcp/MCPProvider.ts
+ var MCPErrorType = /* @__PURE__ */ ((MCPErrorType2) => {
+   MCPErrorType2["CONFIG_NOT_FOUND"] = "CONFIG_NOT_FOUND";
+   MCPErrorType2["SERVER_NOT_FOUND"] = "SERVER_NOT_FOUND";
+   MCPErrorType2["SERVER_COMMUNICATION_FAILED"] = "SERVER_COMMUNICATION_FAILED";
+   MCPErrorType2["INVALID_SCHEMA"] = "INVALID_SCHEMA";
+   return MCPErrorType2;
+ })(MCPErrorType || {});
+ var MCPError = class extends Error {
+   constructor(type, message, details) {
+     super(message);
+     this.type = type;
+     this.details = details;
+     this.name = "Error";
+   }
+ };
+
+ // src/integrations/mcp/MCPUtils.ts
+ var loadMCPConfiguration = async (context) => {
+   if (context.executor !== "nodejs") {
+     throw new MCPError("CONFIG_NOT_FOUND" /* CONFIG_NOT_FOUND */, "MCP config is not supported in browser environment");
+   }
+   const mcpConfig = context.project.metadata.mcpServer;
+   if (!mcpConfig || mcpConfig.mcpServers == null) {
+     throw new MCPError("CONFIG_NOT_FOUND" /* CONFIG_NOT_FOUND */, "MCP configuration not defined in Project tab");
+   }
+   return mcpConfig;
+ };
+ var getServerOptions = async (context) => {
+   if (context.executor === "nodejs" && context.nativeApi) {
+     try {
+       const config = await loadMCPConfiguration(context);
+       return Object.entries(config.mcpServers).filter(([, config2]) => !config2.disabled).map(([id]) => ({
+         label: id,
+         value: id
+       }));
+     } catch {
+     }
+   }
+   return [];
+ };
+ var getServerHelperMessage = (context, optionsLength) => {
+   if (optionsLength > 0)
+     return "Select an MCP server from local configuration located in the Project tab";
+   if (context.executor !== "nodejs")
+     return "MCP nodes require Node Executor";
+   return "No MCP servers found in config";
+ };
+
+ // src/integrations/mcp/MCPBase.ts
+ var getMCPBaseInputs = (data) => {
+   const inputs = [];
+   if (data.useNameInput) {
+     inputs.push({
+       dataType: "string",
+       id: "name",
+       title: "Name"
+     });
+   }
+   if (data.useVersionInput) {
+     inputs.push({
+       dataType: "string",
+       id: "version",
+       title: "Version"
+     });
+   }
+   if (data.transportType === "http" && data.useServerUrlInput) {
+     inputs.push({
+       dataType: "string",
+       id: "serverUrl",
+       title: "Server URL",
+       description: "The endpoint URL for the MCP server"
+     });
+   }
+   return inputs;
+ };
+
+ // src/model/nodes/MCPDiscoveryNode.ts
+ var MCPDiscoveryNodeImpl = class extends NodeImpl {
   static create() {
     const chartNode = {
-      type: " …
-      title: "",
-
-      id: (0, import_non_secure75.nanoid)(),
+      type: "mcpDiscovery",
+      title: "MCP Discovery",
+      id: (0, import_nanoid3.nanoid)(),
       visualData: {
         x: 0,
         y: 0,
-        width: …
+        width: 250
       },
       data: {
-
-
-
-
-
+        name: "mcp-client",
+        version: "1.0.0",
+        transportType: "stdio",
+        serverUrl: "http://localhost:8080/mcp",
+        serverId: "",
+        useNameInput: false,
+        useVersionInput: false,
+        useToolsOutput: true,
+        usePromptsOutput: true
       }
     };
     return chartNode;
   }
- getInputDefinitions( …
- const …
-
- return [];
- }
- const graph = referencedProject.graphs[this.data.graphId];
- if (!graph) {
-   return [];
- }
- const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
- const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
- return inputIds.map(
-   (id) => ({
-     id,
-     title: id,
-     dataType: inputNodes.find((node) => node.data.id === id).data.dataType
-   })
- );
- }
- getGraphOutputs(referencedProject) {
- const graph = referencedProject.graphs[this.data.graphId];
- if (!graph) {
-   return [];
- }
- const outputNodes = graph.nodes.filter((node) => node.type === "graphOutput");
- const outputIds = [...new Set(outputNodes.map((node) => node.data.id))].sort();
- const outputs = outputIds.map(
-   (id) => ({
-     id,
-     title: id,
-     dataType: outputNodes.find((node) => node.data.id === id).data.dataType
-   })
- );
- return outputs;
+ getInputDefinitions() {
+   const inputs = getMCPBaseInputs(this.data);
+   return inputs;
  }
- getOutputDefinitions( …
- const …
-
-
-
+ getOutputDefinitions() {
+   const outputDefinitions = [];
+   if (this.data.useToolsOutput) {
+     outputDefinitions.push({
+       id: "tools",
+       title: "Tools",
+       dataType: "object[]",
+       description: "Tools returned from the MCP server"
+     });
   }
-
-
-
-
-
+   if (this.data.usePromptsOutput) {
+     outputDefinitions.push({
+       id: "prompts",
+       title: "Prompts",
+       dataType: "object[]",
+       description: "Prompts returned from the MCP server"
     });
   }
-  return …
+   return outputDefinitions;
 }
- getEditors(context) {
- const …
+ async getEditors(context) {
+   const editors = [
     {
       type: "toggle",
-      label: " …
-      dataKey: " …
+      label: "Output Tools",
+      dataKey: "useToolsOutput",
+      helperMessage: "Toggle on if you want to get a Tools output"
     },
     {
       type: "toggle",
-      label: "Output …
-      dataKey: " …
+      label: "Output Prompts",
+      dataKey: "usePromptsOutput",
+      helperMessage: "Toggle on if you want to get a Prompts output"
+    },
+    {
+      type: "string",
+      label: "Name",
+      dataKey: "name",
+      useInputToggleDataKey: "useNameInput",
+      helperMessage: "The name for the MCP Client"
+    },
+    {
+      type: "string",
+      label: "Version",
+      dataKey: "version",
+      useInputToggleDataKey: "useVersionInput",
+      helperMessage: "A version for the MCP Client"
+    },
+    {
+      type: "dropdown",
+      label: "Transport Type",
+      dataKey: "transportType",
+      options: [
+        { label: "HTTP", value: "http" },
+        { label: "STDIO", value: "stdio" }
+      ]
     }
   ];
-
-
-
-
-
- for …
-
-
-
-
-
-
-
-
-
-
+   if (this.data.transportType === "http") {
+     editors.push({
+       type: "string",
+       label: "Server URL",
+       dataKey: "serverUrl",
+       useInputToggleDataKey: "useServerUrlInput",
+       helperMessage: "The base URL endpoint for the MCP server with `/mcp`"
+     });
+   } else if (this.data.transportType === "stdio") {
+     const serverOptions = await getServerOptions(context);
+     editors.push({
+       type: "dropdown",
+       label: "Server ID",
+       dataKey: "serverId",
+       helperMessage: getServerHelperMessage(context, serverOptions.length),
+       options: serverOptions
+     });
+   }
+   return editors;
+ }
+ getBody(context) {
+   let base;
+   if (this.data.transportType === "http") {
+     base = this.data.useServerUrlInput ? "(Using Server URL Input)" : this.data.serverUrl;
+   } else {
+     base = `Server ID: ${this.data.serverId || "(None)"}`;
+   }
+   const namePart = `Name: ${this.data.name}`;
+   const versionPart = `Version: ${this.data.version}`;
+   const parts = [namePart, versionPart, base];
+   if (context.executor !== "nodejs") {
+     parts.push("(Requires Node Executor)");
+   }
+   return parts.join("\n");
+ }
+ static getUIData() {
+   return {
+     infoBoxBody: import_ts_dedent.dedent`
+       Connects to an MCP (Model Context Protocol) server to discover capabilities like tools and prompts.
+     `,
+     infoBoxTitle: "MCP Discovery Node",
+     contextMenuTitle: "MCP Discovery",
+     group: ["MCP"]
+   };
+ }
+ async process(inputs, context) {
+   const name = getInputOrData(this.data, inputs, "name", "string");
+   const version = getInputOrData(this.data, inputs, "version", "string");
+   const transportType = getInputOrData(this.data, inputs, "transportType", "string");
+   let tools = [];
+   let prompts = [];
+   try {
+     if (!context.mcpProvider) {
+       throw new Error("MCP Provider not found");
+     }
+     if (transportType === "http") {
+       const serverUrl = getInputOrData(this.data, inputs, "serverUrl", "string");
+       if (!serverUrl || serverUrl === "") {
+         throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, "No server URL was provided");
+       }
+       if (!serverUrl.includes("/mcp")) {
+         throw new MCPError(
+           "SERVER_COMMUNICATION_FAILED" /* SERVER_COMMUNICATION_FAILED */,
+           "Include /mcp in your server URL. For example: http://localhost:8080/mcp"
+         );
+       }
+       tools = await context.mcpProvider.getHTTPTools({ name, version }, serverUrl);
+       prompts = await context.mcpProvider.getHTTPrompts({ name, version }, serverUrl);
+     } else if (transportType === "stdio") {
+       const serverId = this.data.serverId ?? "";
+       const mcpConfig = await loadMCPConfiguration(context);
+       if (!mcpConfig.mcpServers[serverId]) {
+         throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, `Server ${serverId} not found in MCP config`);
+       }
+       const serverConfig = {
+         config: mcpConfig.mcpServers[serverId],
+         serverId
+       };
+       tools = await context.mcpProvider.getStdioTools({ name, version }, serverConfig);
+       prompts = await context.mcpProvider.getStdioPrompts({ name, version }, serverConfig);
+     }
+     const output = {};
+     const gptFunctions = tools.map((tool) => ({
+       name: tool.name,
+       description: tool.description ?? "",
+       parameters: tool.inputSchema,
+       strict: false
+     }));
+     if (this.data.useToolsOutput) {
+       output["tools"] = {
+         type: "gpt-function[]",
+         value: gptFunctions
+       };
+     }
+     if (this.data.usePromptsOutput) {
+       output["prompts"] = {
+         type: "object[]",
+         value: prompts.map((prompt) => ({
+           name: prompt.name,
+           description: prompt.description,
+           arguments: prompt.arugments
+         }))
+       };
+     }
+     return output;
+   } catch (err) {
+     if (context.executor === "browser") {
+       throw new Error("Failed to create Client without Node Executor");
+     }
+     throw err;
+   }
+ }
+ };
+ var mcpDiscoveryNode = nodeDefinition(MCPDiscoveryNodeImpl, "MCP Discovery");
+
+ // src/model/nodes/MCPToolCallNode.ts
+ var import_non_secure75 = require("nanoid/non-secure");
+ var import_ts_dedent69 = require("ts-dedent");
+ var MCPToolCallNodeImpl = class extends NodeImpl {
+   static create() {
+     const chartNode = {
+       type: "mcpToolCall",
+       title: "MCP Tool Call",
+       id: (0, import_non_secure75.nanoid)(),
+       visualData: {
+         x: 0,
+         y: 0,
+         width: 250
+       },
+       data: {
+         name: "mcp-tool-call-client",
+         version: "1.0.0",
+         transportType: "stdio",
+         serverUrl: "http://localhost:8080/mcp",
+         serverId: "",
+         toolName: "",
+         toolArguments: import_ts_dedent69.dedent`
+           {
+             "key": "value"
+           }`,
+         toolCallId: "",
+         useNameInput: false,
+         useVersionInput: false,
+         useToolNameInput: true,
+         useToolArgumentsInput: true,
+         useToolCallIdInput: true
+       }
+     };
+     return chartNode;
+   }
+   getInputDefinitions() {
+     const inputs = getMCPBaseInputs(this.data);
+     if (this.data.useToolNameInput) {
+       inputs.push({
+         dataType: "string",
+         id: "toolName",
+         title: "Tool Name"
+       });
+     }
+     if (this.data.useToolArgumentsInput) {
+       inputs.push({
+         dataType: "object",
+         id: "toolArguments",
+         title: "Tool Arguments"
+       });
+     }
+     if (this.data.useToolCallIdInput) {
+       inputs.push({
+         dataType: "object",
+         id: "toolCallId",
+         title: "Tool ID"
+       });
+     }
+     return inputs;
+   }
+   getOutputDefinitions() {
+     const outputDefinitions = [];
+     outputDefinitions.push({
+       id: "response",
+       title: "Response",
+       dataType: "object",
+       description: "Response from the Tool Call"
+     });
+     outputDefinitions.push({
+       id: "toolCallId",
+       title: "Tool ID",
+       dataType: "string",
+       description: "ID associated with the Tool Call"
+     });
+     return outputDefinitions;
+   }
+   async getEditors(context) {
+     const editors = [
+       {
+         type: "string",
+         label: "Name",
+         dataKey: "name",
+         useInputToggleDataKey: "useNameInput",
+         helperMessage: "The name for the MCP Client"
+       },
+       {
+         type: "string",
+         label: "Version",
+         dataKey: "version",
+         useInputToggleDataKey: "useVersionInput",
+         helperMessage: "A version for the MCP Client"
+       },
+       {
+         type: "dropdown",
+         label: "Transport Type",
+         dataKey: "transportType",
+         options: [
+           { label: "HTTP", value: "http" },
+           { label: "STDIO", value: "stdio" }
+         ]
+       },
+       {
+         type: "string",
+         label: "Tool Name",
+         dataKey: "toolName",
+         useInputToggleDataKey: "useToolNameInput",
+         helperMessage: "The name for the MCP Tool Call"
+       },
+       {
+         type: "code",
+         label: "Tool Arguments",
+         dataKey: "toolArguments",
+         language: "json",
+         useInputToggleDataKey: "useToolArgumentsInput"
+       },
+       {
+         type: "string",
+         label: "Tool ID",
+         dataKey: "toolCallId",
+         useInputToggleDataKey: "useToolCallIdInput",
+         helperMessage: "The name for the MCP Tool Call"
+       }
+     ];
+     if (this.data.transportType === "http") {
+       editors.push({
+         type: "string",
+         label: "Server URL",
+         dataKey: "serverUrl",
+         useInputToggleDataKey: "useServerUrlInput",
+         helperMessage: "The endpoint URL for the MCP server to connect"
+       });
+     } else if (this.data.transportType === "stdio") {
+       const serverOptions = await getServerOptions(context);
+       editors.push({
+         type: "dropdown",
+         label: "Server ID",
+         dataKey: "serverId",
+         helperMessage: getServerHelperMessage(context, serverOptions.length),
+         options: serverOptions
+       });
+     }
+     return editors;
+   }
+   getBody(context) {
+     let base;
+     if (this.data.transportType === "http") {
+       base = this.data.useServerUrlInput ? "(Using Server URL Input)" : this.data.serverUrl;
+     } else {
+       base = `Server ID: ${this.data.serverId || "(None)"}`;
+     }
+     const namePart = `Name: ${this.data.name}`;
+     const versionPart = `Version: ${this.data.version}`;
+     const parts = [namePart, versionPart, base];
+     if (context.executor !== "nodejs") {
+       parts.push("(Requires Node Executor)");
+     }
+     return parts.join("\n");
+   }
+   static getUIData() {
+     return {
+       infoBoxBody: import_ts_dedent69.dedent`
+         Connects to an MCP (Model Context Protocol) server and gets a tool call response.
+       `,
+       infoBoxTitle: "MCP Tool Call Node",
+       contextMenuTitle: "MCP Tool Call",
+       group: ["MCP"]
+     };
+   }
+   async process(inputs, context) {
+     const name = getInputOrData(this.data, inputs, "name", "string");
+     const version = getInputOrData(this.data, inputs, "version", "string");
+     const toolName = getInputOrData(this.data, inputs, "toolName", "string");
+     const toolCallId = getInputOrData(this.data, inputs, "toolCallId", "string");
+     let toolArguments;
+     if (this.data.useToolArgumentsInput) {
+       toolArguments = getInputOrData(this.data, inputs, "toolArguments", "object");
+       if (toolArguments == null) {
+         throw new MCPError("INVALID_SCHEMA" /* INVALID_SCHEMA */, "Cannot parse tool argument with input toggle on");
+       }
+     } else {
+       const inputMap = keys(inputs).filter((key) => key.startsWith("input")).reduce(
+         (acc, key) => {
+           const stringValue = coerceTypeOptional(inputs[key], "string") ?? "";
+           const interpolationKey = key.slice("input-".length);
+           acc[interpolationKey] = stringValue;
+           return acc;
+         },
+         {}
+       );
+       const interpolated = interpolate(this.data.toolArguments ?? "", inputMap);
+       toolArguments = JSON.parse(interpolated);
+     }
+     const toolCall = {
+       name: toolName,
+       arguments: toolArguments
+     };
+     const transportType = getInputOrData(this.data, inputs, "transportType", "string");
+     let toolResponse = void 0;
+     try {
+       if (!context.mcpProvider) {
+         throw new Error("MCP Provider not found");
+       }
+       if (transportType === "http") {
+         const serverUrl = getInputOrData(this.data, inputs, "serverUrl", "string");
+         if (!serverUrl || serverUrl === "") {
+           throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, "No server URL was provided");
+         }
+         if (!serverUrl.includes("/mcp")) {
+           throw new MCPError(
+             "SERVER_COMMUNICATION_FAILED" /* SERVER_COMMUNICATION_FAILED */,
+             "Include /mcp in your server URL. For example: http://localhost:8080/mcp"
+           );
+         }
+         toolResponse = await context.mcpProvider.httpToolCall({ name, version }, serverUrl, toolCall);
+       } else if (transportType === "stdio") {
+         const serverId = this.data.serverId ?? "";
+         const mcpConfig = await loadMCPConfiguration(context);
+         if (!mcpConfig.mcpServers[serverId]) {
+           throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, `Server ${serverId} not found in MCP config`);
+         }
+         const serverConfig = {
+           config: mcpConfig.mcpServers[serverId],
+           serverId
+         };
+         toolResponse = await context.mcpProvider.stdioToolCall({ name, version }, serverConfig, toolCall);
+       }
+       const output = {};
+       output["response"] = {
+         type: "object[]",
+         value: toolResponse == null ? void 0 : toolResponse.content
+       };
+       output["toolCallId"] = {
+         type: "string",
+         value: toolCallId
+       };
+       return output;
+     } catch (err) {
+       const { message } = getError(err);
+       if (context.executor === "browser") {
+         throw new Error("Failed to create Client without a node executor");
+       }
+       console.log(message);
+       throw err;
+     }
+   }
+ };
+ var mcpToolCallNode = nodeDefinition(MCPToolCallNodeImpl, "MCP Tool Call");
+
+ // src/model/nodes/MCPGetPromptNode.ts
+ var import_non_secure76 = require("nanoid/non-secure");
+ var MCPGetPromptNodeImpl = class extends NodeImpl {
+   static create() {
+     const chartNode = {
+       type: "mcpGetPrompt",
+       title: "MCP Get Prompt",
+       id: (0, import_non_secure76.nanoid)(),
+       visualData: {
+         x: 0,
+         y: 0,
+         width: 250
+       },
+       data: {
+         name: "mcp-get-prompt-client",
+         version: "1.0.0",
+         transportType: "stdio",
+         serverUrl: "http://localhost:8080/mcp",
+         serverId: "",
+         promptName: "",
+         promptArguments: import_ts_dedent.dedent`
+           {
+             "key": "value"
+           }`,
+         useNameInput: false,
+         useVersionInput: false,
+         usePromptNameInput: false,
+         usePromptArgumentsInput: false
+       }
+     };
+     return chartNode;
+   }
+   getInputDefinitions() {
+     const inputs = getMCPBaseInputs(this.data);
+     if (this.data.usePromptNameInput) {
+       inputs.push({
+         dataType: "string",
+         id: "promptName",
+         title: "Prompt Name"
+       });
+     }
+     if (this.data.usePromptArgumentsInput) {
+       inputs.push({
+         dataType: "object",
+         id: "promptArguments",
+         title: "Prompt Arguments"
+       });
+     }
+     return inputs;
+   }
+   getOutputDefinitions() {
+     const outputDefinitions = [];
+     outputDefinitions.push({
+       id: "prompt",
+       title: "Prompt",
+       dataType: "object",
+       description: "Prompt response result"
+     });
+     return outputDefinitions;
+   }
+   async getEditors(context) {
+     const editors = [
+       {
+         type: "string",
+         label: "Name",
+         dataKey: "name",
+         useInputToggleDataKey: "useNameInput",
|
|
14398
|
+
helperMessage: "The name for the MCP Client"
|
|
14399
|
+
},
|
|
14400
|
+
{
|
|
14401
|
+
type: "string",
|
|
14402
|
+
label: "Version",
|
|
14403
|
+
dataKey: "version",
|
|
14404
|
+
useInputToggleDataKey: "useVersionInput",
|
|
14405
|
+
helperMessage: "A version for the MCP Client"
|
|
14406
|
+
},
|
|
14407
|
+
{
|
|
14408
|
+
type: "dropdown",
|
|
14409
|
+
label: "Transport Type",
|
|
14410
|
+
dataKey: "transportType",
|
|
14411
|
+
options: [
|
|
14412
|
+
{ label: "HTTP", value: "http" },
|
|
14413
|
+
{ label: "STDIO", value: "stdio" }
|
|
14414
|
+
]
|
|
14415
|
+
},
|
|
14416
|
+
{
|
|
14417
|
+
type: "string",
|
|
14418
|
+
label: "Prompt Name",
|
|
14419
|
+
dataKey: "promptName",
|
|
14420
|
+
useInputToggleDataKey: "usePromptNameInput",
|
|
14421
|
+
helperMessage: "The name for the MCP prompt"
|
|
14422
|
+
},
|
|
14423
|
+
{
|
|
14424
|
+
type: "code",
|
|
14425
|
+
label: "Prompt Arguments",
|
|
14426
|
+
dataKey: "promptArguments",
|
|
14427
|
+
useInputToggleDataKey: "usePromptArgumentsInput",
|
|
14428
|
+
language: "json",
|
|
14429
|
+
helperMessage: "Arguments to provide the prompt"
|
|
14430
|
+
}
|
|
14431
|
+
];
|
|
14432
|
+
if (this.data.transportType === "http") {
|
|
14433
|
+
editors.push({
|
|
14434
|
+
type: "string",
|
|
14435
|
+
label: "Server URL",
|
|
14436
|
+
dataKey: "serverUrl",
|
|
14437
|
+
useInputToggleDataKey: "useServerUrlInput",
|
|
14438
|
+
helperMessage: "The endpoint URL for the MCP server to connect"
|
|
14439
|
+
});
|
|
14440
|
+
} else if (this.data.transportType === "stdio") {
|
|
14441
|
+
const serverOptions = await getServerOptions(context);
|
|
14442
|
+
editors.push({
|
|
14443
|
+
type: "dropdown",
|
|
14444
|
+
label: "Server ID",
|
|
14445
|
+
dataKey: "serverId",
|
|
14446
|
+
helperMessage: getServerHelperMessage(context, serverOptions.length),
|
|
14447
|
+
options: serverOptions
|
|
14448
|
+
});
|
|
14449
|
+
}
|
|
14450
|
+
return editors;
|
|
14451
|
+
}
|
|
14452
|
+
getBody(context) {
|
|
14453
|
+
let base;
|
|
14454
|
+
if (this.data.transportType === "http") {
|
|
14455
|
+
base = this.data.useServerUrlInput ? "(Using Server URL Input)" : this.data.serverUrl;
|
|
14456
|
+
} else {
|
|
14457
|
+
base = `Server ID: ${this.data.serverId || "(None)"}`;
|
|
14458
|
+
}
|
|
14459
|
+
const namePart = `Name: ${this.data.name}`;
|
|
14460
|
+
const versionPart = `Version: ${this.data.version}`;
|
|
14461
|
+
const parts = [namePart, versionPart, base];
|
|
14462
|
+
if (context.executor !== "nodejs") {
|
|
14463
|
+
parts.push("(Requires Node Executor)");
|
|
14464
|
+
}
|
|
14465
|
+
return parts.join("\n");
|
|
14466
|
+
}
|
|
14467
|
+
static getUIData() {
|
|
14468
|
+
return {
|
|
14469
|
+
infoBoxBody: import_ts_dedent.dedent`
|
|
14470
|
+
Connects to an MCP (Model Context Protocol) server and gets a prompt response.
|
|
14471
|
+
`,
|
|
14472
|
+
infoBoxTitle: "MCP Get Prompt Node",
|
|
14473
|
+
contextMenuTitle: "MCP Get Prompt",
|
|
14474
|
+
group: ["MCP"]
|
|
14475
|
+
};
|
|
14476
|
+
}
|
|
14477
|
+
async process(inputs, context) {
|
|
14478
|
+
const name = getInputOrData(this.data, inputs, "name", "string");
|
|
14479
|
+
const version = getInputOrData(this.data, inputs, "version", "string");
|
|
14480
|
+
const promptName = getInputOrData(this.data, inputs, "promptName", "string");
|
|
14481
|
+
let promptArguments;
|
|
14482
|
+
if (this.data.usePromptArgumentsInput) {
|
|
14483
|
+
promptArguments = getInputOrData(this.data, inputs, "promptArguments", "object");
|
|
14484
|
+
if (promptArguments == null) {
|
|
14485
|
+
throw new MCPError("INVALID_SCHEMA" /* INVALID_SCHEMA */, "Cannot parse tool argument with input toggle on");
|
|
14486
|
+
}
|
|
14487
|
+
} else {
|
|
14488
|
+
const inputMap = keys(inputs).filter((key) => key.startsWith("input")).reduce(
|
|
14489
|
+
(acc, key) => {
|
|
14490
|
+
const stringValue = coerceTypeOptional(inputs[key], "string") ?? "";
|
|
14491
|
+
const interpolationKey = key.slice("input-".length);
|
|
14492
|
+
acc[interpolationKey] = stringValue;
|
|
14493
|
+
return acc;
|
|
14494
|
+
},
|
|
14495
|
+
{}
|
|
14496
|
+
);
|
|
14497
|
+
const interpolated = interpolate(this.data.promptArguments ?? "", inputMap);
|
|
14498
|
+
promptArguments = JSON.parse(interpolated);
|
|
14499
|
+
}
|
|
14500
|
+
const getPromptRequest = {
|
|
14501
|
+
name: promptName,
|
|
14502
|
+
arguments: promptArguments
|
|
14503
|
+
};
|
|
14504
|
+
const transportType = getInputOrData(this.data, inputs, "transportType", "string");
|
|
14505
|
+
let getPromptResponse = void 0;
|
|
14506
|
+
try {
|
|
14507
|
+
if (!context.mcpProvider) {
|
|
14508
|
+
throw new Error("MCP Provider not found");
|
|
14509
|
+
}
|
|
14510
|
+
if (transportType === "http") {
|
|
14511
|
+
const serverUrl = getInputOrData(this.data, inputs, "serverUrl", "string");
|
|
14512
|
+
if (!serverUrl || serverUrl === "") {
|
|
14513
|
+
throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, "No server URL was provided");
|
|
14514
|
+
}
|
|
14515
|
+
if (!serverUrl.includes("/mcp")) {
|
|
14516
|
+
throw new MCPError(
|
|
14517
|
+
"SERVER_COMMUNICATION_FAILED" /* SERVER_COMMUNICATION_FAILED */,
|
|
14518
|
+
"Include /mcp in your server URL. For example: http://localhost:8080/mcp"
|
|
14519
|
+
);
|
|
14520
|
+
}
|
|
14521
|
+
getPromptResponse = await context.mcpProvider.getHTTPrompt({ name, version }, serverUrl, getPromptRequest);
|
|
14522
|
+
} else if (transportType === "stdio") {
|
|
14523
|
+
const serverId = this.data.serverId ?? "";
|
|
14524
|
+
const mcpConfig = await loadMCPConfiguration(context);
|
|
14525
|
+
if (!mcpConfig.mcpServers[serverId]) {
|
|
14526
|
+
throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, `Server ${serverId} not found in MCP config`);
|
|
14527
|
+
}
|
|
14528
|
+
const serverConfig = {
|
|
14529
|
+
config: mcpConfig.mcpServers[serverId],
|
|
14530
|
+
serverId
|
|
14531
|
+
};
|
|
14532
|
+
getPromptResponse = await context.mcpProvider.getStdioPrompt({ name, version }, serverConfig, getPromptRequest);
|
|
14533
|
+
}
|
|
14534
|
+
const output = {};
|
|
14535
|
+
output["response"] = {
|
|
14536
|
+
type: "object",
|
|
14537
|
+
value: getPromptResponse
|
|
14538
|
+
};
|
|
14539
|
+
return output;
|
|
14540
|
+
} catch (err) {
|
|
14541
|
+
const { message } = getError(err);
|
|
14542
|
+
if (context.executor === "browser") {
|
|
14543
|
+
throw new Error("Failed to create Client without a node executor");
|
|
14544
|
+
}
|
|
14545
|
+
console.log(message);
|
|
14546
|
+
throw err;
|
|
14547
|
+
}
|
|
14548
|
+
}
|
|
14549
|
+
};
|
|
14550
|
+
var mcpGetPromptNode = nodeDefinition(MCPGetPromptNodeImpl, "MCP Get Prompt");
|
|
14551
|
+
|
|
14552
|
+
// src/model/nodes/ReferencedGraphAliasNode.ts
|
|
14553
|
+
var import_non_secure77 = require("nanoid/non-secure");
|
|
14554
|
+
var import_ts_dedent70 = require("ts-dedent");
|
|
14555
|
+
var ReferencedGraphAliasNodeImpl = class extends NodeImpl {
|
|
14556
|
+
static create() {
|
|
14557
|
+
const chartNode = {
|
|
14558
|
+
type: "referencedGraphAlias",
|
|
14559
|
+
title: "",
|
|
14560
|
+
// Always set initially by the editor
|
|
14561
|
+
id: (0, import_non_secure77.nanoid)(),
|
|
14562
|
+
visualData: {
|
|
14563
|
+
x: 0,
|
|
14564
|
+
y: 0,
|
|
14565
|
+
width: 300
|
|
14566
|
+
},
|
|
14567
|
+
data: {
|
|
14568
|
+
projectId: void 0,
|
|
14569
|
+
// Always set initially by the editor
|
|
14570
|
+
graphId: void 0,
|
|
14571
|
+
// Always set initially by the editor
|
|
14572
|
+
useErrorOutput: false
|
|
14573
|
+
}
|
|
14574
|
+
};
|
|
14575
|
+
return chartNode;
|
|
14576
|
+
}
|
|
14577
|
+
getInputDefinitions(_connections, _nodes, _project, referencedProjects) {
|
|
14578
|
+
const referencedProject = referencedProjects[this.data.projectId];
|
|
14579
|
+
if (!referencedProject) {
|
|
14580
|
+
return [];
|
|
14581
|
+
}
|
|
14582
|
+
const graph = referencedProject.graphs[this.data.graphId];
|
|
14583
|
+
if (!graph) {
|
|
14584
|
+
return [];
|
|
14585
|
+
}
|
|
14586
|
+
const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
|
|
14587
|
+
const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
|
|
14588
|
+
return inputIds.map(
|
|
14589
|
+
(id) => ({
|
|
14590
|
+
id,
|
|
14591
|
+
title: id,
|
|
14592
|
+
dataType: inputNodes.find((node) => node.data.id === id).data.dataType
|
|
14593
|
+
})
|
|
14594
|
+
);
|
|
14595
|
+
}
|
|
14596
|
+
getGraphOutputs(referencedProject) {
|
|
14597
|
+
const graph = referencedProject.graphs[this.data.graphId];
|
|
14598
|
+
if (!graph) {
|
|
14599
|
+
return [];
|
|
14600
|
+
}
|
|
14601
|
+
const outputNodes = graph.nodes.filter((node) => node.type === "graphOutput");
|
|
14602
|
+
const outputIds = [...new Set(outputNodes.map((node) => node.data.id))].sort();
|
|
14603
|
+
const outputs = outputIds.map(
|
|
14604
|
+
(id) => ({
|
|
14605
|
+
id,
|
|
14606
|
+
title: id,
|
|
14607
|
+
dataType: outputNodes.find((node) => node.data.id === id).data.dataType
|
|
14608
|
+
})
|
|
14609
|
+
);
|
|
14610
|
+
return outputs;
|
|
14611
|
+
}
|
|
14612
|
+
getOutputDefinitions(_connections, _nodes, _project, referencedProjects) {
|
|
14613
|
+
const outputs = [];
|
|
14614
|
+
const referencedProject = referencedProjects[this.data.projectId];
|
|
14615
|
+
if (!referencedProject) {
|
|
14616
|
+
return outputs;
|
|
14617
|
+
}
|
|
14618
|
+
outputs.push(...this.getGraphOutputs(referencedProject));
|
|
14619
|
+
if (this.data.useErrorOutput) {
|
|
14620
|
+
outputs.push({
|
|
14621
|
+
id: "error",
|
|
14622
|
+
title: "Error",
|
|
14623
|
+
dataType: "string"
|
|
14624
|
+
});
|
|
14625
|
+
}
|
|
14626
|
+
return outputs;
|
|
14627
|
+
}
|
|
14628
|
+
getEditors(context) {
|
|
14629
|
+
const definitions = [
|
|
14630
|
+
{
|
|
14631
|
+
type: "toggle",
|
|
14632
|
+
label: "Use Error Output",
|
|
14633
|
+
dataKey: "useErrorOutput"
|
|
14634
|
+
},
|
|
14635
|
+
{
|
|
14636
|
+
type: "toggle",
|
|
14637
|
+
label: "Output Cost & Duration",
|
|
14638
|
+
dataKey: "outputCostDuration"
|
|
14639
|
+
}
|
|
14640
|
+
];
|
|
14641
|
+
const referencedProject = context.referencedProjects[this.data.projectId];
|
|
14642
|
+
if (referencedProject) {
|
|
14643
|
+
const graph = referencedProject.graphs[this.data.graphId];
|
|
14644
|
+
if (graph) {
|
|
14645
|
+
const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
|
|
14646
|
+
const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
|
|
14647
|
+
for (const inputId of inputIds) {
|
|
14648
|
+
const inputNode = inputNodes.find((node) => node.data.id === inputId);
|
|
14649
|
+
definitions.push({
|
|
14650
|
+
type: "dynamic",
|
|
14651
|
+
dataKey: "inputData",
|
|
14652
|
+
dynamicDataKey: inputNode.data.id,
|
|
14653
|
+
dataType: inputNode.data.dataType,
|
|
14654
|
+
label: inputNode.data.id,
|
|
14655
|
+
editor: inputNode.data.editor ?? "auto"
|
|
14656
|
+
});
|
|
14657
|
+
}
|
|
14658
|
+
}
|
|
13717
14659
|
}
|
|
13718
14660
|
return definitions;
|
|
13719
14661
|
}
|
|
@@ -13723,7 +14665,7 @@ var ReferencedGraphAliasNodeImpl = class extends NodeImpl {
   }
   static getUIData() {
     return {
-      infoBoxBody:
+      infoBoxBody: import_ts_dedent70.dedent`
         References a graph from another project. Inputs and outputs are defined by Graph Input and Graph Output nodes within the referenced graph.
       `,
       infoBoxTitle: "Referenced Graph Alias Node",
@@ -13817,7 +14759,7 @@ var referencedGraphAliasNode = nodeDefinition(ReferencedGraphAliasNodeImpl, "Ref
|
|
|
13817
14759
|
|
|
13818
14760
|
// src/model/Nodes.ts
|
|
13819
14761
|
var registerBuiltInNodes = (registry2) => {
|
|
13820
|
-
return registry2.register(toYamlNode).register(userInputNode).register(textNode).register(chatNode).register(promptNode).register(extractRegexNode).register(codeNode).register(matchNode).register(ifNode).register(readDirectoryNode).register(readFileNode).register(writeFileNode).register(ifElseNode).register(chunkNode).register(graphInputNode).register(graphOutputNode).register(subGraphNode).register(arrayNode).register(extractJsonNode).register(assemblePromptNode).register(loopControllerNode).register(trimChatMessagesNode).register(extractYamlNode).register(externalCallNode).register(extractObjectPathNode).register(raiseEventNode).register(contextNode).register(coalesceNode).register(passthroughNode).register(popNode).register(setGlobalNode).register(getGlobalNode).register(waitForEventNode).register(gptFunctionNode).register(getEmbeddingNode).register(vectorStoreNode).register(vectorNearestNeighborsNode).register(hashNode).register(abortGraphNode).register(raceInputsNode).register(toJsonNode).register(joinNode).register(filterNode).register(objectNode).register(booleanNode).register(compareNode).register(evaluateNode).register(numberNode).register(randomNumberNode).register(shuffleNode).register(commentNode).register(imageToMDNode).register(imageNode).register(audioNode).register(httpCallNode).register(delayNode).register(appendToDatasetNode).register(createDatasetNode).register(loadDatasetNode).register(getAllDatasetsNode).register(splitNode).register(datasetNearestNeighborsNode).register(getDatasetRowNode).register(sliceNode).register(extractMarkdownCodeBlocksNode).register(assembleMessageNode).register(urlReferenceNode).register(destructureNode).register(replaceDatasetNode).register(listGraphsNode).register(graphReferenceNode).register(callGraphNode).register(delegateFunctionCallNode).register(playAudioNode).register(documentNode).register(chatLoopNode).register(readAllFilesNode).register(toMarkdownTableNode).register(cronNode).register(toTreeNode).register(loopUntilNode).register(referencedGraphAliasNode);
|
|
14762
|
+
return registry2.register(toYamlNode).register(userInputNode).register(textNode).register(chatNode).register(promptNode).register(extractRegexNode).register(codeNode).register(matchNode).register(ifNode).register(readDirectoryNode).register(readFileNode).register(writeFileNode).register(ifElseNode).register(chunkNode).register(graphInputNode).register(graphOutputNode).register(subGraphNode).register(arrayNode).register(extractJsonNode).register(assemblePromptNode).register(loopControllerNode).register(trimChatMessagesNode).register(extractYamlNode).register(externalCallNode).register(extractObjectPathNode).register(raiseEventNode).register(contextNode).register(coalesceNode).register(passthroughNode).register(popNode).register(setGlobalNode).register(getGlobalNode).register(waitForEventNode).register(gptFunctionNode).register(getEmbeddingNode).register(vectorStoreNode).register(vectorNearestNeighborsNode).register(hashNode).register(abortGraphNode).register(raceInputsNode).register(toJsonNode).register(joinNode).register(filterNode).register(objectNode).register(booleanNode).register(compareNode).register(evaluateNode).register(numberNode).register(randomNumberNode).register(shuffleNode).register(commentNode).register(imageToMDNode).register(imageNode).register(audioNode).register(httpCallNode).register(delayNode).register(appendToDatasetNode).register(createDatasetNode).register(loadDatasetNode).register(getAllDatasetsNode).register(splitNode).register(datasetNearestNeighborsNode).register(getDatasetRowNode).register(sliceNode).register(extractMarkdownCodeBlocksNode).register(assembleMessageNode).register(urlReferenceNode).register(destructureNode).register(replaceDatasetNode).register(listGraphsNode).register(graphReferenceNode).register(callGraphNode).register(delegateFunctionCallNode).register(playAudioNode).register(documentNode).register(chatLoopNode).register(readAllFilesNode).register(toMarkdownTableNode).register(cronNode).register(toTreeNode).register(loopUntilNode).register(mcpDiscoveryNode).register(mcpToolCallNode).register(mcpGetPromptNode).register(referencedGraphAliasNode);
|
|
13821
14763
|
};
|
|
13822
14764
|
var globalRivetNodeRegistry = registerBuiltInNodes(new NodeRegistration());
|
|
13823
14765
|
function resetGlobalRivetNodeRegistry() {
|
|
@@ -13891,7 +14833,7 @@ ${parameterName}?: ${value == null ? void 0 : value.type}`).join("\n")}
 
 // src/integrations/CodeRunner.ts
 var IsomorphicCodeRunner = class {
-  async runCode(code, inputs, options2) {
+  async runCode(code, inputs, options2, graphInputs, contextValues) {
     const argNames = ["inputs"];
     const args = [inputs];
     if (options2.includeRequire) {
@@ -13912,6 +14854,14 @@ var IsomorphicCodeRunner = class {
       argNames.push("Rivet");
       args.push(exports_exports);
     }
+    if (graphInputs) {
+      argNames.push("graphInputs");
+      args.push(graphInputs);
+    }
+    if (contextValues) {
+      argNames.push("context");
+      args.push(contextValues);
+    }
     argNames.push(code);
     const AsyncFunction = async function() {
     }.constructor;
@@ -13921,7 +14871,7 @@ var IsomorphicCodeRunner = class {
   }
 };
 var NotAllowedCodeRunner = class {
-  async runCode(_code, _inputs, _options) {
+  async runCode(_code, _inputs, _options, _graphInputs, _contextValues) {
     throw new Error("Dynamic code execution is disabled.");
   }
 };
@@ -13946,7 +14896,7 @@ var GraphProcessor = class _GraphProcessor {
   #isPaused = false;
   #parent;
   #registry;
-  id = (0,
+  id = (0, import_non_secure78.nanoid)();
   #includeTrace = true;
   executor;
   /** If set, specifies the node(s) that the graph will run TO, instead of the nodes without any dependents. */
@@ -13991,6 +14941,7 @@ var GraphProcessor = class _GraphProcessor {
   // @ts-expect-error
   #nodesNotInCycle = void 0;
   #nodeAbortControllers = /* @__PURE__ */ new Map();
+  #graphInputNodeValues = {};
   /** User input nodes that are pending user input. */
   #pendingUserInputs = void 0;
   get isRunning() {
@@ -14371,6 +15322,7 @@ var GraphProcessor = class _GraphProcessor {
     this.#abortSuccessfully = false;
     this.#nodeAbortControllers = /* @__PURE__ */ new Map();
     this.#loadedProjects = {};
+    this.#graphInputNodeValues = {};
   }
   /** Main function for running a graph. Runs a graph and returns the outputs from the output nodes of the graph. */
   async processGraph(context, inputs = {}, contextValues = {}) {
@@ -14611,7 +15563,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
14611
15563
|
return;
|
|
14612
15564
|
}
|
|
14613
15565
|
const inputValues = this.#getInputValuesForNode(node);
|
|
14614
|
-
if (this.#excludedDueToControlFlow(node, inputValues, (0,
|
|
15566
|
+
if (this.#excludedDueToControlFlow(node, inputValues, (0, import_non_secure78.nanoid)(), "loop-not-broken")) {
|
|
14615
15567
|
this.#emitTraceEvent(`Node ${node.title} is excluded due to control flow`);
|
|
14616
15568
|
return;
|
|
14617
15569
|
}
|
|
@@ -14745,7 +15697,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
14745
15697
|
return nodeData;
|
|
14746
15698
|
}
|
|
14747
15699
|
async #processNode(node) {
|
|
14748
|
-
const processId = (0,
|
|
15700
|
+
const processId = (0, import_non_secure78.nanoid)();
|
|
14749
15701
|
if (this.#abortController.signal.aborted) {
|
|
14750
15702
|
this.#nodeErrored(node, new Error("Processing aborted"), processId);
|
|
14751
15703
|
return processId;
|
|
@@ -14773,7 +15725,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
14773
15725
|
return;
|
|
14774
15726
|
}
|
|
14775
15727
|
const splittingAmount = Math.min(
|
|
14776
|
-
(0,
|
|
15728
|
+
(0, import_lodash_es16.max)(values(inputValues).map((value) => Array.isArray(value == null ? void 0 : value.value) ? value == null ? void 0 : value.value.length : 1)) ?? 1,
|
|
14777
15729
|
node.splitRunMax ?? 10
|
|
14778
15730
|
);
|
|
14779
15731
|
this.#emitter.emit("nodeStart", { node, inputs: inputValues, processId });
|
|
@@ -14816,7 +15768,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
14816
15768
|
}
|
|
14817
15769
|
} else {
|
|
14818
15770
|
results = await Promise.all(
|
|
14819
|
-
(0,
|
|
15771
|
+
(0, import_lodash_es16.range)(0, splittingAmount).map(async (i) => {
|
|
14820
15772
|
var _a2, _b2, _c2;
|
|
14821
15773
|
const inputs = fromEntries(
|
|
14822
15774
|
entries(inputValues).map(([port, value]) => [
|
|
@@ -14866,15 +15818,15 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
14866
15818
|
}, {});
|
|
14867
15819
|
this.#nodeResults.set(node.id, aggregateResults);
|
|
14868
15820
|
this.#visitedNodes.add(node.id);
|
|
14869
|
-
this.#totalRequestTokens += (0,
|
|
15821
|
+
this.#totalRequestTokens += (0, import_lodash_es16.sum)(results.map((r) => {
|
|
14870
15822
|
var _a2;
|
|
14871
15823
|
return coerceTypeOptional((_a2 = r.output) == null ? void 0 : _a2["requestTokens"], "number") ?? 0;
|
|
14872
15824
|
}));
|
|
14873
|
-
this.#totalResponseTokens += (0,
|
|
15825
|
+
this.#totalResponseTokens += (0, import_lodash_es16.sum)(results.map((r) => {
|
|
14874
15826
|
var _a2;
|
|
14875
15827
|
return coerceTypeOptional((_a2 = r.output) == null ? void 0 : _a2["responseTokens"], "number") ?? 0;
|
|
14876
15828
|
}));
|
|
14877
|
-
this.#totalCost += (0,
|
|
15829
|
+
this.#totalCost += (0, import_lodash_es16.sum)(results.map((r) => {
|
|
14878
15830
|
var _a2;
|
|
14879
15831
|
return coerceTypeOptional((_a2 = r.output) == null ? void 0 : _a2["cost"], "number") ?? 0;
|
|
14880
15832
|
}));
|
|
@@ -15094,7 +16046,8 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
15094
16046
|
});
|
|
15095
16047
|
});
|
|
15096
16048
|
return results2;
|
|
15097
|
-
}
|
|
16049
|
+
},
|
|
16050
|
+
graphInputNodeValues: this.#graphInputNodeValues
|
|
15098
16051
|
};
|
|
15099
16052
|
await this.#waitUntilUnpaused();
|
|
15100
16053
|
const results = await instance.process(inputValues, context);
|
|
@@ -15217,7 +16170,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
|
|
|
15217
16170
|
const connectionDefinition = outputDefinitions.find((def) => def.id === connection.outputId);
|
|
15218
16171
|
return connectionDefinition != null;
|
|
15219
16172
|
});
|
|
15220
|
-
const outputNodes = (0,
|
|
16173
|
+
const outputNodes = (0, import_lodash_es16.uniqBy)(
|
|
15221
16174
|
outputConnections.map((conn) => this.#nodesById[conn.inputNodeId]).filter(isNotNull),
|
|
15222
16175
|
(x) => x.id
|
|
15223
16176
|
);
|
|
@@ -15371,9 +16324,9 @@ var OpenAIEmbeddingGenerator = class {
|
|
|
15371
16324
|
registerIntegration("embeddingGenerator", "openai", (context) => new OpenAIEmbeddingGenerator(context.settings));
|
|
15372
16325
|
|
|
15373
16326
|
// src/recording/ExecutionRecorder.ts
|
|
15374
|
-
var
|
|
16327
|
+
var import_non_secure79 = require("nanoid/non-secure");
|
|
15375
16328
|
var import_emittery3 = __toESM(require("emittery-0-13"), 1);
|
|
15376
|
-
var
|
|
16329
|
+
var import_lodash_es17 = require("lodash");
|
|
15377
16330
|
var toRecordedEventMap = {
|
|
15378
16331
|
graphStart: ({ graph, inputs }) => ({ graphId: graph.metadata.id, inputs }),
|
|
15379
16332
|
graphFinish: ({ graph, outputs }) => ({ graphId: graph.metadata.id, outputs }),
|
|
@@ -15470,16 +16423,16 @@ function toRecordedEvent(event, data) {
|
|
|
15470
16423
|
function mapValuesDeep(obj, fn) {
|
|
15471
16424
|
if (Array.isArray(obj)) {
|
|
15472
16425
|
return obj.map((value) => {
|
|
15473
|
-
if ((0,
|
|
16426
|
+
if ((0, import_lodash_es17.isPlainObject)(value) || Array.isArray(value)) {
|
|
15474
16427
|
return mapValuesDeep(value, fn);
|
|
15475
16428
|
}
|
|
15476
16429
|
return fn(value);
|
|
15477
16430
|
});
|
|
15478
16431
|
}
|
|
15479
|
-
if ((0,
|
|
16432
|
+
if ((0, import_lodash_es17.isPlainObject)(obj)) {
|
|
15480
16433
|
return Object.fromEntries(
|
|
15481
16434
|
Object.entries(obj).map(([key, value]) => {
|
|
15482
|
-
if ((0,
|
|
16435
|
+
if ((0, import_lodash_es17.isPlainObject)(value) || Array.isArray(value)) {
|
|
15483
16436
|
return [key, mapValuesDeep(value, fn)];
|
|
15484
16437
|
}
|
|
15485
16438
|
return [key, fn(value)];
|
|
@@ -15505,7 +16458,7 @@ var ExecutionRecorder = class _ExecutionRecorder {
|
|
|
15505
16458
|
once = void 0;
|
|
15506
16459
|
recordSocket(channel) {
|
|
15507
16460
|
return new Promise((resolve) => {
|
|
15508
|
-
this.recordingId = (0,
|
|
16461
|
+
this.recordingId = (0, import_non_secure79.nanoid)();
|
|
15509
16462
|
const listener = (event) => {
|
|
15510
16463
|
const { message, data } = JSON.parse(event.data);
|
|
15511
16464
|
if (this.#includePartialOutputs === false && message === "partialOutput") {
|
|
@@ -15527,7 +16480,7 @@ var ExecutionRecorder = class _ExecutionRecorder {
|
|
|
15527
16480
|
});
|
|
15528
16481
|
}
|
|
15529
16482
|
record(processor) {
|
|
15530
|
-
this.recordingId = (0,
|
|
16483
|
+
this.recordingId = (0, import_non_secure79.nanoid)();
|
|
15531
16484
|
processor.onAny((event, data) => {
|
|
15532
16485
|
if (this.#includePartialOutputs === false && event === "partialOutput") {
|
|
15533
16486
|
return;
|
|
@@ -15589,7 +16542,7 @@ var ExecutionRecorder = class _ExecutionRecorder {
|
|
|
15589
16542
|
const asString = uint8ArrayToBase64Sync(val);
|
|
15590
16543
|
const existingAsset = Object.entries(serialized.assets).find(([, asset]) => asset === asString);
|
|
15591
16544
|
if (!existingAsset) {
|
|
15592
|
-
const id = (0,
|
|
16545
|
+
const id = (0, import_non_secure79.nanoid)();
|
|
15593
16546
|
serialized.assets[id] = asString;
|
|
15594
16547
|
return `$ASSET:${id}`;
|
|
15595
16548
|
} else {
|
|
@@ -15604,8 +16557,8 @@ var ExecutionRecorder = class _ExecutionRecorder {
|
|
|
15604
16557
|
};
|
|
15605
16558
|
|
|
15606
16559
|
// src/plugins/aidon/nodes/ChatAidonNode.ts
|
|
15607
|
-
var
|
|
15608
|
-
var
|
|
16560
|
+
var import_lodash_es18 = require("lodash");
|
|
16561
|
+
var import_ts_dedent71 = require("ts-dedent");
|
|
15609
16562
|
var registry = globalRivetNodeRegistry;
|
|
15610
16563
|
var ChatAidonNodeImpl = class extends ChatNodeImpl {
|
|
15611
16564
|
create() {
|
|
@@ -15742,7 +16695,7 @@ var ChatAidonNodeImpl = class extends ChatNodeImpl {
|
|
|
15742
16695
|
message: JSON.stringify(data)
|
|
15743
16696
|
});
|
|
15744
16697
|
}
|
|
15745
|
-
inputs = (0,
|
|
16698
|
+
inputs = (0, import_lodash_es18.omit)(inputs, ["functions", "prompt"]);
|
|
15746
16699
|
inputs["prompt"] = messages;
|
|
15747
16700
|
outputs = await super.process(inputs, context);
|
|
15748
16701
|
}
|
|
@@ -15786,7 +16739,7 @@ var createPluginNodeImpl = (chatNode2) => {
|
|
|
15786
16739
|
},
|
|
15787
16740
|
getUIData() {
|
|
15788
16741
|
return {
|
|
15789
|
-
infoBoxBody:
|
|
16742
|
+
infoBoxBody: import_ts_dedent71.dedent`
|
|
15790
16743
|
Makes a call to an Aidon chat model. The settings contains many options for tweaking the model's behavior.
|
|
15791
16744
|
`,
|
|
15792
16745
|
infoBoxTitle: "Chat (Aidon) Node",
|
|
@@ -16001,6 +16954,22 @@ var anthropicModels = {
       completion: 15e-6
     },
     displayName: "Claude 3.7 Sonnet"
+  },
+  "claude-sonnet-4-20250514": {
+    maxTokens: 2e5,
+    cost: {
+      prompt: 3e-6,
+      completion: 375e-8
+    },
+    displayName: "Claude Sonnet 4"
+  },
+  "claude-opus-4-20250514": {
+    maxTokens: 2e5,
+    cost: {
+      prompt: 15e-6,
+      completion: 1875e-8
+    },
+    displayName: "Claude Opus 4"
   }
 };
 var anthropicModelOptions = Object.entries(anthropicModels).map(([id, { displayName }]) => ({
@@ -16011,6 +16980,7 @@ async function* streamChatCompletions2({
   apiEndpoint,
   apiKey,
   signal,
+  additionalHeaders,
   ...rest
 }) {
   const defaultSignal = new AbortController().signal;
@@ -16020,7 +16990,8 @@ async function* streamChatCompletions2({
       "Content-Type": "application/json",
       "x-api-key": apiKey,
       "anthropic-version": "2023-06-01",
-      "anthropic-dangerous-direct-browser-access": "true"
+      "anthropic-dangerous-direct-browser-access": "true",
+      ...additionalHeaders
     },
     body: JSON.stringify({
       ...rest,
@@ -16055,6 +17026,7 @@ async function* streamMessageApi({
|
|
|
16055
17026
|
apiKey,
|
|
16056
17027
|
signal,
|
|
16057
17028
|
beta,
|
|
17029
|
+
additionalHeaders,
|
|
16058
17030
|
...rest
|
|
16059
17031
|
}) {
|
|
16060
17032
|
const defaultSignal = new AbortController().signal;
|
|
@@ -16065,7 +17037,8 @@ async function* streamMessageApi({
|
|
|
16065
17037
|
"x-api-key": apiKey,
|
|
16066
17038
|
"anthropic-version": "2023-06-01",
|
|
16067
17039
|
"anthropic-dangerous-direct-browser-access": "true",
|
|
16068
|
-
...beta ? { "anthropic-beta": beta } : {}
|
|
17040
|
+
...beta ? { "anthropic-beta": beta } : {},
|
|
17041
|
+
...additionalHeaders
|
|
16069
17042
|
},
|
|
16070
17043
|
body: JSON.stringify({
|
|
16071
17044
|
...rest,
|
|
@@ -16104,8 +17077,8 @@ var AnthropicError = class extends Error {
|
|
|
16104
17077
|
};
|
|
16105
17078
|
|
|
16106
17079
|
// src/plugins/anthropic/nodes/ChatAnthropicNode.ts
|
|
16107
|
-
var
|
|
16108
|
-
var
|
|
17080
|
+
var import_non_secure80 = require("nanoid/non-secure");
|
|
17081
|
+
var import_ts_dedent72 = require("ts-dedent");
|
|
16109
17082
|
var import_p_retry2 = __toESM(require("p-retry-4"), 1);
|
|
16110
17083
|
var import_ts_pattern10 = require("ts-pattern");
|
|
16111
17084
|
|
|
@@ -16121,14 +17094,14 @@ var ChatAnthropicNodeImpl = {
     const chartNode = {
       type: "chatAnthropic",
       title: "Chat (Anthropic)",
-      id: (0,
+      id: (0, import_non_secure80.nanoid)(),
       visualData: {
         x: 0,
         y: 0,
         width: 275
       },
       data: {
-        model: "claude-
+        model: "claude-sonnet-4-20250514",
         useModelInput: false,
         temperature: 0.5,
         useTemperatureInput: false,
@@ -16221,6 +17194,14 @@ var ChatAnthropicNodeImpl = {
         coerced: false
       });
     }
+    if (data.useHeadersInput) {
+      inputs.push({
+        dataType: "object",
+        id: "headers",
+        title: "Headers",
+        description: "Additional headers to send to the API."
+      });
+    }
     return inputs;
   },
   getOutputDefinitions(data) {
@@ -16257,7 +17238,7 @@ var ChatAnthropicNodeImpl = {
|
|
|
16257
17238
|
getBody(data) {
|
|
16258
17239
|
var _a;
|
|
16259
17240
|
const modelName = data.overrideModel ? data.overrideModel : ((_a = anthropicModels[data.model]) == null ? void 0 : _a.displayName) ?? "Unknown Model";
|
|
16260
|
-
return
|
|
17241
|
+
return import_ts_dedent72.dedent`
|
|
16261
17242
|
${modelName}
|
|
16262
17243
|
${data.useTopP ? `Top P: ${data.useTopPInput ? "(Using Input)" : data.top_p}` : `Temperature: ${data.useTemperatureInput ? "(Using Input)" : data.temperature}`}
|
|
16263
17244
|
Max Tokens: ${data.maxTokens}
|
|
@@ -16367,6 +17348,14 @@ var ChatAnthropicNodeImpl = {
|
|
|
16367
17348
|
dataKey: "overrideModel",
|
|
16368
17349
|
useInputToggleDataKey: "useOverrideModelInput",
|
|
16369
17350
|
helperMessage: "Overrides the AI model used for the chat node to this value."
|
|
17351
|
+
},
|
|
17352
|
+
{
|
|
17353
|
+
type: "keyValuePair",
|
|
17354
|
+
label: "Headers",
|
|
17355
|
+
dataKey: "headers",
|
|
17356
|
+
useInputToggleDataKey: "useHeadersInput",
|
|
17357
|
+
keyPlaceholder: "Header",
|
|
17358
|
+
helperMessage: "Additional headers to send to the API."
|
|
16370
17359
|
}
|
|
16371
17360
|
]
|
|
16372
17361
|
}
|
|
@@ -16374,7 +17363,7 @@ var ChatAnthropicNodeImpl = {
|
|
|
16374
17363
|
},
|
|
16375
17364
|
getUIData() {
|
|
16376
17365
|
return {
|
|
16377
|
-
infoBoxBody:
|
|
17366
|
+
infoBoxBody: import_ts_dedent72.dedent`
|
|
16378
17367
|
Makes a call to an Anthropic chat model. The settings contains many options for tweaking the model's behavior.
|
|
16379
17368
|
`,
|
|
16380
17369
|
infoBoxTitle: "Chat (Anthropic) Node",
|
|
@@ -16433,6 +17422,18 @@ Assistant: ${content}`;
       addWarning(output, message);
       maxTokens = Math.floor((modelInfo.maxTokens - tokenCountEstimate) * 0.95);
     }
+    const headersFromData = (data.headers ?? []).reduce(
+      (acc, header) => {
+        acc[header.key] = header.value;
+        return acc;
+      },
+      {}
+    );
+    const additionalHeaders = data.useHeadersInput ? coerceTypeOptional(inputs["headers"], "object") ?? headersFromData : headersFromData;
+    const allAdditionalHeaders = cleanHeaders({
+      ...context.settings.chatNodeHeaders,
+      ...additionalHeaders
+    });
     try {
       return await (0, import_p_retry2.default)(
         async () => {
@@ -16455,7 +17456,7 @@ Assistant: ${content}`;
|
|
|
16455
17456
|
messages,
|
|
16456
17457
|
tools: tools ? tools.map((tool) => ({ name: tool.name, description: tool.description, input_schema: tool.parameters })) : void 0
|
|
16457
17458
|
};
|
|
16458
|
-
const useMessageApi = model.startsWith("claude-3");
|
|
17459
|
+
const useMessageApi = model.startsWith("claude-3") || model.startsWith("claude-sonnet") || model.startsWith("claude-opus");
|
|
16459
17460
|
const cacheKey = JSON.stringify(useMessageApi ? messageOptions : completionOptions);
|
|
16460
17461
|
if (data.cache) {
|
|
16461
17462
|
const cached = cache2.get(cacheKey);
|
|
@@ -16474,6 +17475,7 @@ Assistant: ${content}`;
|
|
|
16474
17475
|
apiKey: apiKey ?? "",
|
|
16475
17476
|
signal: context.signal,
|
|
16476
17477
|
beta: "prompt-caching-2024-07-31",
|
|
17478
|
+
additionalHeaders: allAdditionalHeaders,
|
|
16477
17479
|
...messageOptions
|
|
16478
17480
|
});
|
|
16479
17481
|
const responseParts = [];
|
|
@@ -16587,6 +17589,7 @@ Assistant: ${content}`;
|
|
|
16587
17589
|
apiEndpoint,
|
|
16588
17590
|
apiKey: apiKey ?? "",
|
|
16589
17591
|
signal: context.signal,
|
|
17592
|
+
additionalHeaders: allAdditionalHeaders,
|
|
16590
17593
|
...completionOptions
|
|
16591
17594
|
});
|
|
16592
17595
|
const responseParts = [];
|
|
@@ -16869,8 +17872,8 @@ var anthropicPlugin = {
|
|
|
16869
17872
|
var anthropic_default = anthropicPlugin;
|
|
16870
17873
|
|
|
16871
17874
|
// src/plugins/autoevals/AutoEvalsNode.ts
|
|
16872
|
-
var
|
|
16873
|
-
var
|
|
17875
|
+
var import_non_secure81 = require("nanoid/non-secure");
|
|
17876
|
+
var import_ts_dedent73 = require("ts-dedent");
|
|
16874
17877
|
var import_autoevals = require("autoevals");
|
|
16875
17878
|
var import_ts_pattern11 = require("ts-pattern");
|
|
16876
17879
|
var options = [
|
|
@@ -16889,7 +17892,7 @@ var AutoEvalsNodeImpl = {
|
|
|
16889
17892
|
const chartNode = {
|
|
16890
17893
|
type: "autoevals",
|
|
16891
17894
|
title: "Autoevals",
|
|
16892
|
-
id: (0,
|
|
17895
|
+
id: (0, import_non_secure81.nanoid)(),
|
|
16893
17896
|
visualData: {
|
|
16894
17897
|
x: 0,
|
|
16895
17898
|
y: 0,
|
|
@@ -17004,7 +18007,7 @@ var AutoEvalsNodeImpl = {
|
|
|
17004
18007
|
},
|
|
17005
18008
|
getUIData() {
|
|
17006
18009
|
return {
|
|
17007
|
-
infoBoxBody:
|
|
18010
|
+
infoBoxBody: import_ts_dedent73.dedent`
|
|
17008
18011
|
Evaluates the validity of a response using the autoevals library.
|
|
17009
18012
|
`,
|
|
17010
18013
|
infoBoxTitle: "Autoevals Node",
|
|
@@ -17084,8 +18087,8 @@ var autoevalsPlugin = {
|
|
|
17084
18087
|
var autoevals_default = autoevalsPlugin;
|
|
17085
18088
|
|
|
17086
18089
|
// src/plugins/assemblyAi/LemurQaNode.ts
|
|
17087
|
-
var
|
|
17088
|
-
var
|
|
18090
|
+
var import_non_secure82 = require("nanoid/non-secure");
|
|
18091
|
+
var import_ts_dedent74 = require("ts-dedent");
|
|
17089
18092
|
|
|
17090
18093
|
// src/plugins/assemblyAi/lemurHelpers.ts
|
|
17091
18094
|
var import_assemblyai = require("assemblyai");
|
|
@@ -17205,7 +18208,7 @@ var LemurQaNodeImpl = {
|
|
|
17205
18208
|
const chartNode = {
|
|
17206
18209
|
type: "assemblyAiLemurQa",
|
|
17207
18210
|
title: "LeMUR Question & Answers",
|
|
17208
|
-
id: (0,
|
|
18211
|
+
id: (0, import_non_secure82.nanoid)(),
|
|
17209
18212
|
visualData: {
|
|
17210
18213
|
x: 0,
|
|
17211
18214
|
y: 0,
|
|
@@ -17271,7 +18274,7 @@ var LemurQaNodeImpl = {
|
|
|
17271
18274
|
},
|
|
17272
18275
|
getUIData() {
|
|
17273
18276
|
return {
|
|
17274
|
-
infoBoxBody:
|
|
18277
|
+
infoBoxBody: import_ts_dedent74.dedent`Use AssemblyAI LeMUR to ask questions about transcripts`,
|
|
17275
18278
|
infoBoxTitle: "Use AssemblyAI LeMUR Question & Answer",
|
|
17276
18279
|
contextMenuTitle: "LeMUR Q&A",
|
|
17277
18280
|
group: ["AI", "AssemblyAI"]
|
|
@@ -17343,14 +18346,14 @@ function applyQuestionEditors(data, question) {
|
|
|
17343
18346
|
var lemurQaNode = pluginNodeDefinition(LemurQaNodeImpl, "LeMUR Q&A");
|
|
17344
18347
|
|
|
17345
18348
|
// src/plugins/assemblyAi/TranscribeAudioNode.ts
|
|
17346
|
-
var
|
|
17347
|
-
var
|
|
18349
|
+
var import_non_secure83 = require("nanoid/non-secure");
|
|
18350
|
+
var import_ts_dedent75 = require("ts-dedent");
|
|
17348
18351
|
var TranscribeAudioNodeImpl = {
|
|
17349
18352
|
create() {
|
|
17350
18353
|
const chartNode = {
|
|
17351
18354
|
type: "assemblyAiTranscribeAudio",
|
|
17352
18355
|
title: "Transcribe Audio",
|
|
17353
|
-
id: (0,
|
|
18356
|
+
id: (0, import_non_secure83.nanoid)(),
|
|
17354
18357
|
visualData: {
|
|
17355
18358
|
x: 0,
|
|
17356
18359
|
y: 0,
|
|
@@ -17405,7 +18408,7 @@ var TranscribeAudioNodeImpl = {
|
|
|
17405
18408
|
},
|
|
17406
18409
|
getUIData() {
|
|
17407
18410
|
return {
|
|
17408
|
-
infoBoxBody:
|
|
18411
|
+
infoBoxBody: import_ts_dedent75.dedent`Use AssemblyAI to transcribe audio`,
|
|
17409
18412
|
infoBoxTitle: "Transcribe Audio Node",
|
|
17410
18413
|
contextMenuTitle: "Transcribe Audio",
|
|
17411
18414
|
group: ["AI", "AssemblyAI"]
|
|
@@ -17464,15 +18467,15 @@ function getAdditionalParameters(data) {
|
|
|
17464
18467
|
}
|
|
17465
18468
|
|
|
17466
18469
|
// src/plugins/assemblyAi/LemurSummaryNode.ts
|
|
17467
|
-
var
|
|
17468
|
-
var
|
|
18470
|
+
var import_non_secure84 = require("nanoid/non-secure");
|
|
18471
|
+
var import_ts_dedent76 = require("ts-dedent");
|
|
17469
18472
|
var import_assemblyai3 = require("assemblyai");
|
|
17470
18473
|
var LemurSummaryNodeImpl = {
|
|
17471
18474
|
create() {
|
|
17472
18475
|
const chartNode = {
|
|
17473
18476
|
type: "assemblyAiLemurSummary",
|
|
17474
18477
|
title: "LeMUR Summary",
|
|
17475
|
-
id: (0,
|
|
18478
|
+
id: (0, import_non_secure84.nanoid)(),
|
|
17476
18479
|
visualData: {
|
|
17477
18480
|
x: 0,
|
|
17478
18481
|
y: 0,
|
|
@@ -17518,7 +18521,7 @@ var LemurSummaryNodeImpl = {
|
|
|
17518
18521
|
},
|
|
17519
18522
|
getUIData() {
|
|
17520
18523
|
return {
|
|
17521
|
-
infoBoxBody:
|
|
18524
|
+
infoBoxBody: import_ts_dedent76.dedent`Use AssemblyAI LeMUR Summary to summarize transcripts`,
|
|
17522
18525
|
infoBoxTitle: "Use AssemblyAI LeMUR Summary",
|
|
17523
18526
|
contextMenuTitle: "LeMUR Summary",
|
|
17524
18527
|
group: ["AI", "AssemblyAI"]
|
|
@@ -17542,15 +18545,15 @@ var LemurSummaryNodeImpl = {
|
|
|
17542
18545
|
var lemurSummaryNode = pluginNodeDefinition(LemurSummaryNodeImpl, "LeMUR Summary");
|
|
17543
18546
|
|
|
17544
18547
|
// src/plugins/assemblyAi/LemurTaskNode.ts
|
|
17545
|
-
var
|
|
17546
|
-
var
|
|
18548
|
+
var import_non_secure85 = require("nanoid/non-secure");
|
|
18549
|
+
var import_ts_dedent77 = require("ts-dedent");
|
|
17547
18550
|
var import_assemblyai4 = require("assemblyai");
|
|
17548
18551
|
var LemurTaskNodeImpl = {
|
|
17549
18552
|
create() {
|
|
17550
18553
|
const chartNode = {
|
|
17551
18554
|
type: "assemblyAiLemurTask",
|
|
17552
18555
|
title: "LeMUR Task",
|
|
17553
|
-
id: (0,
|
|
18556
|
+
id: (0, import_non_secure85.nanoid)(),
|
|
17554
18557
|
visualData: {
|
|
17555
18558
|
x: 0,
|
|
17556
18559
|
y: 0,
|
|
@@ -17596,7 +18599,7 @@ var LemurTaskNodeImpl = {
|
|
|
17596
18599
|
},
|
|
17597
18600
|
getUIData() {
|
|
17598
18601
|
return {
|
|
17599
|
-
infoBoxBody:
|
|
18602
|
+
infoBoxBody: import_ts_dedent77.dedent`Use AssemblyAI LeMUR Custom Task to ask anything.`,
|
|
17600
18603
|
infoBoxTitle: "Use AssemblyAI LeMUR Custom Task",
|
|
17601
18604
|
contextMenuTitle: "LeMUR Custom Task",
|
|
17602
18605
|
group: ["AI", "AssemblyAI"]
|
|
@@ -17622,14 +18625,14 @@ var LemurTaskNodeImpl = {
|
|
|
17622
18625
|
var lemurTaskNode = pluginNodeDefinition(LemurTaskNodeImpl, "LeMUR Task");
|
|
17623
18626
|
|
|
17624
18627
|
// src/plugins/assemblyAi/LemurActionItemsNode.ts
|
|
17625
|
-
var
|
|
17626
|
-
var
|
|
18628
|
+
var import_non_secure86 = require("nanoid/non-secure");
|
|
18629
|
+
var import_ts_dedent78 = require("ts-dedent");
|
|
17627
18630
|
var LemurActionItemsNodeImpl = {
|
|
17628
18631
|
create() {
|
|
17629
18632
|
const chartNode = {
|
|
17630
18633
|
type: "assemblyAiLemurActionItems",
|
|
17631
18634
|
title: "LeMUR Action Items",
|
|
17632
|
-
id: (0,
|
|
18635
|
+
id: (0, import_non_secure86.nanoid)(),
|
|
17633
18636
|
visualData: {
|
|
17634
18637
|
x: 0,
|
|
17635
18638
|
y: 0,
|
|
@@ -17675,7 +18678,7 @@ var LemurActionItemsNodeImpl = {
|
|
|
17675
18678
|
},
|
|
17676
18679
|
getUIData() {
|
|
17677
18680
|
return {
|
|
17678
|
-
infoBoxBody:
|
|
18681
|
+
infoBoxBody: import_ts_dedent78.dedent`Use AssemblyAI LeMUR Action Items to extract action items`,
|
|
17679
18682
|
infoBoxTitle: "Use AssemblyAI LeMUR Action Items",
|
|
17680
18683
|
contextMenuTitle: "LeMUR Action Items",
|
|
17681
18684
|
group: ["AI", "AssemblyAI"]
|
|
@@ -17727,12 +18730,12 @@ var assemblyAiPlugin = {
|
|
|
17727
18730
|
var assemblyAi_default = assemblyAiPlugin;
|
|
17728
18731
|
|
|
17729
18732
|
// src/plugins/huggingface/nodes/ChatHuggingFace.ts
|
|
17730
|
-
var
|
|
18733
|
+
var import_non_secure87 = require("nanoid/non-secure");
|
|
17731
18734
|
var import_inference = require("@huggingface/inference");
|
|
17732
18735
|
var ChatHuggingFaceNodeImpl = {
|
|
17733
18736
|
create() {
|
|
17734
18737
|
return {
|
|
17735
|
-
id: (0,
|
|
18738
|
+
id: (0, import_non_secure87.nanoid)(),
|
|
17736
18739
|
type: "chatHuggingFace",
|
|
17737
18740
|
data: {
|
|
17738
18741
|
model: "",
|
|
@@ -17962,13 +18965,13 @@ var ChatHuggingFaceNodeImpl = {
|
|
|
17962
18965
|
var chatHuggingFaceNode = pluginNodeDefinition(ChatHuggingFaceNodeImpl, "Chat (Hugging Face)");
|
|
17963
18966
|
|
|
17964
18967
|
// src/plugins/huggingface/nodes/TextToImageHuggingFace.ts
|
|
17965
|
-
var
|
|
18968
|
+
var import_non_secure88 = require("nanoid/non-secure");
|
|
17966
18969
|
var import_inference2 = require("@huggingface/inference");
|
|
17967
|
-
var
|
|
18970
|
+
var import_ts_dedent79 = require("ts-dedent");
|
|
17968
18971
|
var TextToImageHuggingFaceNodeImpl = {
|
|
17969
18972
|
create() {
|
|
17970
18973
|
return {
|
|
17971
|
-
id: (0,
|
|
18974
|
+
id: (0, import_non_secure88.nanoid)(),
|
|
17972
18975
|
type: "textToImageHuggingFace",
|
|
17973
18976
|
data: {
|
|
17974
18977
|
model: "",
|
|
@@ -18109,7 +19112,7 @@ var TextToImageHuggingFaceNodeImpl = {
|
|
|
18109
19112
|
];
|
|
18110
19113
|
},
|
|
18111
19114
|
getBody(data) {
|
|
18112
|
-
return
|
|
19115
|
+
return import_ts_dedent79.dedent`
|
|
18113
19116
|
Model: ${data.useModelInput ? "(Using Input)" : data.model}
|
|
18114
19117
|
`;
|
|
18115
19118
|
},
|
|
@@ -18285,7 +19288,7 @@ var pinecone_default = pineconePlugin;
|
|
|
18285
19288
|
|
|
18286
19289
|
// src/plugins/gentrace/plugin.ts
|
|
18287
19290
|
var import_core = require("@gentrace/core");
|
|
18288
|
-
var
|
|
19291
|
+
var import_lodash_es19 = require("lodash");
|
|
18289
19292
|
var apiKeyConfigSpec = {
|
|
18290
19293
|
type: "secret",
|
|
18291
19294
|
label: "Gentrace API Key",
|
|
@@ -20844,7 +21847,7 @@ ${additional.join("\n")}`;
|
|
|
20844
21847
|
var runThreadNode = pluginNodeDefinition(RunThreadNodeImpl, "Run Thread");
|
|
20845
21848
|
|
|
20846
21849
|
// src/plugins/openai/nodes/ThreadMessageNode.ts
|
|
20847
|
-
var
|
|
21850
|
+
var import_lodash_es20 = require("lodash");
|
|
20848
21851
|
var ThreadMessageNodeImpl = {
|
|
20849
21852
|
create() {
|
|
20850
21853
|
return {
|
|
@@ -20969,7 +21972,7 @@ var ThreadMessageNodeImpl = {
|
|
|
20969
21972
|
if (data.useMetadataInput && inputData["metadata"]) {
|
|
20970
21973
|
metadata = coerceTypeOptional(inputData["metadata"], "object");
|
|
20971
21974
|
}
|
|
20972
|
-
const inputMap = (0,
|
|
21975
|
+
const inputMap = (0, import_lodash_es20.mapValues)(inputData, (input) => coerceType(input, "string"));
|
|
20973
21976
|
const interpolated = interpolate(text, inputMap);
|
|
20974
21977
|
return {
|
|
20975
21978
|
["message"]: {
|
|
@@ -21017,7 +22020,7 @@ var openAIPlugin = {
|
|
|
21017
22020
|
};
|
|
21018
22021
|
|
|
21019
22022
|
// src/plugins/google/google.ts
|
|
21020
|
-
var
|
|
22023
|
+
var import_genai = require("@google/genai");
|
|
21021
22024
|
var googleModelsDeprecated = {
|
|
21022
22025
|
"gemini-pro": {
|
|
21023
22026
|
maxTokens: 32760,
|
|
@@ -21037,12 +22040,20 @@ var googleModelsDeprecated = {
   }
 };
 var generativeAiGoogleModels = {
-  "gemini-2.
+  "gemini-2.5-flash-preview-04-17": {
     maxTokens: 1048576,
     cost: {
      prompt: 0.15 / 1e3,
      completion: 0.6 / 1e3
     },
+    displayName: "Gemini 2.5 Flash Preview"
+  },
+  "gemini-2.0-flash-001": {
+    maxTokens: 1048576,
+    cost: {
+      prompt: 0.1 / 1e3,
+      completion: 0.4 / 1e3
+    },
     displayName: "Gemini 2.0 Flash"
   },
   "gemini-2.0-pro-exp-02-05": {
@@ -21142,36 +22153,42 @@ async function* streamGenerativeAi({
|
|
|
21142
22153
|
topP,
|
|
21143
22154
|
topK,
|
|
21144
22155
|
signal,
|
|
21145
|
-
tools
|
|
22156
|
+
tools,
|
|
22157
|
+
thinkingBudget,
|
|
22158
|
+
additionalHeaders
|
|
21146
22159
|
}) {
|
|
21147
22160
|
var _a, _b, _c, _d, _e;
|
|
21148
|
-
const {
|
|
21149
|
-
const genAi = new
|
|
21150
|
-
const
|
|
22161
|
+
const { GoogleGenAI } = await import("@google/genai");
|
|
22162
|
+
const genAi = new GoogleGenAI({ apiKey });
|
|
22163
|
+
const result = await genAi.models.generateContentStream({
|
|
21151
22164
|
model,
|
|
21152
|
-
|
|
21153
|
-
|
|
22165
|
+
contents: prompt,
|
|
22166
|
+
config: {
|
|
22167
|
+
systemInstruction: systemPrompt,
|
|
21154
22168
|
maxOutputTokens,
|
|
21155
22169
|
temperature,
|
|
21156
22170
|
topP,
|
|
21157
|
-
topK
|
|
21158
|
-
|
|
21159
|
-
|
|
22171
|
+
topK,
|
|
22172
|
+
tools,
|
|
22173
|
+
abortSignal: signal,
|
|
22174
|
+
thinkingConfig: {
|
|
22175
|
+
thinkingBudget
|
|
22176
|
+
},
|
|
22177
|
+
httpOptions: {
|
|
22178
|
+
headers: {
|
|
22179
|
+
...additionalHeaders
|
|
22180
|
+
}
|
|
22181
|
+
}
|
|
22182
|
+
}
|
|
21160
22183
|
});
|
|
21161
|
-
const
|
|
21162
|
-
{
|
|
21163
|
-
contents: prompt
|
|
21164
|
-
},
|
|
21165
|
-
{ signal }
|
|
21166
|
-
);
|
|
21167
|
-
for await (const chunk of result.stream) {
|
|
22184
|
+
for await (const chunk of result) {
|
|
21168
22185
|
const outChunk = {
|
|
21169
22186
|
completion: void 0,
|
|
21170
22187
|
finish_reason: void 0,
|
|
21171
22188
|
function_calls: void 0,
|
|
21172
22189
|
model
|
|
21173
22190
|
};
|
|
21174
|
-
const functionCalls = chunk.functionCalls
|
|
22191
|
+
const functionCalls = chunk.functionCalls;
|
|
21175
22192
|
if (functionCalls) {
|
|
21176
22193
|
outChunk.function_calls = functionCalls;
|
|
21177
22194
|
}
|
|
@@ -21232,26 +22249,26 @@ async function* streamChatCompletions3({
|
|
|
21232
22249
|
}
|
|
21233
22250
|
|
|
21234
22251
|
// src/plugins/google/nodes/ChatGoogleNode.ts
|
|
21235
|
-
var
|
|
21236
|
-
var
|
|
22252
|
+
var import_non_secure89 = require("nanoid/non-secure");
|
|
22253
|
+
var import_ts_dedent80 = require("ts-dedent");
|
|
21237
22254
|
var import_p_retry3 = __toESM(require("p-retry-4"), 1);
|
|
21238
22255
|
var import_ts_pattern12 = require("ts-pattern");
|
|
21239
|
-
var
|
|
21240
|
-
var
|
|
22256
|
+
var import_genai2 = require("@google/genai");
|
|
22257
|
+
var import_lodash_es21 = require("lodash");
|
|
21241
22258
|
var cache3 = /* @__PURE__ */ new Map();
|
|
21242
22259
|
var ChatGoogleNodeImpl = {
|
|
21243
22260
|
create() {
|
|
21244
22261
|
const chartNode = {
|
|
21245
22262
|
type: "chatGoogle",
|
|
21246
22263
|
title: "Chat (Google)",
|
|
21247
|
-
id: (0,
|
|
22264
|
+
id: (0, import_non_secure89.nanoid)(),
|
|
21248
22265
|
visualData: {
|
|
21249
22266
|
x: 0,
|
|
21250
22267
|
y: 0,
|
|
21251
22268
|
width: 275
|
|
21252
22269
|
},
|
|
21253
22270
|
data: {
|
|
21254
|
-
model: "gemini-2.
|
|
22271
|
+
model: "gemini-2.5-flash-preview-04-17",
|
|
21255
22272
|
useModelInput: false,
|
|
21256
22273
|
temperature: 0.5,
|
|
21257
22274
|
useTemperatureInput: false,
|
|
@@ -21265,7 +22282,9 @@ var ChatGoogleNodeImpl = {
|
|
|
21265
22282
|
useMaxTokensInput: false,
|
|
21266
22283
|
cache: false,
|
|
21267
22284
|
useAsGraphPartialOutput: true,
|
|
21268
|
-
useToolCalling: false
|
|
22285
|
+
useToolCalling: false,
|
|
22286
|
+
thinkingBudget: void 0,
|
|
22287
|
+
useThinkingBudgetInput: false
|
|
21269
22288
|
}
|
|
21270
22289
|
};
|
|
21271
22290
|
return chartNode;
|
|
@@ -21323,11 +22342,27 @@ var ChatGoogleNodeImpl = {
|
|
|
21323
22342
|
description: "Tools available for the model to call."
|
|
21324
22343
|
});
|
|
21325
22344
|
}
|
|
22345
|
+
if (data.useThinkingBudgetInput) {
|
|
22346
|
+
inputs.push({
|
|
22347
|
+
dataType: "number",
|
|
22348
|
+
id: "thinkingBudget",
|
|
22349
|
+
title: "Thinking Budget",
|
|
22350
|
+
description: "The token budget for the model to think before responding."
|
|
22351
|
+
});
|
|
22352
|
+
}
|
|
21326
22353
|
inputs.push({
|
|
21327
22354
|
dataType: ["chat-message", "chat-message[]"],
|
|
21328
22355
|
id: "prompt",
|
|
21329
22356
|
title: "Prompt"
|
|
21330
22357
|
});
|
|
22358
|
+
if (data.useHeadersInput) {
|
|
22359
|
+
inputs.push({
|
|
22360
|
+
dataType: "object",
|
|
22361
|
+
id: "headers",
|
|
22362
|
+
title: "Headers",
|
|
22363
|
+
description: "Additional headers to send to the API."
|
|
22364
|
+
});
|
|
22365
|
+
}
|
|
21331
22366
|
return inputs;
|
|
21332
22367
|
},
|
|
21333
22368
|
getOutputDefinitions(data) {
|
|
@@ -21361,10 +22396,11 @@ var ChatGoogleNodeImpl = {
|
|
|
21361
22396
|
},
|
|
21362
22397
|
getBody(data) {
|
|
21363
22398
|
var _a;
|
|
21364
|
-
return
|
|
22399
|
+
return import_ts_dedent80.dedent`
|
|
21365
22400
|
${((_a = generativeAiGoogleModels[data.model]) == null ? void 0 : _a.displayName) ?? `Google (${data.model})`}
|
|
21366
22401
|
${data.useTopP ? `Top P: ${data.useTopPInput ? "(Using Input)" : data.top_p}` : `Temperature: ${data.useTemperatureInput ? "(Using Input)" : data.temperature}`}
|
|
21367
22402
|
Max Tokens: ${data.maxTokens}
|
|
22403
|
+
Thinking Budget: ${data.thinkingBudget ?? "Automatic"}
|
|
21368
22404
|
`;
|
|
21369
22405
|
},
|
|
21370
22406
|
getEditors() {
|
|
@@ -21409,6 +22445,17 @@ var ChatGoogleNodeImpl = {
|
|
|
21409
22445
|
max: Number.MAX_SAFE_INTEGER,
|
|
21410
22446
|
step: 1
|
|
21411
22447
|
},
|
|
22448
|
+
{
|
|
22449
|
+
type: "number",
|
|
22450
|
+
label: "Thinking Budget",
|
|
22451
|
+
dataKey: "thinkingBudget",
|
|
22452
|
+
allowEmpty: true,
|
|
22453
|
+
step: 1,
|
|
22454
|
+
min: 0,
|
|
22455
|
+
max: Number.MAX_SAFE_INTEGER,
|
|
22456
|
+
useInputToggleDataKey: "useThinkingBudgetInput",
|
|
22457
|
+
helperMessage: "The token budget for the model to think before responding. Leave blank for automatic budget."
|
|
22458
|
+
},
|
|
21412
22459
|
{
|
|
21413
22460
|
type: "toggle",
|
|
21414
22461
|
label: "Enable Tool Calling",
|
|
@@ -21423,12 +22470,20 @@ var ChatGoogleNodeImpl = {
       type: "toggle",
       label: "Use for subgraph partial output",
       dataKey: "useAsGraphPartialOutput"
+    },
+    {
+      type: "keyValuePair",
+      label: "Headers",
+      dataKey: "headers",
+      useInputToggleDataKey: "useHeadersInput",
+      keyPlaceholder: "Header",
+      helperMessage: "Additional headers to send to the API."
     }
   ];
 },
 getUIData() {
   return {
-    infoBoxBody:
+    infoBoxBody: import_ts_dedent80.dedent`
       Makes a call to an Google chat model. The settings contains many options for tweaking the model's behavior.
     `,
     infoBoxTitle: "Chat (Google) Node",
@@ -21444,6 +22499,7 @@ var ChatGoogleNodeImpl = {
   const temperature = getInputOrData(data, inputs, "temperature", "number");
   const topP = getInputOrData(data, inputs, "top_p", "number");
   const useTopP = getInputOrData(data, inputs, "useTopP", "boolean");
+  const thinkingBudget = getInputOrData(data, inputs, "thinkingBudget", "number");
   const { messages } = getChatGoogleNodeMessages(inputs);
   let prompt = await Promise.all(
     messages.map(async (message) => {
@@ -21542,8 +22598,8 @@ var ChatGoogleNodeImpl = {
       name: tool.name,
       description: tool.description,
       parameters: Object.keys(tool.parameters.properties).length === 0 ? void 0 : {
-        type:
-        properties: (0,
+        type: import_genai2.Type.OBJECT,
+        properties: (0, import_lodash_es21.mapValues)(tool.parameters.properties, (p) => ({
           // gemini doesn't support union property types, it uses openapi style not jsonschema, what a mess
           type: Array.isArray(p.type) ? p.type.filter((t) => t !== "null")[0] : p.type,
           description: p.description
@@ -21567,6 +22623,18 @@ var ChatGoogleNodeImpl = {
       throw new Error("Google Application Credentials or Google API Key is not defined.");
     }
   }
+  const headersFromData = (data.headers ?? []).reduce(
+    (acc, header) => {
+      acc[header.key] = header.value;
+      return acc;
+    },
+    {}
+  );
+  const additionalHeaders = data.useHeadersInput ? coerceTypeOptional(inputs["headers"], "object") ?? headersFromData : headersFromData;
+  const allAdditionalHeaders = cleanHeaders({
+    ...context.settings.chatNodeHeaders,
+    ...additionalHeaders
+  });
   try {
     return await (0, import_p_retry3.default)(
       async () => {
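
The block added at the end of this hunk resolves the request headers in three steps: the editor's key/value pairs are folded into an object, a wired headers input (when useHeadersInput is on) takes precedence over the editor values, and the result is spread over context.settings.chatNodeHeaders and passed through cleanHeaders. A standalone sketch of that precedence is below; cleanHeaders is a stand-in here (assumed to drop empty values), since its real implementation is not part of this hunk.

// Sketch of the header-merging order used above, with a stand-in cleanHeaders.
type HeaderPair = { key: string; value: string };

const cleanHeaders = (headers: Record<string, string | undefined>): Record<string, string> =>
  Object.fromEntries(
    Object.entries(headers).filter(([, v]) => v != null && v !== ""),
  ) as Record<string, string>;

function buildHeaders(
  settingsHeaders: Record<string, string> | undefined, // context.settings.chatNodeHeaders
  editorHeaders: HeaderPair[] | undefined,             // key/value pairs from the node editor
  inputHeaders: Record<string, string> | undefined,    // value of the "headers" input port, if wired
): Record<string, string> {
  const fromEditor = (editorHeaders ?? []).reduce<Record<string, string>>((acc, { key, value }) => {
    acc[key] = value;
    return acc;
  }, {});
  const additional = inputHeaders ?? fromEditor;       // the input port wins when enabled and connected
  return cleanHeaders({ ...settingsHeaders, ...additional }); // node-level headers override settings
}
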
@@ -21579,7 +22647,9 @@ var ChatGoogleNodeImpl = {
         maxOutputTokens: maxTokens,
         systemPrompt,
         topK: void 0,
-        tools
+        tools,
+        thinkingBudget,
+        additionalHeaders: allAdditionalHeaders
       };
       const cacheKey = JSON.stringify(options2);
       if (data.cache) {
@@ -21604,7 +22674,9 @@ var ChatGoogleNodeImpl = {
         topK: void 0,
         apiKey,
         systemPrompt,
-        tools
+        tools,
+        thinkingBudget,
+        additionalHeaders: allAdditionalHeaders
       });
     } else {
       chunks = streamChatCompletions3({
@@ -21622,6 +22694,15 @@ var ChatGoogleNodeImpl = {
     }
     const responseParts = [];
     const functionCalls = [];
+    let throttleLastCalledTime = Date.now();
+    const onPartialOutput = (output2) => {
+      var _a2;
+      const now = Date.now();
+      if (now - throttleLastCalledTime > (context.settings.throttleChatNode ?? 100)) {
+        (_a2 = context.onPartialOutputs) == null ? void 0 : _a2.call(context, output2);
+        throttleLastCalledTime = now;
+      }
+    };
     for await (const chunk of chunks) {
       if (chunk.completion) {
         responseParts.push(chunk.completion);
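
The new onPartialOutput wrapper throttles partial-output callbacks during streaming: an update is forwarded only when at least context.settings.throttleChatNode milliseconds (100 by default) have passed since the last forwarded one, and updates inside the window are dropped rather than queued, which is why the next hunk also adds an unconditional context.onPartialOutputs call after the loop to flush the final state. A generic sketch of the same leading-edge throttle, with names of my own choosing:

// Leading-edge throttle in the same spirit as the onPartialOutput wrapper above;
// calls arriving inside the interval are dropped, not deferred.
function throttle<T>(fn: (value: T) => void, intervalMs = 100): (value: T) => void {
  let last = Date.now();
  return (value: T) => {
    const now = Date.now();
    if (now - last > intervalMs) {
      fn(value);
      last = now;
    }
  };
}

// e.g. const emitPartial = throttle(sendPartialOutputs, settings.throttleChatNode ?? 100);
// (sendPartialOutputs and settings are placeholders for this example.)
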
@@ -21641,8 +22722,9 @@ var ChatGoogleNodeImpl = {
         }))
       };
     }
-
+    onPartialOutput == null ? void 0 : onPartialOutput(output);
   }
+  (_a = context.onPartialOutputs) == null ? void 0 : _a.call(context, output);
   const endTime = Date.now();
   output["all-messages"] = {
     type: "chat-message[]",
@@ -21794,7 +22876,7 @@ var plugins = {
 };

 // src/integrations/DatasetProvider.ts
-var
+var import_lodash_es22 = require("lodash");
 var InMemoryDatasetProvider = class {
   #datasets;
   constructor(datasets) {
@@ -21874,7 +22956,7 @@ var InMemoryDatasetProvider = class {
     return sorted.slice(0, k).map((r) => ({ ...r.row, distance: r.similarity }));
   }
   async exportDatasetsForProject(_projectId) {
-    return (0,
+    return (0, import_lodash_es22.cloneDeep)(this.#datasets);
   }
 };
 var dotProductSimilarity = (a, b) => {
@@ -21960,6 +23042,23 @@ async function* getProcessorEvents(processor, spec) {
     }
   }
 }
+var createOnStreamUserEvents = (eventList, handleUserEvent) => {
+  if (!(eventList == null ? void 0 : eventList.trim())) {
+    return void 0;
+  }
+  const events = eventList.split(",").map((e) => e.trim()).filter(Boolean);
+  if (!events.length) {
+    return void 0;
+  }
+  return Object.fromEntries(
+    events.map((event) => [
+      event,
+      async (data) => {
+        await handleUserEvent(event, data);
+      }
+    ])
+  );
+};
 function getProcessorSSEStream(processor, spec) {
   const encoder = new TextEncoder();
   function sendEvent(controller, type, data) {
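
createOnStreamUserEvents turns a comma-separated list of user event names into a map of async handlers, or returns undefined when the list is empty or blank. Since it is also added to the package exports further down, here is a usage sketch; the event names and the handler body are illustrative.

import { createOnStreamUserEvents } from "@alpic80/rivet-core";

// "progress" and "status" are made-up event names for this example.
const handlers = createOnStreamUserEvents("progress, status", async (event, data) => {
  console.log(`user event ${event}:`, data);
});
// handlers ~= { progress: async (data) => ..., status: async (data) => ... }
// An empty or whitespace-only list ("", "  ,  ") yields undefined instead of a map.
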
@@ -21971,6 +23070,18 @@ data: ${JSON.stringify(data)}
   }
   return new ReadableStream({
     async start(controller) {
+      const userEventHandler = async (eventName, data) => {
+        sendEvent(controller, "event", {
+          name: eventName,
+          message: coerceType(data, "string")
+        });
+      };
+      const streamEvents = createOnStreamUserEvents(spec.userStreamEvents, userEventHandler);
+      if (streamEvents) {
+        for (const [name, fn] of Object.entries(streamEvents)) {
+          processor.onUserEvent(name, fn);
+        }
+      }
       try {
         for await (const event of getProcessorEvents(processor, spec)) {
           sendEvent(controller, event.type, event);
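
With the block above, any events named in spec.userStreamEvents are registered on the processor and re-emitted on the SSE stream as "event" messages whose data is { name, message }, with the payload coerced to a string. A sketch of how a browser client might consume them; the /stream endpoint is an assumption, and only the SSE event type and payload shape come from the diff.

// Hypothetical client: the URL and the logging are assumptions for illustration.
const source = new EventSource("/stream");
source.addEventListener("event", (e: MessageEvent) => {
  const { name, message } = JSON.parse(e.data) as { name: string; message: string };
  console.log(`graph raised user event "${name}": ${message}`);
});
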
@@ -22125,6 +23236,7 @@ function coreCreateProcessor(project, options2) {
     nativeApi: options2.nativeApi,
     datasetProvider: options2.datasetProvider,
     audioProvider: options2.audioProvider,
+    mcpProvider: options2.mcpProvider,
     codeRunner: options2.codeRunner,
     projectPath: options2.projectPath,
     projectReferenceLoader: options2.projectReferenceLoader,
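
coreCreateProcessor now forwards an mcpProvider option into the processor context, next to the existing datasetProvider, audioProvider, and codeRunner. A sketch of passing one in; the MCPProvider shape lives in the new integrations/mcp/MCPProvider typings and is not reproduced here, and the graph option and run() handle are assumed to behave as in the public Rivet createProcessor API.

import { coreCreateProcessor } from "@alpic80/rivet-core";

declare const project: unknown;       // a loaded Rivet project (typing omitted in this sketch)
declare const myMcpProvider: unknown; // an object implementing the new MCPProvider interface

async function runWithMcp() {
  const { run } = coreCreateProcessor(project as any, {
    graph: "Main Graph",          // graph name or id, as in the public createProcessor options
    mcpProvider: myMcpProvider,   // forwarded into the processor context by this release
  } as any);                      // cast only because this sketch omits the full options type
  const outputs = await run();    // assumes the returned handle exposes run()
  console.log(outputs);
}
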
@@ -22223,6 +23335,10 @@ var Rivet = void 0;
   LoadDatasetNodeImpl,
   LoopControllerNodeImpl,
   LoopUntilNodeImpl,
+  MCPError,
+  MCPErrorType,
+  MCPGetPromptNodeImpl,
+  MCPToolCallNodeImpl,
   MatchNodeImpl,
   NodeImpl,
   NodeRegistration,
@@ -22295,6 +23411,7 @@ var Rivet = void 0;
   coreCreateProcessor,
   coreRunGraph,
   createDatasetNode,
+  createOnStreamUserEvents,
   cronNode,
   dataTypeDisplayNames,
   dataTypes,
@@ -22372,6 +23489,9 @@ var Rivet = void 0;
   looseDataValueToDataValue,
   looseDataValuesToDataValues,
   matchNode,
+  mcpDiscoveryNode,
+  mcpGetPromptNode,
+  mcpToolCallNode,
   newId,
   nodeDefinition,
   numberNode,
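
The last three hunks register the new MCP surface and createOnStreamUserEvents on the CommonJS export object. A sketch of importing them from the package follows; the identifiers are taken verbatim from the export hunks, while the try/catch usage around a graph run is an illustrative assumption.

// The imports below only demonstrate that these names are now exported;
// how they are used depends on the MCP provider wired into your processor context.
import {
  MCPError,
  MCPErrorType,
  MCPGetPromptNodeImpl,
  MCPToolCallNodeImpl,
  mcpDiscoveryNode,
  mcpGetPromptNode,
  mcpToolCallNode,
  createOnStreamUserEvents,
} from "@alpic80/rivet-core";

// Illustrative error handling for a graph run that hits an MCP Tool Call node.
async function runGuarded(run: () => Promise<unknown>) {
  try {
    return await run();
  } catch (err) {
    if (err instanceof MCPError) {
      console.error("MCP failure:", err.message);
    }
    throw err;
  }
}
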