@alpic80/rivet-core 1.24.0-aidon.5 → 1.24.2-aidon.10

This diff shows the changes between publicly available package versions as they appear in their respective public registries; it is provided for informational purposes only.
Files changed (59)
  1. package/README.md +9 -6
  2. package/dist/cjs/bundle.cjs +1534 -278
  3. package/dist/cjs/bundle.cjs.map +4 -4
  4. package/dist/esm/api/createProcessor.js +2 -0
  5. package/dist/esm/api/streaming.js +48 -2
  6. package/dist/esm/exports.js +1 -0
  7. package/dist/esm/integrations/CodeRunner.js +10 -2
  8. package/dist/esm/integrations/mcp/MCPBase.js +100 -0
  9. package/dist/esm/integrations/mcp/MCPProvider.js +23 -0
  10. package/dist/esm/integrations/mcp/MCPUtils.js +33 -0
  11. package/dist/esm/model/GraphProcessor.js +7 -1
  12. package/dist/esm/model/NodeRegistration.js +0 -1
  13. package/dist/esm/model/Nodes.js +9 -0
  14. package/dist/esm/model/nodes/ChatNodeBase.js +1 -1
  15. package/dist/esm/model/nodes/CodeNode.js +1 -1
  16. package/dist/esm/model/nodes/GetAllDatasetsNode.js +1 -1
  17. package/dist/esm/model/nodes/GraphInputNode.js +2 -0
  18. package/dist/esm/model/nodes/HttpCallNode.js +2 -2
  19. package/dist/esm/model/nodes/MCPDiscoveryNode.js +239 -0
  20. package/dist/esm/model/nodes/MCPGetPromptNode.js +262 -0
  21. package/dist/esm/model/nodes/MCPToolCallNode.js +290 -0
  22. package/dist/esm/model/nodes/ObjectNode.js +42 -21
  23. package/dist/esm/model/nodes/PromptNode.js +1 -1
  24. package/dist/esm/model/nodes/SubGraphNode.js +1 -0
  25. package/dist/esm/model/nodes/TextNode.js +13 -2
  26. package/dist/esm/plugins/aidon/nodes/ChatAidonNode.js +7 -5
  27. package/dist/esm/plugins/aidon/plugin.js +15 -0
  28. package/dist/esm/plugins/anthropic/anthropic.js +22 -3
  29. package/dist/esm/plugins/anthropic/nodes/ChatAnthropicNode.js +33 -3
  30. package/dist/esm/plugins/google/google.js +29 -14
  31. package/dist/esm/plugins/google/nodes/ChatGoogleNode.js +70 -5
  32. package/dist/esm/plugins/huggingface/nodes/ChatHuggingFace.js +4 -2
  33. package/dist/esm/plugins/huggingface/nodes/TextToImageHuggingFace.js +5 -3
  34. package/dist/esm/utils/interpolation.js +155 -17
  35. package/dist/esm/utils/openai.js +24 -0
  36. package/dist/types/api/createProcessor.d.ts +3 -2
  37. package/dist/types/api/streaming.d.ts +7 -1
  38. package/dist/types/exports.d.ts +1 -0
  39. package/dist/types/integrations/CodeRunner.d.ts +4 -3
  40. package/dist/types/integrations/mcp/MCPBase.d.ts +20 -0
  41. package/dist/types/integrations/mcp/MCPProvider.d.ts +153 -0
  42. package/dist/types/integrations/mcp/MCPUtils.d.ts +9 -0
  43. package/dist/types/model/GraphProcessor.d.ts +5 -1
  44. package/dist/types/model/Nodes.d.ts +13 -2
  45. package/dist/types/model/ProcessContext.d.ts +5 -1
  46. package/dist/types/model/Project.d.ts +2 -0
  47. package/dist/types/model/nodes/GetAllDatasetsNode.d.ts +2 -2
  48. package/dist/types/model/nodes/MCPDiscoveryNode.d.ts +9 -0
  49. package/dist/types/model/nodes/MCPGetPromptNode.d.ts +23 -0
  50. package/dist/types/model/nodes/MCPToolCallNode.d.ts +26 -0
  51. package/dist/types/model/nodes/ObjectNode.d.ts +3 -2
  52. package/dist/types/model/nodes/TextNode.d.ts +2 -1
  53. package/dist/types/plugins/anthropic/anthropic.d.ts +21 -3
  54. package/dist/types/plugins/anthropic/nodes/ChatAnthropicNode.d.ts +5 -0
  55. package/dist/types/plugins/google/google.d.ts +12 -2
  56. package/dist/types/plugins/google/nodes/ChatGoogleNode.d.ts +7 -0
  57. package/dist/types/utils/interpolation.d.ts +6 -1
  58. package/dist/types/utils/openai.d.ts +24 -0
  59. package/package.json +7 -7
@@ -89,6 +89,10 @@ __export(src_exports, {
  LoadDatasetNodeImpl: () => LoadDatasetNodeImpl,
  LoopControllerNodeImpl: () => LoopControllerNodeImpl,
  LoopUntilNodeImpl: () => LoopUntilNodeImpl,
+ MCPError: () => MCPError,
+ MCPErrorType: () => MCPErrorType,
+ MCPGetPromptNodeImpl: () => MCPGetPromptNodeImpl,
+ MCPToolCallNodeImpl: () => MCPToolCallNodeImpl,
  MatchNodeImpl: () => MatchNodeImpl,
  NodeImpl: () => NodeImpl,
  NodeRegistration: () => NodeRegistration,
@@ -161,6 +165,7 @@ __export(src_exports, {
  coreCreateProcessor: () => coreCreateProcessor,
  coreRunGraph: () => coreRunGraph,
  createDatasetNode: () => createDatasetNode,
+ createOnStreamUserEvents: () => createOnStreamUserEvents,
  cronNode: () => cronNode,
  dataTypeDisplayNames: () => dataTypeDisplayNames,
  dataTypes: () => dataTypes,
@@ -238,6 +243,9 @@ __export(src_exports, {
  looseDataValueToDataValue: () => looseDataValueToDataValue,
  looseDataValuesToDataValues: () => looseDataValuesToDataValues,
  matchNode: () => matchNode,
+ mcpDiscoveryNode: () => mcpDiscoveryNode,
+ mcpGetPromptNode: () => mcpGetPromptNode,
+ mcpToolCallNode: () => mcpToolCallNode,
  newId: () => newId,
  nodeDefinition: () => nodeDefinition,
  numberNode: () => numberNode,
@@ -352,6 +360,10 @@ __export(exports_exports, {
  LoadDatasetNodeImpl: () => LoadDatasetNodeImpl,
  LoopControllerNodeImpl: () => LoopControllerNodeImpl,
  LoopUntilNodeImpl: () => LoopUntilNodeImpl,
+ MCPError: () => MCPError,
+ MCPErrorType: () => MCPErrorType,
+ MCPGetPromptNodeImpl: () => MCPGetPromptNodeImpl,
+ MCPToolCallNodeImpl: () => MCPToolCallNodeImpl,
  MatchNodeImpl: () => MatchNodeImpl,
  NodeImpl: () => NodeImpl,
  NodeRegistration: () => NodeRegistration,
@@ -423,6 +435,7 @@ __export(exports_exports, {
  coreCreateProcessor: () => coreCreateProcessor,
  coreRunGraph: () => coreRunGraph,
  createDatasetNode: () => createDatasetNode,
+ createOnStreamUserEvents: () => createOnStreamUserEvents,
  cronNode: () => cronNode,
  dataTypeDisplayNames: () => dataTypeDisplayNames,
  dataTypes: () => dataTypes,
@@ -500,6 +513,9 @@ __export(exports_exports, {
  looseDataValueToDataValue: () => looseDataValueToDataValue,
  looseDataValuesToDataValues: () => looseDataValuesToDataValues,
  matchNode: () => matchNode,
+ mcpDiscoveryNode: () => mcpDiscoveryNode,
+ mcpGetPromptNode: () => mcpGetPromptNode,
+ mcpToolCallNode: () => mcpToolCallNode,
  newId: () => newId,
  nodeDefinition: () => nodeDefinition,
  numberNode: () => numberNode,
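The re-exports added above surface the new MCP node implementations and the streaming helper from the package root. A minimal sketch of what that enables (assuming the published entry point re-exports these symbols exactly as listed; names are taken from the export table, nothing else is implied):

    const {
      MCPError,
      MCPErrorType,
      mcpDiscoveryNode,
      mcpGetPromptNode,
      mcpToolCallNode,
      createOnStreamUserEvents,
    } = require("@alpic80/rivet-core");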
@@ -1782,10 +1798,10 @@ var DEFAULT_CHAT_ENDPOINT = "https://api.openai.com/v1/chat/completions";
  var DEFAULT_CHAT_NODE_TIMEOUT = 3e4;

  // src/model/GraphProcessor.ts
- var import_lodash_es15 = require("lodash");
+ var import_lodash_es16 = require("lodash");
  var import_p_queue = __toESM(require("p-queue-6"), 1);
  var import_emittery2 = __toESM(require("emittery-0-13"), 1);
- var import_non_secure76 = require("nanoid/non-secure");
+ var import_non_secure78 = require("nanoid/non-secure");
  var import_ts_pattern9 = require("ts-pattern");

  // src/model/NodeImpl.ts
@@ -2114,8 +2130,10 @@ var import_non_secure4 = require("nanoid/non-secure");
2114
2130
  var import_ts_dedent3 = require("ts-dedent");
2115
2131
 
2116
2132
  // src/utils/interpolation.ts
2117
- var TOKEN_MATCH_REGEX = /\{\{(?!\{)([^{}\s][^{}]*[^{}\s]|[^{}\s])\}\}(?!\})/g;
2118
- var ESCAPED_TOKEN_REGEX = /\{{3}([^{}]+)\}{3}/g;
2133
+ var import_lodash_es4 = require("lodash");
2134
+ var TOKEN_MATCH_REGEX = /\{\{([^}]+?)\}\}/g;
2135
+ var ESCAPED_TOKEN_REGEX = /\{\{\{([^}]+?)\}\}\}/g;
2136
+ var ESCAPED_ESCAPED_TOKEN_REGEX = /\\\{\\\{([^}]+?)\\\}\\\}/g;
2119
2137
  var processingFunctions = {
2120
2138
  indent: (input, spaces = 0) => {
2121
2139
  const indent = " ".repeat(spaces);
@@ -2167,6 +2185,68 @@ var processingFunctions = {
2167
2185
  return lines.join("\n");
2168
2186
  }
2169
2187
  };
2188
+ function unwrapPotentialDataValue(value) {
2189
+ if (typeof value === "object" && value !== null && typeof value.type === "string" && Object.prototype.hasOwnProperty.call(value, "value")) {
2190
+ return value.value;
2191
+ }
2192
+ return value;
2193
+ }
2194
+ function resolveExpressionRawValue(source, expression, sourceType) {
2195
+ if (!source) {
2196
+ return void 0;
2197
+ }
2198
+ const match13 = expression.trim().match(/^([^[.\s]+)\s*(.*)$/);
2199
+ let key;
2200
+ let path;
2201
+ if (match13 && typeof match13[1] === "string") {
2202
+ key = match13[1];
2203
+ const rawPath = match13[2];
2204
+ if (rawPath) {
2205
+ path = rawPath.trim().replace(/\s*(\.|\[|\])\s*/g, "$1");
2206
+ } else {
2207
+ path = void 0;
2208
+ }
2209
+ } else {
2210
+ key = expression.trim();
2211
+ path = void 0;
2212
+ }
2213
+ if (!key) {
2214
+ return void 0;
2215
+ }
2216
+ const topLevelValue = source[key];
2217
+ if (topLevelValue === void 0) {
2218
+ return void 0;
2219
+ }
2220
+ const baseValue = unwrapPotentialDataValue(topLevelValue);
2221
+ let finalValue;
2222
+ if (path) {
2223
+ try {
2224
+ finalValue = (0, import_lodash_es4.get)(baseValue, path);
2225
+ finalValue = unwrapPotentialDataValue(finalValue);
2226
+ } catch (error) {
2227
+ console.warn(`Error accessing path "${path}" in ${sourceType} value for key "${key}":`, error);
2228
+ return void 0;
2229
+ }
2230
+ } else {
2231
+ finalValue = baseValue;
2232
+ }
2233
+ return finalValue;
2234
+ }
2235
+ function resolveExpressionToString(source, expression, sourceType) {
2236
+ const finalValue = resolveExpressionRawValue(source, expression, sourceType);
2237
+ if (finalValue === void 0) {
2238
+ return void 0;
2239
+ }
2240
+ if (typeof finalValue === "object" && finalValue !== null) {
2241
+ try {
2242
+ return JSON.stringify(finalValue);
2243
+ } catch (error) {
2244
+ console.warn(`Error stringifying object/array in ${sourceType} for expression "${expression}":`, error);
2245
+ return "[object Object]";
2246
+ }
2247
+ }
2248
+ return String(finalValue);
2249
+ }
2170
2250
  function parseProcessing(instruction) {
2171
2251
  const parts = instruction.trim().split(/\s+/);
2172
2252
  return {
@@ -2186,26 +2266,57 @@ function applyProcessing(value, processingChain) {
2186
2266
  return processingFunc(result, param);
2187
2267
  }, value);
2188
2268
  }
2189
- function interpolate(baseString, values2) {
2190
- return baseString.replace(TOKEN_MATCH_REGEX, (_m, p1) => {
2191
- const [token, ...processing] = p1.split("|");
2192
- const value = values2[token.trim()];
2193
- if (value === void 0)
2269
+ function interpolate(template, variables, graphInputValues, contextValues) {
2270
+ return template.replace(ESCAPED_TOKEN_REGEX, (_match, expression) => {
2271
+ return `\\{\\{${expression}\\}\\}`;
2272
+ }).replace(/\{\{((?:@graphInputs|@context)\..*?|[^}]+?)\}\}/g, (_match, expressionWithMaybeProcessing) => {
2273
+ const parts = expressionWithMaybeProcessing.split("|").map((s) => s.trim());
2274
+ const expression = parts[0];
2275
+ const processingChain = parts.slice(1).join("|");
2276
+ let resolvedValue;
2277
+ if (expression.startsWith("@graphInputs.")) {
2278
+ resolvedValue = resolveExpressionToString(
2279
+ graphInputValues,
2280
+ expression.substring("@graphInputs.".length),
2281
+ "graphInputs"
2282
+ );
2283
+ } else if (expression.startsWith("@context.")) {
2284
+ resolvedValue = resolveExpressionToString(contextValues, expression.substring("@context.".length), "context");
2285
+ } else {
2286
+ const simpleVar = variables[expression];
2287
+ if (simpleVar !== void 0) {
2288
+ resolvedValue = String(unwrapPotentialDataValue(simpleVar) ?? "");
2289
+ } else {
2290
+ resolvedValue = void 0;
2291
+ }
2292
+ }
2293
+ if (resolvedValue === void 0) {
2294
+ console.warn(`Interpolation variable or path "${expression}" not found or resolved to undefined.`);
2194
2295
  return "";
2195
- if (processing.length > 0) {
2196
- return applyProcessing(value, p1);
2197
2296
  }
2198
- return value;
2199
- }).replace(ESCAPED_TOKEN_REGEX, (_m, p1) => {
2200
- return `{{${p1}}}`;
2297
+ if (processingChain) {
2298
+ return applyProcessing(resolvedValue, processingChain);
2299
+ }
2300
+ return resolvedValue;
2301
+ }).replace(ESCAPED_ESCAPED_TOKEN_REGEX, (_match, expression) => {
2302
+ return `{{${expression}}}`;
2201
2303
  });
2202
2304
  }
2203
2305
  function extractInterpolationVariables(template) {
2204
- const matches = template.matchAll(TOKEN_MATCH_REGEX);
2306
+ const matches = template.replace(ESCAPED_TOKEN_REGEX, (_match, content) => {
2307
+ return `\\{\\{${content}\\}\\}`;
2308
+ }).matchAll(TOKEN_MATCH_REGEX);
2205
2309
  const variables = /* @__PURE__ */ new Set();
2206
2310
  for (const match13 of matches) {
2207
- const [token] = match13[1].split("|");
2208
- variables.add(token.trim());
2311
+ if (match13[1]) {
2312
+ const [tokenPart] = match13[1].split("|");
2313
+ if (tokenPart) {
2314
+ const token = tokenPart.trim();
2315
+ if (!token.startsWith("@graphInputs.") && !token.startsWith("@context.")) {
2316
+ variables.add(token);
2317
+ }
2318
+ }
2319
+ }
2209
2320
  }
2210
2321
  return Array.from(variables);
2211
2322
  }
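Taken together, the rewritten interpolation helpers accept two extra sources besides plain node inputs: `@graphInputs.` and `@context.` expressions (resolved with lodash get-style paths), with `{{{...}}}` still escaping to literal braces and unresolved tokens collapsing to an empty string plus a console warning. A minimal sketch of the new call shape, with illustrative values only:

    // Hypothetical values; DataValue wrappers ({ type, value }) are unwrapped automatically.
    const result = interpolate(
      "Hi {{name}} / {{@graphInputs.userName}} ({{@context.env}}), literal: {{{name}}}",
      { name: "Ada" },                                   // per-node input map
      { userName: { type: "string", value: "Grace" } },  // graph input values collected at runtime
      { env: "prod" },                                   // context values
    );
    // => "Hi Ada / Grace (prod), literal: {{name}}"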
@@ -2223,7 +2334,9 @@ var TextNodeImpl = class extends NodeImpl {
2223
2334
  width: 300
2224
2335
  },
2225
2336
  data: {
2226
- text: "{{input}}"
2337
+ text: "{{input}}",
2338
+ normalizeLineEndings: true
2339
+ // Default to true for better compatibility
2227
2340
  }
2228
2341
  };
2229
2342
  return chartNode;
@@ -2263,6 +2376,12 @@ var TextNodeImpl = class extends NodeImpl {
2263
2376
  dataKey: "text",
2264
2377
  language: "prompt-interpolation-markdown",
2265
2378
  theme: "prompt-interpolation"
2379
+ },
2380
+ {
2381
+ type: "toggle",
2382
+ label: "Normalize Line Endings",
2383
+ dataKey: "normalizeLineEndings",
2384
+ helperMessage: "Normalize line endings to use only LF (\\n) instead of CRLF (\\r\\n)."
2266
2385
  }
2267
2386
  ];
2268
2387
  }
@@ -2275,7 +2394,7 @@ var TextNodeImpl = class extends NodeImpl {
2275
2394
  text: truncated
2276
2395
  };
2277
2396
  }
2278
- async process(inputs) {
2397
+ async process(inputs, context) {
2279
2398
  const inputMap = Object.keys(inputs).reduce(
2280
2399
  (acc, key) => {
2281
2400
  const stringValue = coerceTypeOptional(inputs[key], "string") ?? "";
@@ -2284,7 +2403,17 @@ var TextNodeImpl = class extends NodeImpl {
2284
2403
  },
2285
2404
  {}
2286
2405
  );
2287
- const outputValue = interpolate(this.chartNode.data.text, inputMap);
2406
+ let outputValue = interpolate(
2407
+ this.chartNode.data.text,
2408
+ inputMap,
2409
+ context.graphInputNodeValues,
2410
+ // Pass graph inputs
2411
+ context.contextValues
2412
+ // Pass context values
2413
+ );
2414
+ if (this.chartNode.data.normalizeLineEndings) {
2415
+ outputValue = outputValue.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
2416
+ }
2288
2417
  return {
2289
2418
  output: {
2290
2419
  type: "string",
@@ -2326,7 +2455,7 @@ __export(openai_exports, {
2326
2455
  openaiModels: () => openaiModels,
2327
2456
  streamChatCompletions: () => streamChatCompletions
2328
2457
  });
2329
- var import_lodash_es4 = require("lodash");
2458
+ var import_lodash_es5 = require("lodash");
2330
2459
 
2331
2460
  // src/utils/fetchEventSource.ts
2332
2461
  var EventSourceResponse = class extends Response {
@@ -2629,6 +2758,30 @@ var openaiModels = {
2629
2758
  },
2630
2759
  displayName: "GPT-4o Audio (Preview)"
2631
2760
  },
2761
+ "gpt-4.1": {
2762
+ maxTokens: 1047576,
2763
+ cost: {
2764
+ prompt: 2e-6,
2765
+ completion: 8e-6
2766
+ },
2767
+ displayName: "GPT-4.1"
2768
+ },
2769
+ o3: {
2770
+ maxTokens: 2e5,
2771
+ cost: {
2772
+ prompt: 1e-5,
2773
+ completion: 4e-5
2774
+ },
2775
+ displayName: "o3"
2776
+ },
2777
+ "o4-mini": {
2778
+ maxTokens: 2e5,
2779
+ cost: {
2780
+ prompt: 11e-7,
2781
+ completion: 44e-7
2782
+ },
2783
+ displayName: "o4-mini"
2784
+ },
2632
2785
  "local-model": {
2633
2786
  maxTokens: Number.MAX_SAFE_INTEGER,
2634
2787
  cost: {
@@ -2638,7 +2791,7 @@ var openaiModels = {
2638
2791
  displayName: "Local Model"
2639
2792
  }
2640
2793
  };
2641
- var openAiModelOptions = (0, import_lodash_es4.orderBy)(
2794
+ var openAiModelOptions = (0, import_lodash_es5.orderBy)(
2642
2795
  Object.entries(openaiModels).map(([id, { displayName }]) => ({
2643
2796
  value: id,
2644
2797
  label: displayName
@@ -3529,7 +3682,7 @@ var ChatNodeBase = {
3529
3682
  })
3530
3683
  );
3531
3684
  const { messages } = getChatNodeMessages(inputs);
3532
- const isReasoningModel = finalModel.startsWith("o1") || finalModel.startsWith("o3");
3685
+ const isReasoningModel = finalModel.startsWith("o1") || finalModel.startsWith("o3") || finalModel.startsWith("o4");
3533
3686
  const completionMessages = await Promise.all(
3534
3687
  messages.map((message) => chatMessageToOpenAIChatCompletionMessage(message, { isReasoningModel }))
3535
3688
  );
@@ -4079,7 +4232,7 @@ var chatNode = nodeDefinition(ChatNodeImpl, "Chat");
4079
4232
 
4080
4233
  // src/model/nodes/PromptNode.ts
4081
4234
  var import_non_secure6 = require("nanoid/non-secure");
4082
- var import_lodash_es5 = require("lodash");
4235
+ var import_lodash_es6 = require("lodash");
4083
4236
  var import_ts_dedent5 = require("ts-dedent");
4084
4237
  var import_ts_pattern4 = require("ts-pattern");
4085
4238
  var PromptNodeImpl = class extends NodeImpl {
@@ -4248,8 +4401,13 @@ var PromptNodeImpl = class extends NodeImpl {
4248
4401
  };
4249
4402
  }
4250
4403
  async process(inputs, context) {
4251
- const inputMap = (0, import_lodash_es5.mapValues)(inputs, (input) => coerceType(input, "string"));
4252
- const outputValue = interpolate(this.chartNode.data.promptText, inputMap);
4404
+ const inputMap = (0, import_lodash_es6.mapValues)(inputs, (input) => coerceType(input, "string"));
4405
+ const outputValue = interpolate(
4406
+ this.chartNode.data.promptText,
4407
+ inputMap,
4408
+ context.graphInputNodeValues,
4409
+ context.contextValues
4410
+ );
4253
4411
  const type = getInputOrData(this.data, inputs, "type", "string");
4254
4412
  const isCacheBreakpoint = getInputOrData(this.data, inputs, "isCacheBreakpoint", "boolean");
4255
4413
  if (["assistant", "system", "user", "function"].includes(type) === false) {
@@ -4650,13 +4808,19 @@ var CodeNodeImpl = class extends NodeImpl {
4650
4808
  };
4651
4809
  }
4652
4810
  async process(inputs, context) {
4653
- const outputs = await context.codeRunner.runCode(this.data.code, inputs, {
4654
- includeFetch: this.data.allowFetch ?? false,
4655
- includeRequire: this.data.allowRequire ?? false,
4656
- includeRivet: this.data.allowRivet ?? false,
4657
- includeProcess: this.data.allowProcess ?? false,
4658
- includeConsole: this.data.allowConsole ?? false
4659
- });
4811
+ const outputs = await context.codeRunner.runCode(
4812
+ this.data.code,
4813
+ inputs,
4814
+ {
4815
+ includeFetch: this.data.allowFetch ?? false,
4816
+ includeRequire: this.data.allowRequire ?? false,
4817
+ includeRivet: this.data.allowRivet ?? false,
4818
+ includeProcess: this.data.allowProcess ?? false,
4819
+ includeConsole: this.data.allowConsole ?? false
4820
+ },
4821
+ context.graphInputNodeValues,
4822
+ context.contextValues
4823
+ );
4660
4824
  if (outputs == null || typeof outputs !== "object" || "then" in outputs && typeof outputs.then === "function") {
4661
4825
  throw new Error("Code node must return an object with output values.");
4662
4826
  }
@@ -5820,6 +5984,7 @@ var GraphInputNodeImpl = class extends NodeImpl {
5820
5984
  type: this.data.dataType,
5821
5985
  value: inputValue
5822
5986
  };
5987
+ context.graphInputNodeValues[this.data.id] = value;
5823
5988
  return { ["data"]: value };
5824
5989
  }
5825
5990
  };
@@ -6079,6 +6244,7 @@ var SubGraphNodeImpl = class extends NodeImpl {
6079
6244
  }
6080
6245
  return outputs;
6081
6246
  } catch (err) {
6247
+ console.error(`Error Processing subgraph: ${getError(err).message}`);
6082
6248
  if (!this.data.useErrorOutput) {
6083
6249
  throw err;
6084
6250
  }
@@ -6104,7 +6270,7 @@ var subGraphNode = nodeDefinition(SubGraphNodeImpl, "Subgraph");
6104
6270
 
6105
6271
  // src/model/nodes/ArrayNode.ts
6106
6272
  var import_non_secure19 = require("nanoid/non-secure");
6107
- var import_lodash_es6 = require("lodash");
6273
+ var import_lodash_es7 = require("lodash");
6108
6274
  var import_ts_dedent18 = require("ts-dedent");
6109
6275
  var ArrayNodeImpl = class extends NodeImpl {
6110
6276
  static create() {
@@ -6210,7 +6376,7 @@ var ArrayNodeImpl = class extends NodeImpl {
6210
6376
  if (Array.isArray(input == null ? void 0 : input.value)) {
6211
6377
  for (const value of (input == null ? void 0 : input.value) ?? []) {
6212
6378
  if (this.data.flattenDeep) {
6213
- outputArray.push(...Array.isArray(value) ? (0, import_lodash_es6.flattenDeep)(value) : [value]);
6379
+ outputArray.push(...Array.isArray(value) ? (0, import_lodash_es7.flattenDeep)(value) : [value]);
6214
6380
  } else {
6215
6381
  outputArray.push(value);
6216
6382
  }
@@ -6350,7 +6516,7 @@ var extractJsonNode = nodeDefinition(ExtractJsonNodeImpl, "Extract JSON");
6350
6516
 
6351
6517
  // src/model/nodes/AssemblePromptNode.ts
6352
6518
  var import_non_secure21 = require("nanoid/non-secure");
6353
- var import_lodash_es7 = require("lodash");
6519
+ var import_lodash_es8 = require("lodash");
6354
6520
  var import_ts_dedent20 = require("ts-dedent");
6355
6521
  var AssemblePromptNodeImpl = class extends NodeImpl {
6356
6522
  static create() {
@@ -6457,7 +6623,7 @@ var AssemblePromptNodeImpl = class extends NodeImpl {
6457
6623
  const output = {};
6458
6624
  const isLastMessageCacheBreakpoint = getInputOrData(this.data, inputs, "isLastMessageCacheBreakpoint", "boolean");
6459
6625
  const outMessages = [];
6460
- const inputMessages = (0, import_lodash_es7.orderBy)(
6626
+ const inputMessages = (0, import_lodash_es8.orderBy)(
6461
6627
  Object.entries(inputs).filter(([key]) => key.startsWith("message")),
6462
6628
  ([key]) => key,
6463
6629
  "asc"
@@ -7011,7 +7177,7 @@ var trimChatMessagesNode = nodeDefinition(TrimChatMessagesNodeImpl, "Trim Chat M
7011
7177
 
7012
7178
  // src/model/nodes/ExternalCallNode.ts
7013
7179
  var import_non_secure25 = require("nanoid/non-secure");
7014
- var import_lodash_es8 = require("lodash");
7180
+ var import_lodash_es9 = require("lodash");
7015
7181
  var import_ts_dedent24 = require("ts-dedent");
7016
7182
  var ExternalCallNodeImpl = class extends NodeImpl {
7017
7183
  static create() {
@@ -7106,7 +7272,7 @@ var ExternalCallNodeImpl = class extends NodeImpl {
7106
7272
  }
7107
7273
  }
7108
7274
  const fn = context.externalFunctions[functionName];
7109
- const externalContext = (0, import_lodash_es8.omit)(context, ["setGlobal"]);
7275
+ const externalContext = (0, import_lodash_es9.omit)(context, ["setGlobal"]);
7110
7276
  if (!fn) {
7111
7277
  if (this.data.useErrorOutput) {
7112
7278
  return {
@@ -9110,7 +9276,7 @@ var joinNode = nodeDefinition(JoinNodeImpl, "Coalesce");
9110
9276
 
9111
9277
  // src/model/nodes/FilterNode.ts
9112
9278
  var import_non_secure45 = require("nanoid/non-secure");
9113
- var import_lodash_es9 = require("lodash");
9279
+ var import_lodash_es10 = require("lodash");
9114
9280
  var import_ts_dedent44 = require("ts-dedent");
9115
9281
  var FilterNodeImpl = class extends NodeImpl {
9116
9282
  static create() {
@@ -9166,7 +9332,7 @@ var FilterNodeImpl = class extends NodeImpl {
9166
9332
  var _a;
9167
9333
  const array = coerceType(inputs["array"], "any[]");
9168
9334
  const include = coerceType(inputs["include"], "boolean[]");
9169
- const zipped = (0, import_lodash_es9.zip)(array, include);
9335
+ const zipped = (0, import_lodash_es10.zip)(array, include);
9170
9336
  const filtered = zipped.filter(([_, include2]) => include2).map(([value, _]) => value);
9171
9337
  return {
9172
9338
  ["filtered"]: {
@@ -9202,16 +9368,18 @@ var ObjectNodeImpl = class extends NodeImpl {
9202
9368
  return chartNode;
9203
9369
  }
9204
9370
  getInputDefinitions() {
9205
- const inputNames = [...new Set(this.chartNode.data.jsonTemplate.match(/\{\{([^}]+)\}\}/g))];
9206
- return (inputNames == null ? void 0 : inputNames.map((inputName) => {
9371
+ const jsonTemplate = this.chartNode.data.jsonTemplate ?? "";
9372
+ const matches = jsonTemplate.match(/\{\{([^}]+?)\}\}/g);
9373
+ const allTokens = matches ?? [];
9374
+ const inputTokens = allTokens.map((token) => token.slice(2, -2).trim()).filter((tokenContent) => !tokenContent.startsWith("@graphInputs.") && !tokenContent.startsWith("@context.")).filter((token) => token !== "");
9375
+ return [...new Set(inputTokens)].map((inputName) => {
9207
9376
  return {
9208
- // id and title should not have the {{ and }}
9209
- id: inputName.slice(2, -2),
9210
- title: inputName.slice(2, -2),
9377
+ id: inputName,
9378
+ title: inputName,
9211
9379
  dataType: "any",
9212
9380
  required: false
9213
9381
  };
9214
- })) ?? [];
9382
+ });
9215
9383
  }
9216
9384
  getOutputDefinitions() {
9217
9385
  return [
@@ -9252,10 +9420,28 @@ var ObjectNodeImpl = class extends NodeImpl {
9252
9420
  group: ["Objects"]
9253
9421
  };
9254
9422
  }
9255
- interpolate(baseString, values2) {
9256
- return baseString.replace(/("?)\{\{([^}]+)\}\}("?)/g, (_m, openQuote, key, _closeQuote) => {
9423
+ interpolate(baseString, values2, graphInputNodeValues, contextValues) {
9424
+ return baseString.replace(/("?)\{\{([^}]+?)\}\}("?)/g, (_m, openQuote, key, _closeQuote) => {
9257
9425
  const isQuoted = Boolean(openQuote);
9258
- const value = values2[key];
9426
+ const trimmedKey = key.trim();
9427
+ let value;
9428
+ const graphInputPrefix = "@graphInputs.";
9429
+ const contextPrefix = "@context.";
9430
+ if (trimmedKey.startsWith(graphInputPrefix) && graphInputNodeValues) {
9431
+ value = resolveExpressionRawValue(
9432
+ graphInputNodeValues,
9433
+ trimmedKey.substring(graphInputPrefix.length),
9434
+ "graphInputs"
9435
+ );
9436
+ } else if (trimmedKey.startsWith(contextPrefix) && contextValues) {
9437
+ value = resolveExpressionRawValue(
9438
+ contextValues,
9439
+ trimmedKey.substring(contextPrefix.length),
9440
+ "context"
9441
+ );
9442
+ } else {
9443
+ value = values2[trimmedKey];
9444
+ }
9259
9445
  if (value == null) {
9260
9446
  return "null";
9261
9447
  }
@@ -9268,27 +9454,32 @@ var ObjectNodeImpl = class extends NodeImpl {
9268
9454
  return JSON.stringify(value);
9269
9455
  });
9270
9456
  }
9271
- async process(inputs) {
9457
+ async process(inputs, context) {
9272
9458
  const inputMap = Object.keys(inputs).reduce(
9273
9459
  (acc, key) => {
9274
- var _a;
9275
- acc[key] = (_a = inputs[key]) == null ? void 0 : _a.value;
9460
+ acc[key] = unwrapPotentialDataValue(inputs[key]);
9276
9461
  return acc;
9277
9462
  },
9278
9463
  {}
9279
9464
  );
9280
- const outputValue = JSON.parse(this.interpolate(this.chartNode.data.jsonTemplate, inputMap));
9281
- if (Array.isArray(outputValue)) {
9282
- return {
9283
- output: {
9284
- type: "object[]",
9285
- value: outputValue
9286
- }
9287
- };
9465
+ const interpolatedString = this.interpolate(
9466
+ this.chartNode.data.jsonTemplate,
9467
+ inputMap,
9468
+ context.graphInputNodeValues,
9469
+ // Pass graph inputs
9470
+ context.contextValues
9471
+ // Pass context values
9472
+ );
9473
+ let outputValue;
9474
+ try {
9475
+ outputValue = JSON.parse(interpolatedString);
9476
+ } catch (err) {
9477
+ throw new Error(`Failed to parse JSON template: ${err.message}`);
9288
9478
  }
9479
+ const outputType = Array.isArray(outputValue) ? "object[]" : "object";
9289
9480
  return {
9290
9481
  output: {
9291
- type: "object",
9482
+ type: outputType,
9292
9483
  value: outputValue
9293
9484
  }
9294
9485
  };
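With the Object node's interpolation now aware of `@graphInputs.` and `@context.` tokens, a jsonTemplate along these lines (names are illustrative only) resolves before JSON.parse runs:

    {
      "user": "{{name}}",
      "settings": {{@graphInputs.appSettings}}
    }

If the interpolated text is a JSON array the node now emits an "object[]" output instead of always "object", and a malformed template surfaces as a "Failed to parse JSON template" error carrying the underlying message rather than a bare JSON.parse exception.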
@@ -9364,7 +9555,7 @@ var booleanNode = nodeDefinition(BooleanNodeImpl, "Boolean");
9364
9555
 
9365
9556
  // src/model/nodes/CompareNode.ts
9366
9557
  var import_non_secure48 = require("nanoid/non-secure");
9367
- var import_lodash_es10 = require("lodash");
9558
+ var import_lodash_es11 = require("lodash");
9368
9559
  var import_ts_pattern6 = require("ts-pattern");
9369
9560
  var import_ts_dedent47 = require("ts-dedent");
9370
9561
  var CompareNodeImpl = class extends NodeImpl {
@@ -9471,7 +9662,7 @@ var CompareNodeImpl = class extends NodeImpl {
9471
9662
  return {
9472
9663
  ["output"]: {
9473
9664
  type: "boolean",
9474
- value: (0, import_ts_pattern6.match)(comparisonFunction).with("==", () => (0, import_lodash_es10.isEqual)(value1, value2)).with("!=", () => !(0, import_lodash_es10.isEqual)(value1, value2)).with("<", () => value1 < value2).with(">", () => value1 > value2).with("<=", () => value1 <= value2).with(">=", () => value1 >= value2).with("and", () => !!(value1 && value2)).with("or", () => !!(value1 || value2)).with("xor", () => !!(value1 ? !value2 : value2)).with("nand", () => !(value1 && value2)).with("nor", () => !(value1 || value2)).with("xnor", () => !(value1 ? !value2 : value2)).exhaustive()
9665
+ value: (0, import_ts_pattern6.match)(comparisonFunction).with("==", () => (0, import_lodash_es11.isEqual)(value1, value2)).with("!=", () => !(0, import_lodash_es11.isEqual)(value1, value2)).with("<", () => value1 < value2).with(">", () => value1 > value2).with("<=", () => value1 <= value2).with(">=", () => value1 >= value2).with("and", () => !!(value1 && value2)).with("or", () => !!(value1 || value2)).with("xor", () => !!(value1 ? !value2 : value2)).with("nand", () => !(value1 && value2)).with("nor", () => !(value1 || value2)).with("xnor", () => !(value1 ? !value2 : value2)).exhaustive()
9475
9666
  }
9476
9667
  };
9477
9668
  }
@@ -9780,7 +9971,7 @@ var randomNumberNode = nodeDefinition(RandomNumberNodeImpl, "Random Number");
9780
9971
 
9781
9972
  // src/model/nodes/ShuffleNode.ts
9782
9973
  var import_non_secure52 = require("nanoid/non-secure");
9783
- var import_lodash_es11 = require("lodash");
9974
+ var import_lodash_es12 = require("lodash");
9784
9975
  var import_ts_dedent51 = require("ts-dedent");
9785
9976
  var ShuffleNodeImpl = class extends NodeImpl {
9786
9977
  static create() {
@@ -9829,7 +10020,7 @@ var ShuffleNodeImpl = class extends NodeImpl {
9829
10020
  var _a;
9830
10021
  const input = inputs["array"];
9831
10022
  const items = input ? isArrayDataValue(input) ? input.value : [input.value] : [];
9832
- const shuffled = (0, import_lodash_es11.shuffle)(items);
10023
+ const shuffled = (0, import_lodash_es12.shuffle)(items);
9833
10024
  return {
9834
10025
  ["shuffled"]: {
9835
10026
  type: ((_a = inputs["array"]) == null ? void 0 : _a.type) ?? "any[]",
@@ -10356,8 +10547,9 @@ var HttpCallNodeImpl = class extends NodeImpl {
10356
10547
  ];
10357
10548
  }
10358
10549
  getBody() {
10550
+ var _a;
10359
10551
  return import_ts_dedent.dedent`
10360
- ${this.data.useMethodInput ? "(Method Using Input)" : this.data.method} ${this.data.useUrlInput ? "(URL Using Input)" : this.data.url} ${this.data.useHeadersInput ? "\nHeaders: (Using Input)" : this.data.headers.trim() ? `
10552
+ ${this.data.useMethodInput ? "(Method Using Input)" : this.data.method} ${this.data.useUrlInput ? "(URL Using Input)" : this.data.url} ${this.data.useHeadersInput ? "\nHeaders: (Using Input)" : ((_a = this.data.headers) == null ? void 0 : _a.trim()) ? `
10361
10553
  Headers: ${this.data.headers}` : ""}${this.data.useBodyInput ? "\nBody: (Using Input)" : this.data.body.trim() ? `
10362
10554
  Body: ${this.data.body}` : ""}${this.data.errorOnNon200 ? "\nError on non-200" : ""}
10363
10555
  `;
@@ -10373,7 +10565,7 @@ Body: ${this.data.body}` : ""}${this.data.errorOnNon200 ? "\nError on non-200" :
10373
10565
  };
10374
10566
  }
10375
10567
  async process(inputs, context) {
10376
- var _a;
10568
+ var _a, _b;
10377
10569
  const method = getInputOrData(this.data, inputs, "method", "string");
10378
10570
  const url = getInputOrData(this.data, inputs, "url", "string");
10379
10571
  try {
@@ -10391,7 +10583,7 @@ Body: ${this.data.body}` : ""}${this.data.errorOnNon200 ? "\nError on non-200" :
10391
10583
  } else {
10392
10584
  headers = coerceType(headersInput, "object");
10393
10585
  }
10394
- } else if (this.data.headers.trim()) {
10586
+ } else if ((_a = this.data.headers) == null ? void 0 : _a.trim()) {
10395
10587
  headers = JSON.parse(this.data.headers);
10396
10588
  }
10397
10589
  let body;
@@ -10437,7 +10629,7 @@ Body: ${this.data.body}` : ""}${this.data.errorOnNon200 ? "\nError on non-200" :
10437
10629
  type: "string",
10438
10630
  value: responseText
10439
10631
  };
10440
- if ((_a = response.headers.get("content-type")) == null ? void 0 : _a.includes("application/json")) {
10632
+ if ((_b = response.headers.get("content-type")) == null ? void 0 : _b.includes("application/json")) {
10441
10633
  const jsonData = JSON.parse(responseText);
10442
10634
  output["json"] = {
10443
10635
  type: "object",
@@ -10871,7 +11063,7 @@ var GetAllDatasetsNodeImpl = class extends NodeImpl {
10871
11063
  getEditors() {
10872
11064
  return [];
10873
11065
  }
10874
- async process(context) {
11066
+ async process(_inputs, context) {
10875
11067
  const { datasetProvider } = context;
10876
11068
  if (datasetProvider == null) {
10877
11069
  throw new Error("datasetProvider is required");
@@ -11374,7 +11566,7 @@ var extractMarkdownCodeBlocksNode = nodeDefinition(
11374
11566
 
11375
11567
  // src/model/nodes/AssembleMessageNode.ts
11376
11568
  var import_non_secure62 = require("nanoid/non-secure");
11377
- var import_lodash_es12 = require("lodash");
11569
+ var import_lodash_es13 = require("lodash");
11378
11570
  var import_ts_pattern8 = require("ts-pattern");
11379
11571
  var messageTypeToTitle = {
11380
11572
  assistant: "Assistant",
@@ -11531,7 +11723,7 @@ var AssembleMessageNodeImpl = class extends NodeImpl {
11531
11723
  ).otherwise(() => {
11532
11724
  throw new Error(`Invalid type: ${type}`);
11533
11725
  });
11534
- const inputParts = (0, import_lodash_es12.orderBy)(
11726
+ const inputParts = (0, import_lodash_es13.orderBy)(
11535
11727
  Object.entries(inputs).filter(([key]) => key.startsWith("part")),
11536
11728
  ([key]) => key,
11537
11729
  "asc"
@@ -12042,9 +12234,9 @@ var import_non_secure67 = require("nanoid/non-secure");
12042
12234
  var import_ts_dedent61 = require("ts-dedent");
12043
12235
 
12044
12236
  // src/api/looseDataValue.ts
12045
- var import_lodash_es13 = require("lodash");
12237
+ var import_lodash_es14 = require("lodash");
12046
12238
  function looseDataValuesToDataValues(values2) {
12047
- return (0, import_lodash_es13.mapValues)(values2, (val) => looseDataValueToDataValue(val));
12239
+ return (0, import_lodash_es14.mapValues)(values2, (val) => looseDataValueToDataValue(val));
12048
12240
  }
12049
12241
  function looseDataValueToDataValue(value) {
12050
12242
  if (typeof value === "string") {
@@ -13278,7 +13470,7 @@ var cronNode = nodeDefinition(CronNodeImpl, "Cron");
13278
13470
  // src/model/nodes/ToTreeNode.ts
13279
13471
  var import_non_secure73 = require("nanoid/non-secure");
13280
13472
  var import_ts_dedent67 = require("ts-dedent");
13281
- var import_lodash_es14 = require("lodash");
13473
+ var import_lodash_es15 = require("lodash");
13282
13474
  var ToTreeNodeImpl = class extends NodeImpl {
13283
13475
  static create() {
13284
13476
  const chartNode = {
@@ -13364,7 +13556,7 @@ var ToTreeNodeImpl = class extends NodeImpl {
13364
13556
  if (!Array.isArray(objects) || objects.length === 0)
13365
13557
  return "";
13366
13558
  let result = "";
13367
- const sortedObjects = this.data.useSortAlphabetically ? (0, import_lodash_es14.sortBy)(objects, (obj) => String((0, import_lodash_es14.get)(obj, "path", ""))) : objects;
13559
+ const sortedObjects = this.data.useSortAlphabetically ? (0, import_lodash_es15.sortBy)(objects, (obj) => String((0, import_lodash_es15.get)(obj, "path", ""))) : objects;
13368
13560
  sortedObjects.forEach((obj, index) => {
13369
13561
  const isLastItem = index === sortedObjects.length - 1;
13370
13562
  const prefix = level === 0 ? "" : isLast ? "\u2514\u2500\u2500 " : "\u251C\u2500\u2500 ";
@@ -13373,14 +13565,14 @@ var ToTreeNodeImpl = class extends NodeImpl {
13373
13565
  const interpolationVars = matches.reduce(
13374
13566
  (acc, match13) => {
13375
13567
  const key = match13;
13376
- acc[key] = String((0, import_lodash_es14.get)(obj, key, ""));
13568
+ acc[key] = String((0, import_lodash_es15.get)(obj, key, ""));
13377
13569
  return acc;
13378
13570
  },
13379
13571
  {}
13380
13572
  );
13381
13573
  const formattedNode = interpolate(this.data.format, interpolationVars);
13382
13574
  result += indent + prefix + formattedNode + "\n";
13383
- const children = (0, import_lodash_es14.get)(obj, this.data.childrenProperty);
13575
+ const children = (0, import_lodash_es15.get)(obj, this.data.childrenProperty);
13384
13576
  if (Array.isArray(children) && children.length > 0) {
13385
13577
  const newPath = parentPath ? `${parentPath}/${formattedNode}` : formattedNode;
13386
13578
  result += this.buildTree(children, newPath, level + 1, isLastItem);
@@ -13607,109 +13799,952 @@ until ${condition}${maxIterations}`;
13607
13799
  };
13608
13800
  var loopUntilNode = nodeDefinition(LoopUntilNodeImpl, "Loop Until");
13609
13801
 
13610
- // src/model/nodes/ReferencedGraphAliasNode.ts
13611
- var import_non_secure75 = require("nanoid/non-secure");
13612
- var import_ts_dedent69 = require("ts-dedent");
13613
- var ReferencedGraphAliasNodeImpl = class extends NodeImpl {
13802
+ // src/model/nodes/MCPDiscoveryNode.ts
13803
+ var import_nanoid3 = require("nanoid");
13804
+
13805
+ // src/integrations/mcp/MCPProvider.ts
13806
+ var MCPErrorType = /* @__PURE__ */ ((MCPErrorType2) => {
13807
+ MCPErrorType2["CONFIG_NOT_FOUND"] = "CONFIG_NOT_FOUND";
13808
+ MCPErrorType2["SERVER_NOT_FOUND"] = "SERVER_NOT_FOUND";
13809
+ MCPErrorType2["SERVER_COMMUNICATION_FAILED"] = "SERVER_COMMUNICATION_FAILED";
13810
+ MCPErrorType2["INVALID_SCHEMA"] = "INVALID_SCHEMA";
13811
+ return MCPErrorType2;
13812
+ })(MCPErrorType || {});
13813
+ var MCPError = class extends Error {
13814
+ constructor(type, message, details) {
13815
+ super(message);
13816
+ this.type = type;
13817
+ this.details = details;
13818
+ this.name = "Error";
13819
+ }
13820
+ };
13821
+
13822
+ // src/integrations/mcp/MCPUtils.ts
13823
+ var loadMCPConfiguration = async (context) => {
13824
+ if (context.executor !== "nodejs") {
13825
+ throw new MCPError("CONFIG_NOT_FOUND" /* CONFIG_NOT_FOUND */, "MCP config is not supported in browser environment");
13826
+ }
13827
+ const mcpConfig = context.project.metadata.mcpServer;
13828
+ if (!mcpConfig || mcpConfig.mcpServers == null) {
13829
+ throw new MCPError("CONFIG_NOT_FOUND" /* CONFIG_NOT_FOUND */, "MCP configuration not defined in Project tab");
13830
+ }
13831
+ return mcpConfig;
13832
+ };
13833
+ var getServerOptions = async (context) => {
13834
+ if (context.executor === "nodejs" && context.nativeApi) {
13835
+ try {
13836
+ const config = await loadMCPConfiguration(context);
13837
+ return Object.entries(config.mcpServers).filter(([, config2]) => !config2.disabled).map(([id]) => ({
13838
+ label: id,
13839
+ value: id
13840
+ }));
13841
+ } catch {
13842
+ }
13843
+ }
13844
+ return [];
13845
+ };
13846
+ var getServerHelperMessage = (context, optionsLength) => {
13847
+ if (optionsLength > 0)
13848
+ return "Select an MCP server from local configuration located in the Project tab";
13849
+ if (context.executor !== "nodejs")
13850
+ return "MCP nodes require Node Executor";
13851
+ return "No MCP servers found in config";
13852
+ };
13853
+
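For orientation, loadMCPConfiguration above reads context.project.metadata.mcpServer and expects an mcpServers map keyed by server id, with a disabled flag honoured by getServerOptions. The per-server fields beyond disabled are not shown in this diff; the command/args below follow the common MCP stdio convention and are an assumption only:

    {
      "mcpServers": {
        "my-local-server": {
          "command": "npx",
          "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"],
          "disabled": false
        }
      }
    }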
13854
+ // src/integrations/mcp/MCPBase.ts
13855
+ var getMCPBaseInputs = (data) => {
13856
+ const inputs = [];
13857
+ if (data.useNameInput) {
13858
+ inputs.push({
13859
+ dataType: "string",
13860
+ id: "name",
13861
+ title: "Name"
13862
+ });
13863
+ }
13864
+ if (data.useVersionInput) {
13865
+ inputs.push({
13866
+ dataType: "string",
13867
+ id: "version",
13868
+ title: "Version"
13869
+ });
13870
+ }
13871
+ if (data.transportType === "http" && data.useServerUrlInput) {
13872
+ inputs.push({
13873
+ dataType: "string",
13874
+ id: "serverUrl",
13875
+ title: "Server URL",
13876
+ description: "The endpoint URL for the MCP server"
13877
+ });
13878
+ }
13879
+ if (data.transportType === "http" && data.useHeadersInput) {
13880
+ inputs.push({
13881
+ dataType: "object",
13882
+ id: "headers",
13883
+ title: "Headers"
13884
+ });
13885
+ }
13886
+ return inputs;
13887
+ };
13888
+
13889
+ // src/model/nodes/MCPDiscoveryNode.ts
13890
+ var MCPDiscoveryNodeImpl = class extends NodeImpl {
13614
13891
  static create() {
13615
13892
  const chartNode = {
13616
- type: "referencedGraphAlias",
13617
- title: "",
13618
- // Always set initially by the editor
13619
- id: (0, import_non_secure75.nanoid)(),
13893
+ type: "mcpDiscovery",
13894
+ title: "MCP Discovery",
13895
+ id: (0, import_nanoid3.nanoid)(),
13620
13896
  visualData: {
13621
13897
  x: 0,
13622
13898
  y: 0,
13623
- width: 300
13899
+ width: 250
13624
13900
  },
13625
13901
  data: {
13626
- projectId: void 0,
13627
- // Always set initially by the editor
13628
- graphId: void 0,
13629
- // Always set initially by the editor
13630
- useErrorOutput: false
13902
+ name: "mcp-client",
13903
+ version: "1.0.0",
13904
+ transportType: "stdio",
13905
+ serverUrl: "http://localhost:8080/mcp",
13906
+ headers: "",
13907
+ serverId: "",
13908
+ useNameInput: false,
13909
+ useVersionInput: false,
13910
+ useToolsOutput: true,
13911
+ usePromptsOutput: true
13631
13912
  }
13632
13913
  };
13633
13914
  return chartNode;
13634
13915
  }
13635
- getInputDefinitions(_connections, _nodes, _project, referencedProjects) {
13636
- const referencedProject = referencedProjects[this.data.projectId];
13637
- if (!referencedProject) {
13638
- return [];
13639
- }
13640
- const graph = referencedProject.graphs[this.data.graphId];
13641
- if (!graph) {
13642
- return [];
13643
- }
13644
- const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
13645
- const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
13646
- return inputIds.map(
13647
- (id) => ({
13648
- id,
13649
- title: id,
13650
- dataType: inputNodes.find((node) => node.data.id === id).data.dataType
13651
- })
13652
- );
13653
- }
13654
- getGraphOutputs(referencedProject) {
13655
- const graph = referencedProject.graphs[this.data.graphId];
13656
- if (!graph) {
13657
- return [];
13658
- }
13659
- const outputNodes = graph.nodes.filter((node) => node.type === "graphOutput");
13660
- const outputIds = [...new Set(outputNodes.map((node) => node.data.id))].sort();
13661
- const outputs = outputIds.map(
13662
- (id) => ({
13663
- id,
13664
- title: id,
13665
- dataType: outputNodes.find((node) => node.data.id === id).data.dataType
13666
- })
13667
- );
13668
- return outputs;
13916
+ getInputDefinitions() {
13917
+ const inputs = getMCPBaseInputs(this.data);
13918
+ return inputs;
13669
13919
  }
13670
- getOutputDefinitions(_connections, _nodes, _project, referencedProjects) {
13671
- const outputs = [];
13672
- const referencedProject = referencedProjects[this.data.projectId];
13673
- if (!referencedProject) {
13674
- return outputs;
13920
+ getOutputDefinitions() {
13921
+ const outputDefinitions = [];
13922
+ if (this.data.useToolsOutput) {
13923
+ outputDefinitions.push({
13924
+ id: "tools",
13925
+ title: "Tools",
13926
+ dataType: "object[]",
13927
+ description: "Tools returned from the MCP server"
13928
+ });
13675
13929
  }
13676
- outputs.push(...this.getGraphOutputs(referencedProject));
13677
- if (this.data.useErrorOutput) {
13678
- outputs.push({
13679
- id: "error",
13680
- title: "Error",
13681
- dataType: "string"
13930
+ if (this.data.usePromptsOutput) {
13931
+ outputDefinitions.push({
13932
+ id: "prompts",
13933
+ title: "Prompts",
13934
+ dataType: "object[]",
13935
+ description: "Prompts returned from the MCP server"
13682
13936
  });
13683
13937
  }
13684
- return outputs;
13938
+ return outputDefinitions;
13685
13939
  }
13686
- getEditors(context) {
13687
- const definitions = [
13940
+ async getEditors(context) {
13941
+ const editors = [
13688
13942
  {
13689
13943
  type: "toggle",
13690
- label: "Use Error Output",
13691
- dataKey: "useErrorOutput"
13944
+ label: "Output Tools",
13945
+ dataKey: "useToolsOutput",
13946
+ helperMessage: "Toggle on if you want to get a Tools output"
13692
13947
  },
13693
13948
  {
13694
13949
  type: "toggle",
13695
- label: "Output Cost & Duration",
13696
- dataKey: "outputCostDuration"
13950
+ label: "Output Prompts",
13951
+ dataKey: "usePromptsOutput",
13952
+ helperMessage: "Toggle on if you want to get a Prompts output"
13953
+ },
13954
+ {
13955
+ type: "string",
13956
+ label: "Name",
13957
+ dataKey: "name",
13958
+ useInputToggleDataKey: "useNameInput",
13959
+ helperMessage: "The name for the MCP Client"
13960
+ },
13961
+ {
13962
+ type: "string",
13963
+ label: "Version",
13964
+ dataKey: "version",
13965
+ useInputToggleDataKey: "useVersionInput",
13966
+ helperMessage: "A version for the MCP Client"
13967
+ },
13968
+ {
13969
+ type: "dropdown",
13970
+ label: "Transport Type",
13971
+ dataKey: "transportType",
13972
+ options: [
13973
+ { label: "HTTP", value: "http" },
13974
+ { label: "STDIO", value: "stdio" }
13975
+ ]
13697
13976
  }
13698
13977
  ];
13699
- const referencedProject = context.referencedProjects[this.data.projectId];
13700
- if (referencedProject) {
13701
- const graph = referencedProject.graphs[this.data.graphId];
13702
- if (graph) {
13703
- const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
13704
- const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
13705
- for (const inputId of inputIds) {
13706
- const inputNode = inputNodes.find((node) => node.data.id === inputId);
13707
- definitions.push({
13708
- type: "dynamic",
13709
- dataKey: "inputData",
13710
- dynamicDataKey: inputNode.data.id,
13711
- dataType: inputNode.data.dataType,
13712
- label: inputNode.data.id,
13978
+ if (this.data.transportType === "http") {
13979
+ editors.push(
13980
+ {
13981
+ type: "string",
13982
+ label: "Server URL",
13983
+ dataKey: "serverUrl",
13984
+ useInputToggleDataKey: "useServerUrlInput",
13985
+ helperMessage: "The base URL endpoint for the MCP server with `/mcp`"
13986
+ },
13987
+ {
13988
+ type: "code",
13989
+ label: "Headers",
13990
+ dataKey: "headers",
13991
+ useInputToggleDataKey: "useHeadersInput",
13992
+ language: "json"
13993
+ }
13994
+ );
13995
+ } else if (this.data.transportType === "stdio") {
13996
+ const serverOptions = await getServerOptions(context);
13997
+ editors.push({
13998
+ type: "dropdown",
13999
+ label: "Server ID",
14000
+ dataKey: "serverId",
14001
+ helperMessage: getServerHelperMessage(context, serverOptions.length),
14002
+ options: serverOptions
14003
+ });
14004
+ }
14005
+ return editors;
14006
+ }
14007
+ getBody(context) {
14008
+ var _a;
14009
+ let base;
14010
+ let headers = "";
14011
+ if (this.data.transportType === "http") {
14012
+ base = this.data.useServerUrlInput ? "(Using Server URL Input)" : this.data.serverUrl;
14013
+ headers = this.data.useHeadersInput ? "\nHeaders: (Using Input)" : ((_a = this.data.headers) == null ? void 0 : _a.trim()) ? `
14014
+ Headers: ${this.data.headers}` : "";
14015
+ } else {
14016
+ base = `Server ID: ${this.data.serverId || "(None)"}`;
14017
+ }
14018
+ const namePart = `Name: ${this.data.name}`;
14019
+ const versionPart = `Version: ${this.data.version}`;
14020
+ const parts = [namePart, versionPart, base, headers];
14021
+ if (context.executor !== "nodejs") {
14022
+ parts.push("(Requires Node Executor)");
14023
+ }
14024
+ return parts.join("\n");
14025
+ }
14026
+ static getUIData() {
14027
+ return {
14028
+ infoBoxBody: import_ts_dedent.dedent`
14029
+ Connects to an MCP (Model Context Protocol) server to discover capabilities like tools and prompts.
14030
+ `,
14031
+ infoBoxTitle: "MCP Discovery Node",
14032
+ contextMenuTitle: "MCP Discovery",
14033
+ group: ["MCP"]
14034
+ };
14035
+ }
14036
+ async process(inputs, context) {
14037
+ var _a;
14038
+ const name = getInputOrData(this.data, inputs, "name", "string");
14039
+ const version = getInputOrData(this.data, inputs, "version", "string");
14040
+ const transportType = getInputOrData(this.data, inputs, "transportType", "string");
14041
+ let tools = [];
14042
+ let prompts = [];
14043
+ try {
14044
+ if (!context.mcpProvider) {
14045
+ throw new Error("MCP Provider not found");
14046
+ }
14047
+ if (transportType === "http") {
14048
+ const serverUrl = getInputOrData(this.data, inputs, "serverUrl", "string");
14049
+ if (!serverUrl || serverUrl === "") {
14050
+ throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, "No server URL was provided");
14051
+ }
14052
+ if (!serverUrl.includes("/mcp")) {
14053
+ throw new MCPError(
14054
+ "SERVER_COMMUNICATION_FAILED" /* SERVER_COMMUNICATION_FAILED */,
14055
+ "Include /mcp in your server URL. For example: http://localhost:8080/mcp"
14056
+ );
14057
+ }
14058
+ let headers;
14059
+ if (this.data.useHeadersInput) {
14060
+ const headersInput = inputs["headers"];
14061
+ if ((headersInput == null ? void 0 : headersInput.type) === "string") {
14062
+ headers = JSON.parse(headersInput.value);
14063
+ } else if ((headersInput == null ? void 0 : headersInput.type) === "object") {
14064
+ headers = headersInput.value;
14065
+ } else {
14066
+ headers = coerceType(headersInput, "object");
14067
+ }
14068
+ } else if ((_a = this.data.headers) == null ? void 0 : _a.trim()) {
14069
+ headers = JSON.parse(this.data.headers);
14070
+ }
14071
+ tools = await context.mcpProvider.getHTTPTools({ name, version }, serverUrl, headers);
14072
+ prompts = await context.mcpProvider.getHTTPrompts({ name, version }, serverUrl, headers);
14073
+ } else if (transportType === "stdio") {
14074
+ const serverId = this.data.serverId ?? "";
14075
+ const mcpConfig = await loadMCPConfiguration(context);
14076
+ if (!mcpConfig.mcpServers[serverId]) {
14077
+ throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, `Server ${serverId} not found in MCP config`);
14078
+ }
14079
+ const serverConfig = {
14080
+ config: mcpConfig.mcpServers[serverId],
14081
+ serverId
14082
+ };
14083
+ tools = await context.mcpProvider.getStdioTools({ name, version }, serverConfig);
14084
+ prompts = await context.mcpProvider.getStdioPrompts({ name, version }, serverConfig);
14085
+ }
14086
+ const output = {};
14087
+ const gptFunctions = tools.map((tool) => ({
14088
+ name: tool.name,
14089
+ description: tool.description ?? "",
14090
+ parameters: tool.inputSchema,
14091
+ strict: false
14092
+ }));
14093
+ if (this.data.useToolsOutput) {
14094
+ output["tools"] = {
14095
+ type: "gpt-function[]",
14096
+ value: gptFunctions
14097
+ };
14098
+ }
14099
+ if (this.data.usePromptsOutput) {
14100
+ output["prompts"] = {
14101
+ type: "object[]",
14102
+ value: prompts.map((prompt) => ({
14103
+ name: prompt.name,
14104
+ description: prompt.description,
14105
+ arguments: prompt.arguments
14106
+ }))
14107
+ };
14108
+ }
14109
+ return output;
14110
+ } catch (err) {
14111
+ if (context.executor === "browser") {
14112
+ throw new Error("Failed to create Client without Node Executor");
14113
+ }
14114
+ throw err;
14115
+ }
14116
+ }
14117
+ };
14118
+ var mcpDiscoveryNode = nodeDefinition(MCPDiscoveryNodeImpl, "MCP Discovery");
14119
+
14120
+ // src/model/nodes/MCPToolCallNode.ts
14121
+ var import_non_secure75 = require("nanoid/non-secure");
14122
+ var import_ts_dedent69 = require("ts-dedent");
14123
+ var MCPToolCallNodeImpl = class extends NodeImpl {
14124
+ static create() {
14125
+ const chartNode = {
14126
+ type: "mcpToolCall",
14127
+ title: "MCP Tool Call",
14128
+ id: (0, import_non_secure75.nanoid)(),
14129
+ visualData: {
14130
+ x: 0,
14131
+ y: 0,
14132
+ width: 250
14133
+ },
14134
+ data: {
14135
+ name: "mcp-tool-call-client",
14136
+ version: "1.0.0",
14137
+ transportType: "stdio",
14138
+ serverUrl: "http://localhost:8080/mcp",
14139
+ headers: "",
14140
+ serverId: "",
14141
+ toolName: "",
14142
+ toolArguments: import_ts_dedent69.dedent`
14143
+ {
14144
+ "key": "value"
14145
+ }`,
14146
+ toolCallId: "",
14147
+ useNameInput: false,
14148
+ useVersionInput: false,
14149
+ useToolNameInput: true,
14150
+ useToolArgumentsInput: true,
14151
+ useToolCallIdInput: true
14152
+ }
14153
+ };
14154
+ return chartNode;
14155
+ }
14156
+ getInputDefinitions() {
14157
+ const inputs = getMCPBaseInputs(this.data);
14158
+ if (this.data.useToolNameInput) {
14159
+ inputs.push({
14160
+ dataType: "string",
14161
+ id: "toolName",
14162
+ title: "Tool Name"
14163
+ });
14164
+ }
14165
+ if (this.data.useToolArgumentsInput) {
14166
+ inputs.push({
14167
+ dataType: "object",
14168
+ id: "toolArguments",
14169
+ title: "Tool Arguments"
14170
+ });
14171
+ }
14172
+ if (this.data.useToolCallIdInput) {
14173
+ inputs.push({
14174
+ dataType: "object",
14175
+ id: "toolCallId",
14176
+ title: "Tool ID"
14177
+ });
14178
+ }
14179
+ return inputs;
14180
+ }
14181
+ getOutputDefinitions() {
14182
+ const outputDefinitions = [];
14183
+ outputDefinitions.push({
14184
+ id: "response",
14185
+ title: "Response",
14186
+ dataType: "object",
14187
+ description: "Response from the Tool Call"
14188
+ });
14189
+ outputDefinitions.push({
14190
+ id: "toolCallId",
14191
+ title: "Tool ID",
14192
+ dataType: "string",
14193
+ description: "ID associated with the Tool Call"
14194
+ });
14195
+ return outputDefinitions;
14196
+ }
14197
+ async getEditors(context) {
14198
+ const editors = [
14199
+ {
14200
+ type: "string",
14201
+ label: "Name",
14202
+ dataKey: "name",
14203
+ useInputToggleDataKey: "useNameInput",
14204
+ helperMessage: "The name for the MCP Client"
14205
+ },
14206
+ {
14207
+ type: "string",
14208
+ label: "Version",
14209
+ dataKey: "version",
14210
+ useInputToggleDataKey: "useVersionInput",
14211
+ helperMessage: "A version for the MCP Client"
14212
+ },
14213
+ {
14214
+ type: "dropdown",
14215
+ label: "Transport Type",
14216
+ dataKey: "transportType",
14217
+ options: [
14218
+ { label: "HTTP", value: "http" },
14219
+ { label: "STDIO", value: "stdio" }
14220
+ ]
14221
+ },
14222
+ {
14223
+ type: "string",
14224
+ label: "Tool Name",
14225
+ dataKey: "toolName",
14226
+ useInputToggleDataKey: "useToolNameInput",
14227
+ helperMessage: "The name for the MCP Tool Call"
14228
+ },
14229
+ {
14230
+ type: "code",
14231
+ label: "Tool Arguments",
14232
+ dataKey: "toolArguments",
14233
+ language: "json",
14234
+ useInputToggleDataKey: "useToolArgumentsInput"
14235
+ },
14236
+ {
14237
+ type: "string",
14238
+ label: "Tool ID",
14239
+ dataKey: "toolCallId",
14240
+ useInputToggleDataKey: "useToolCallIdInput",
14241
+ helperMessage: "The ID associated with the tool call"
14242
+ }
14243
+ ];
14244
+ if (this.data.transportType === "http") {
14245
+ editors.push(
14246
+ {
14247
+ type: "string",
14248
+ label: "Server URL",
14249
+ dataKey: "serverUrl",
14250
+ useInputToggleDataKey: "useServerUrlInput",
14251
+ helperMessage: "The base URL endpoint for the MCP server with `/mcp`"
14252
+ },
14253
+ {
14254
+ type: "code",
14255
+ label: "Headers",
14256
+ dataKey: "headers",
14257
+ useInputToggleDataKey: "useHeadersInput",
14258
+ language: "json"
14259
+ }
14260
+ );
14261
+ } else if (this.data.transportType === "stdio") {
14262
+ const serverOptions = await getServerOptions(context);
14263
+ editors.push({
14264
+ type: "dropdown",
14265
+ label: "Server ID",
14266
+ dataKey: "serverId",
14267
+ helperMessage: getServerHelperMessage(context, serverOptions.length),
14268
+ options: serverOptions
14269
+ });
14270
+ }
14271
+ return editors;
14272
+ }
14273
+ getBody(context) {
14274
+ var _a;
14275
+ let base;
14276
+ let headers = "";
14277
+ if (this.data.transportType === "http") {
14278
+ base = this.data.useServerUrlInput ? "(Using Server URL Input)" : this.data.serverUrl;
14279
+ headers = this.data.useHeadersInput ? "\nHeaders: (Using Input)" : ((_a = this.data.headers) == null ? void 0 : _a.trim()) ? `
14280
+ Headers: ${this.data.headers}` : "";
14281
+ } else {
14282
+ base = `Server ID: ${this.data.serverId || "(None)"}`;
14283
+ }
14284
+ const namePart = `Name: ${this.data.name}`;
14285
+ const versionPart = `Version: ${this.data.version}`;
14286
+ const parts = [namePart, versionPart, base, headers];
14287
+ if (context.executor !== "nodejs") {
14288
+ parts.push("(Requires Node Executor)");
14289
+ }
14290
+ return parts.join("\n");
14291
+ }
14292
+ static getUIData() {
14293
+ return {
14294
+ infoBoxBody: import_ts_dedent69.dedent`
14295
+ Connects to an MCP (Model Context Protocol) server and gets a tool call response.
14296
+ `,
14297
+ infoBoxTitle: "MCP Tool Call Node",
14298
+ contextMenuTitle: "MCP Tool Call",
14299
+ group: ["MCP"]
14300
+ };
14301
+ }
14302
+ async process(inputs, context) {
14303
+ var _a;
14304
+ const name = getInputOrData(this.data, inputs, "name", "string");
14305
+ const version = getInputOrData(this.data, inputs, "version", "string");
14306
+ const toolName = getInputOrData(this.data, inputs, "toolName", "string");
14307
+ const toolCallId = getInputOrData(this.data, inputs, "toolCallId", "string");
14308
+ let toolArguments;
14309
+ if (this.data.useToolArgumentsInput) {
14310
+ toolArguments = getInputOrData(this.data, inputs, "toolArguments", "object");
14311
+ if (toolArguments == null) {
14312
+ throw new MCPError("INVALID_SCHEMA" /* INVALID_SCHEMA */, "Cannot parse tool argument with input toggle on");
14313
+ }
14314
+ } else {
14315
+ const inputMap = keys(inputs).filter((key) => key.startsWith("input")).reduce(
14316
+ (acc, key) => {
14317
+ const stringValue = coerceTypeOptional(inputs[key], "string") ?? "";
14318
+ const interpolationKey = key.slice("input-".length);
14319
+ acc[interpolationKey] = stringValue;
14320
+ return acc;
14321
+ },
14322
+ {}
14323
+ );
14324
+ const interpolated = interpolate(this.data.toolArguments ?? "", inputMap);
14325
+ toolArguments = JSON.parse(interpolated);
14326
+ }
14327
+ const toolCall = {
14328
+ name: toolName,
14329
+ arguments: toolArguments
14330
+ };
14331
+ const transportType = getInputOrData(this.data, inputs, "transportType", "string");
14332
+ let toolResponse = void 0;
14333
+ try {
14334
+ if (!context.mcpProvider) {
14335
+ throw new Error("MCP Provider not found");
14336
+ }
14337
+ if (transportType === "http") {
14338
+ const serverUrl = getInputOrData(this.data, inputs, "serverUrl", "string");
14339
+ if (!serverUrl || serverUrl === "") {
14340
+ throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, "No server URL was provided");
14341
+ }
14342
+ if (!serverUrl.includes("/mcp")) {
14343
+ throw new MCPError(
14344
+ "SERVER_COMMUNICATION_FAILED" /* SERVER_COMMUNICATION_FAILED */,
14345
+ "Include /mcp in your server URL. For example: http://localhost:8080/mcp"
14346
+ );
14347
+ }
14348
+ let headers;
14349
+ if (this.data.useHeadersInput) {
14350
+ const headersInput = inputs["headers"];
14351
+ if ((headersInput == null ? void 0 : headersInput.type) === "string") {
14352
+ headers = JSON.parse(headersInput.value);
14353
+ } else if ((headersInput == null ? void 0 : headersInput.type) === "object") {
14354
+ headers = headersInput.value;
14355
+ } else {
14356
+ headers = coerceType(headersInput, "object");
14357
+ }
14358
+ } else if ((_a = this.data.headers) == null ? void 0 : _a.trim()) {
14359
+ headers = JSON.parse(this.data.headers);
14360
+ }
14361
+ toolResponse = await context.mcpProvider.httpToolCall({ name, version }, serverUrl, headers, toolCall);
14362
+ } else if (transportType === "stdio") {
14363
+ const serverId = this.data.serverId ?? "";
14364
+ const mcpConfig = await loadMCPConfiguration(context);
14365
+ if (!mcpConfig.mcpServers[serverId]) {
14366
+ throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, `Server ${serverId} not found in MCP config`);
14367
+ }
14368
+ const serverConfig = {
14369
+ config: mcpConfig.mcpServers[serverId],
14370
+ serverId
14371
+ };
14372
+ toolResponse = await context.mcpProvider.stdioToolCall({ name, version }, serverConfig, toolCall);
14373
+ }
14374
+ const output = {};
14375
+ output["response"] = {
14376
+ type: "object[]",
14377
+ value: toolResponse == null ? void 0 : toolResponse.content
14378
+ };
14379
+ output["toolCallId"] = {
14380
+ type: "string",
14381
+ value: toolCallId
14382
+ };
14383
+ return output;
14384
+ } catch (err) {
14385
+ const { message } = getError(err);
14386
+ if (context.executor === "browser") {
14387
+ throw new Error("Failed to create Client without a node executor");
14388
+ }
14389
+ console.log(message);
14390
+ throw err;
14391
+ }
14392
+ }
14393
+ };
14394
+ var mcpToolCallNode = nodeDefinition(MCPToolCallNodeImpl, "MCP Tool Call");
14395
+
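Editor's note: taken together, MCPToolCallNodeImpl above sends a single MCP tool call over HTTP or stdio and surfaces the result on its `response` and `toolCallId` ports; process() throws when context.executor is "browser", so a Node executor is required. A minimal sketch of the node data for the HTTP case follows. The field names come from the editor definitions above; the full MCPToolCallNodeData type lives in src/model/nodes/MCPToolCallNode.ts and is not shown in this diff, and the `{{query}}` placeholder syntax is an assumption about src/utils/interpolation.ts.

// Hypothetical node data for an HTTP MCP tool call; all values are placeholders.
const toolCallNodeData = {
  name: 'mcp-tool-call-client',
  version: '1.0.0',
  transportType: 'http',                      // or 'stdio' with a serverId from the MCP config
  serverUrl: 'http://localhost:8080/mcp',     // process() rejects URLs that do not contain '/mcp'
  headers: '{ "Authorization": "Bearer <token>" }', // JSON string, parsed before the request
  toolName: 'search',
  toolArguments: '{ "query": "{{query}}" }',  // interpolated from input-* ports when the toggle is off
  toolCallId: 'tool-call-1',
  useNameInput: false,
  useVersionInput: false,
  useToolNameInput: false,
  useToolArgumentsInput: false,
  useToolCallIdInput: false,
  useServerUrlInput: false,
  useHeadersInput: false,
};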
14396
+ // src/model/nodes/MCPGetPromptNode.ts
14397
+ var import_non_secure76 = require("nanoid/non-secure");
14398
+ var MCPGetPromptNodeImpl = class extends NodeImpl {
14399
+ static create() {
14400
+ const chartNode = {
14401
+ type: "mcpGetPrompt",
14402
+ title: "MCP Get Prompt",
14403
+ id: (0, import_non_secure76.nanoid)(),
14404
+ visualData: {
14405
+ x: 0,
14406
+ y: 0,
14407
+ width: 250
14408
+ },
14409
+ data: {
14410
+ name: "mcp-get-prompt-client",
14411
+ version: "1.0.0",
14412
+ transportType: "stdio",
14413
+ serverUrl: "http://localhost:8080/mcp",
14414
+ headers: "",
14415
+ serverId: "",
14416
+ promptName: "",
14417
+ promptArguments: import_ts_dedent.dedent`
14418
+ {
14419
+ "key": "value"
14420
+ }`,
14421
+ useNameInput: false,
14422
+ useVersionInput: false,
14423
+ usePromptNameInput: false,
14424
+ usePromptArgumentsInput: false
14425
+ }
14426
+ };
14427
+ return chartNode;
14428
+ }
14429
+ getInputDefinitions() {
14430
+ const inputs = getMCPBaseInputs(this.data);
14431
+ if (this.data.usePromptNameInput) {
14432
+ inputs.push({
14433
+ dataType: "string",
14434
+ id: "promptName",
14435
+ title: "Prompt Name"
14436
+ });
14437
+ }
14438
+ if (this.data.usePromptArgumentsInput) {
14439
+ inputs.push({
14440
+ dataType: "object",
14441
+ id: "promptArguments",
14442
+ title: "Prompt Arguments"
14443
+ });
14444
+ }
14445
+ return inputs;
14446
+ }
14447
+ getOutputDefinitions() {
14448
+ const outputDefinitions = [];
14449
+ outputDefinitions.push({
14450
+ id: "prompt",
14451
+ title: "Prompt",
14452
+ dataType: "object",
14453
+ description: "Prompt response result"
14454
+ });
14455
+ return outputDefinitions;
14456
+ }
14457
+ async getEditors(context) {
14458
+ const editors = [
14459
+ {
14460
+ type: "string",
14461
+ label: "Name",
14462
+ dataKey: "name",
14463
+ useInputToggleDataKey: "useNameInput",
14464
+ helperMessage: "The name for the MCP Client"
14465
+ },
14466
+ {
14467
+ type: "string",
14468
+ label: "Version",
14469
+ dataKey: "version",
14470
+ useInputToggleDataKey: "useVersionInput",
14471
+ helperMessage: "A version for the MCP Client"
14472
+ },
14473
+ {
14474
+ type: "dropdown",
14475
+ label: "Transport Type",
14476
+ dataKey: "transportType",
14477
+ options: [
14478
+ { label: "HTTP", value: "http" },
14479
+ { label: "STDIO", value: "stdio" }
14480
+ ]
14481
+ },
14482
+ {
14483
+ type: "string",
14484
+ label: "Prompt Name",
14485
+ dataKey: "promptName",
14486
+ useInputToggleDataKey: "usePromptNameInput",
14487
+ helperMessage: "The name for the MCP prompt"
14488
+ },
14489
+ {
14490
+ type: "code",
14491
+ label: "Prompt Arguments",
14492
+ dataKey: "promptArguments",
14493
+ useInputToggleDataKey: "usePromptArgumentsInput",
14494
+ language: "json",
14495
+ helperMessage: "Arguments to provide the prompt"
14496
+ }
14497
+ ];
14498
+ if (this.data.transportType === "http") {
14499
+ editors.push(
14500
+ {
14501
+ type: "string",
14502
+ label: "Server URL",
14503
+ dataKey: "serverUrl",
14504
+ useInputToggleDataKey: "useServerUrlInput",
14505
+ helperMessage: "The base URL endpoint for the MCP server with `/mcp`"
14506
+ },
14507
+ {
14508
+ type: "code",
14509
+ label: "Headers",
14510
+ dataKey: "headers",
14511
+ useInputToggleDataKey: "useHeadersInput",
14512
+ language: "json"
14513
+ }
14514
+ );
14515
+ } else if (this.data.transportType === "stdio") {
14516
+ const serverOptions = await getServerOptions(context);
14517
+ editors.push({
14518
+ type: "dropdown",
14519
+ label: "Server ID",
14520
+ dataKey: "serverId",
14521
+ helperMessage: getServerHelperMessage(context, serverOptions.length),
14522
+ options: serverOptions
14523
+ });
14524
+ }
14525
+ return editors;
14526
+ }
14527
+ getBody(context) {
14528
+ var _a;
14529
+ let base;
14530
+ let headers = "";
14531
+ if (this.data.transportType === "http") {
14532
+ base = this.data.useServerUrlInput ? "(Using Server URL Input)" : this.data.serverUrl;
14533
+ headers = this.data.useHeadersInput ? "\nHeaders: (Using Input)" : ((_a = this.data.headers) == null ? void 0 : _a.trim()) ? `
14534
+ Headers: ${this.data.headers}` : "";
14535
+ } else {
14536
+ base = `Server ID: ${this.data.serverId || "(None)"}`;
14537
+ }
14538
+ const namePart = `Name: ${this.data.name}`;
14539
+ const versionPart = `Version: ${this.data.version}`;
14540
+ const parts = [namePart, versionPart, base, headers];
14541
+ if (context.executor !== "nodejs") {
14542
+ parts.push("(Requires Node Executor)");
14543
+ }
14544
+ return parts.join("\n");
14545
+ }
14546
+ static getUIData() {
14547
+ return {
14548
+ infoBoxBody: import_ts_dedent.dedent`
14549
+ Connects to an MCP (Model Context Protocol) server and gets a prompt response.
14550
+ `,
14551
+ infoBoxTitle: "MCP Get Prompt Node",
14552
+ contextMenuTitle: "MCP Get Prompt",
14553
+ group: ["MCP"]
14554
+ };
14555
+ }
14556
+ async process(inputs, context) {
14557
+ var _a;
14558
+ const name = getInputOrData(this.data, inputs, "name", "string");
14559
+ const version = getInputOrData(this.data, inputs, "version", "string");
14560
+ const promptName = getInputOrData(this.data, inputs, "promptName", "string");
14561
+ let promptArguments;
14562
+ if (this.data.usePromptArgumentsInput) {
14563
+ promptArguments = getInputOrData(this.data, inputs, "promptArguments", "object");
14564
+ if (promptArguments == null) {
14565
+ throw new MCPError("INVALID_SCHEMA" /* INVALID_SCHEMA */, "Cannot parse tool argument with input toggle on");
14566
+ }
14567
+ } else {
14568
+ const inputMap = keys(inputs).filter((key) => key.startsWith("input")).reduce(
14569
+ (acc, key) => {
14570
+ const stringValue = coerceTypeOptional(inputs[key], "string") ?? "";
14571
+ const interpolationKey = key.slice("input-".length);
14572
+ acc[interpolationKey] = stringValue;
14573
+ return acc;
14574
+ },
14575
+ {}
14576
+ );
14577
+ const interpolated = interpolate(this.data.promptArguments ?? "", inputMap);
14578
+ promptArguments = JSON.parse(interpolated);
14579
+ }
14580
+ const getPromptRequest = {
14581
+ name: promptName,
14582
+ arguments: promptArguments
14583
+ };
14584
+ const transportType = getInputOrData(this.data, inputs, "transportType", "string");
14585
+ let getPromptResponse = void 0;
14586
+ try {
14587
+ if (!context.mcpProvider) {
14588
+ throw new Error("MCP Provider not found");
14589
+ }
14590
+ if (transportType === "http") {
14591
+ const serverUrl = getInputOrData(this.data, inputs, "serverUrl", "string");
14592
+ if (!serverUrl || serverUrl === "") {
14593
+ throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, "No server URL was provided");
14594
+ }
14595
+ if (!serverUrl.includes("/mcp")) {
14596
+ throw new MCPError(
14597
+ "SERVER_COMMUNICATION_FAILED" /* SERVER_COMMUNICATION_FAILED */,
14598
+ "Include /mcp in your server URL. For example: http://localhost:8080/mcp"
14599
+ );
14600
+ }
14601
+ let headers;
14602
+ if (this.data.useHeadersInput) {
14603
+ const headersInput = inputs["headers"];
14604
+ if ((headersInput == null ? void 0 : headersInput.type) === "string") {
14605
+ headers = JSON.parse(headersInput.value);
14606
+ } else if ((headersInput == null ? void 0 : headersInput.type) === "object") {
14607
+ headers = headersInput.value;
14608
+ } else {
14609
+ headers = coerceType(headersInput, "object");
14610
+ }
14611
+ } else if ((_a = this.data.headers) == null ? void 0 : _a.trim()) {
14612
+ headers = JSON.parse(this.data.headers);
14613
+ }
14614
+ getPromptResponse = await context.mcpProvider.getHTTPrompt({ name, version }, serverUrl, headers, getPromptRequest);
14615
+ } else if (transportType === "stdio") {
14616
+ const serverId = this.data.serverId ?? "";
14617
+ const mcpConfig = await loadMCPConfiguration(context);
14618
+ if (!mcpConfig.mcpServers[serverId]) {
14619
+ throw new MCPError("SERVER_NOT_FOUND" /* SERVER_NOT_FOUND */, `Server ${serverId} not found in MCP config`);
14620
+ }
14621
+ const serverConfig = {
14622
+ config: mcpConfig.mcpServers[serverId],
14623
+ serverId
14624
+ };
14625
+ getPromptResponse = await context.mcpProvider.getStdioPrompt({ name, version }, serverConfig, getPromptRequest);
14626
+ }
14627
+ const output = {};
14628
+ output["response"] = {
14629
+ type: "object",
14630
+ value: getPromptResponse
14631
+ };
14632
+ return output;
14633
+ } catch (err) {
14634
+ const { message } = getError(err);
14635
+ if (context.executor === "browser") {
14636
+ throw new Error("Failed to create Client without a node executor");
14637
+ }
14638
+ console.log(message);
14639
+ throw err;
14640
+ }
14641
+ }
14642
+ };
14643
+ var mcpGetPromptNode = nodeDefinition(MCPGetPromptNodeImpl, "MCP Get Prompt");
14644
+
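Editor's note: when the Prompt Arguments input toggle is off, process() above assembles the arguments by string-interpolating the `input-*` ports into the JSON template and then parsing it. A self-contained sketch of that flow, assuming the `{{key}}` placeholder syntax of src/utils/interpolation.ts (the helper itself is not part of this hunk):

// Stand-in for interpolate() plus the input-* reduction in process();
// the placeholder syntax and port names are illustrative.
const template = '{ "topic": "{{topic}}", "tone": "{{tone}}" }';
const ports: Record<string, string> = { 'input-topic': 'MCP servers', 'input-tone': 'brief' };

const inputMap = Object.fromEntries(
  Object.keys(ports)
    .filter((key) => key.startsWith('input'))
    .map((key) => [key.slice('input-'.length), ports[key]]),
);
const interpolated = template.replace(/\{\{(\w+)\}\}/g, (_m, key) => inputMap[key] ?? '');
const promptArguments = JSON.parse(interpolated);
// => { topic: 'MCP servers', tone: 'brief' }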
14645
+ // src/model/nodes/ReferencedGraphAliasNode.ts
14646
+ var import_non_secure77 = require("nanoid/non-secure");
14647
+ var import_ts_dedent70 = require("ts-dedent");
14648
+ var ReferencedGraphAliasNodeImpl = class extends NodeImpl {
14649
+ static create() {
14650
+ const chartNode = {
14651
+ type: "referencedGraphAlias",
14652
+ title: "",
14653
+ // Always set initially by the editor
14654
+ id: (0, import_non_secure77.nanoid)(),
14655
+ visualData: {
14656
+ x: 0,
14657
+ y: 0,
14658
+ width: 300
14659
+ },
14660
+ data: {
14661
+ projectId: void 0,
14662
+ // Always set initially by the editor
14663
+ graphId: void 0,
14664
+ // Always set initially by the editor
14665
+ useErrorOutput: false
14666
+ }
14667
+ };
14668
+ return chartNode;
14669
+ }
14670
+ getInputDefinitions(_connections, _nodes, _project, referencedProjects) {
14671
+ const referencedProject = referencedProjects[this.data.projectId];
14672
+ if (!referencedProject) {
14673
+ return [];
14674
+ }
14675
+ const graph = referencedProject.graphs[this.data.graphId];
14676
+ if (!graph) {
14677
+ return [];
14678
+ }
14679
+ const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
14680
+ const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
14681
+ return inputIds.map(
14682
+ (id) => ({
14683
+ id,
14684
+ title: id,
14685
+ dataType: inputNodes.find((node) => node.data.id === id).data.dataType
14686
+ })
14687
+ );
14688
+ }
14689
+ getGraphOutputs(referencedProject) {
14690
+ const graph = referencedProject.graphs[this.data.graphId];
14691
+ if (!graph) {
14692
+ return [];
14693
+ }
14694
+ const outputNodes = graph.nodes.filter((node) => node.type === "graphOutput");
14695
+ const outputIds = [...new Set(outputNodes.map((node) => node.data.id))].sort();
14696
+ const outputs = outputIds.map(
14697
+ (id) => ({
14698
+ id,
14699
+ title: id,
14700
+ dataType: outputNodes.find((node) => node.data.id === id).data.dataType
14701
+ })
14702
+ );
14703
+ return outputs;
14704
+ }
14705
+ getOutputDefinitions(_connections, _nodes, _project, referencedProjects) {
14706
+ const outputs = [];
14707
+ const referencedProject = referencedProjects[this.data.projectId];
14708
+ if (!referencedProject) {
14709
+ return outputs;
14710
+ }
14711
+ outputs.push(...this.getGraphOutputs(referencedProject));
14712
+ if (this.data.useErrorOutput) {
14713
+ outputs.push({
14714
+ id: "error",
14715
+ title: "Error",
14716
+ dataType: "string"
14717
+ });
14718
+ }
14719
+ return outputs;
14720
+ }
14721
+ getEditors(context) {
14722
+ const definitions = [
14723
+ {
14724
+ type: "toggle",
14725
+ label: "Use Error Output",
14726
+ dataKey: "useErrorOutput"
14727
+ },
14728
+ {
14729
+ type: "toggle",
14730
+ label: "Output Cost & Duration",
14731
+ dataKey: "outputCostDuration"
14732
+ }
14733
+ ];
14734
+ const referencedProject = context.referencedProjects[this.data.projectId];
14735
+ if (referencedProject) {
14736
+ const graph = referencedProject.graphs[this.data.graphId];
14737
+ if (graph) {
14738
+ const inputNodes = graph.nodes.filter((node) => node.type === "graphInput");
14739
+ const inputIds = [...new Set(inputNodes.map((node) => node.data.id))].sort();
14740
+ for (const inputId of inputIds) {
14741
+ const inputNode = inputNodes.find((node) => node.data.id === inputId);
14742
+ definitions.push({
14743
+ type: "dynamic",
14744
+ dataKey: "inputData",
14745
+ dynamicDataKey: inputNode.data.id,
14746
+ dataType: inputNode.data.dataType,
14747
+ label: inputNode.data.id,
13713
14748
  editor: inputNode.data.editor ?? "auto"
13714
14749
  });
13715
14750
  }
@@ -13723,7 +14758,7 @@ var ReferencedGraphAliasNodeImpl = class extends NodeImpl {
13723
14758
  }
13724
14759
  static getUIData() {
13725
14760
  return {
13726
- infoBoxBody: import_ts_dedent69.dedent`
14761
+ infoBoxBody: import_ts_dedent70.dedent`
13727
14762
  References a graph from another project. Inputs and outputs are defined by Graph Input and Graph Output nodes within the referenced graph.
13728
14763
  `,
13729
14764
  infoBoxTitle: "Referenced Graph Alias Node",
@@ -13817,7 +14852,7 @@ var referencedGraphAliasNode = nodeDefinition(ReferencedGraphAliasNodeImpl, "Ref
13817
14852
 
13818
14853
  // src/model/Nodes.ts
13819
14854
  var registerBuiltInNodes = (registry2) => {
13820
- return registry2.register(toYamlNode).register(userInputNode).register(textNode).register(chatNode).register(promptNode).register(extractRegexNode).register(codeNode).register(matchNode).register(ifNode).register(readDirectoryNode).register(readFileNode).register(writeFileNode).register(ifElseNode).register(chunkNode).register(graphInputNode).register(graphOutputNode).register(subGraphNode).register(arrayNode).register(extractJsonNode).register(assemblePromptNode).register(loopControllerNode).register(trimChatMessagesNode).register(extractYamlNode).register(externalCallNode).register(extractObjectPathNode).register(raiseEventNode).register(contextNode).register(coalesceNode).register(passthroughNode).register(popNode).register(setGlobalNode).register(getGlobalNode).register(waitForEventNode).register(gptFunctionNode).register(getEmbeddingNode).register(vectorStoreNode).register(vectorNearestNeighborsNode).register(hashNode).register(abortGraphNode).register(raceInputsNode).register(toJsonNode).register(joinNode).register(filterNode).register(objectNode).register(booleanNode).register(compareNode).register(evaluateNode).register(numberNode).register(randomNumberNode).register(shuffleNode).register(commentNode).register(imageToMDNode).register(imageNode).register(audioNode).register(httpCallNode).register(delayNode).register(appendToDatasetNode).register(createDatasetNode).register(loadDatasetNode).register(getAllDatasetsNode).register(splitNode).register(datasetNearestNeighborsNode).register(getDatasetRowNode).register(sliceNode).register(extractMarkdownCodeBlocksNode).register(assembleMessageNode).register(urlReferenceNode).register(destructureNode).register(replaceDatasetNode).register(listGraphsNode).register(graphReferenceNode).register(callGraphNode).register(delegateFunctionCallNode).register(playAudioNode).register(documentNode).register(chatLoopNode).register(readAllFilesNode).register(toMarkdownTableNode).register(cronNode).register(toTreeNode).register(loopUntilNode).register(referencedGraphAliasNode);
14855
+ return registry2.register(toYamlNode).register(userInputNode).register(textNode).register(chatNode).register(promptNode).register(extractRegexNode).register(codeNode).register(matchNode).register(ifNode).register(readDirectoryNode).register(readFileNode).register(writeFileNode).register(ifElseNode).register(chunkNode).register(graphInputNode).register(graphOutputNode).register(subGraphNode).register(arrayNode).register(extractJsonNode).register(assemblePromptNode).register(loopControllerNode).register(trimChatMessagesNode).register(extractYamlNode).register(externalCallNode).register(extractObjectPathNode).register(raiseEventNode).register(contextNode).register(coalesceNode).register(passthroughNode).register(popNode).register(setGlobalNode).register(getGlobalNode).register(waitForEventNode).register(gptFunctionNode).register(getEmbeddingNode).register(vectorStoreNode).register(vectorNearestNeighborsNode).register(hashNode).register(abortGraphNode).register(raceInputsNode).register(toJsonNode).register(joinNode).register(filterNode).register(objectNode).register(booleanNode).register(compareNode).register(evaluateNode).register(numberNode).register(randomNumberNode).register(shuffleNode).register(commentNode).register(imageToMDNode).register(imageNode).register(audioNode).register(httpCallNode).register(delayNode).register(appendToDatasetNode).register(createDatasetNode).register(loadDatasetNode).register(getAllDatasetsNode).register(splitNode).register(datasetNearestNeighborsNode).register(getDatasetRowNode).register(sliceNode).register(extractMarkdownCodeBlocksNode).register(assembleMessageNode).register(urlReferenceNode).register(destructureNode).register(replaceDatasetNode).register(listGraphsNode).register(graphReferenceNode).register(callGraphNode).register(delegateFunctionCallNode).register(playAudioNode).register(documentNode).register(chatLoopNode).register(readAllFilesNode).register(toMarkdownTableNode).register(cronNode).register(toTreeNode).register(loopUntilNode).register(mcpDiscoveryNode).register(mcpToolCallNode).register(mcpGetPromptNode).register(referencedGraphAliasNode);
13821
14856
  };
13822
14857
  var globalRivetNodeRegistry = registerBuiltInNodes(new NodeRegistration());
13823
14858
  function resetGlobalRivetNodeRegistry() {
@@ -13891,7 +14926,7 @@ ${parameterName}?: ${value == null ? void 0 : value.type}`).join("\n")}
13891
14926
 
13892
14927
  // src/integrations/CodeRunner.ts
13893
14928
  var IsomorphicCodeRunner = class {
13894
- async runCode(code, inputs, options2) {
14929
+ async runCode(code, inputs, options2, graphInputs, contextValues) {
13895
14930
  const argNames = ["inputs"];
13896
14931
  const args = [inputs];
13897
14932
  if (options2.includeRequire) {
@@ -13912,6 +14947,14 @@ var IsomorphicCodeRunner = class {
13912
14947
  argNames.push("Rivet");
13913
14948
  args.push(exports_exports);
13914
14949
  }
14950
+ if (graphInputs) {
14951
+ argNames.push("graphInputs");
14952
+ args.push(graphInputs);
14953
+ }
14954
+ if (contextValues) {
14955
+ argNames.push("context");
14956
+ args.push(contextValues);
14957
+ }
13915
14958
  argNames.push(code);
13916
14959
  const AsyncFunction = async function() {
13917
14960
  }.constructor;
@@ -13921,7 +14964,7 @@ var IsomorphicCodeRunner = class {
13921
14964
  }
13922
14965
  };
13923
14966
  var NotAllowedCodeRunner = class {
13924
- async runCode(_code, _inputs, _options) {
14967
+ async runCode(_code, _inputs, _options, _graphInputs, _contextValues) {
13925
14968
  throw new Error("Dynamic code execution is disabled.");
13926
14969
  }
13927
14970
  };
@@ -13946,7 +14989,7 @@ var GraphProcessor = class _GraphProcessor {
13946
14989
  #isPaused = false;
13947
14990
  #parent;
13948
14991
  #registry;
13949
- id = (0, import_non_secure76.nanoid)();
14992
+ id = (0, import_non_secure78.nanoid)();
13950
14993
  #includeTrace = true;
13951
14994
  executor;
13952
14995
  /** If set, specifies the node(s) that the graph will run TO, instead of the nodes without any dependents. */
@@ -13991,6 +15034,7 @@ var GraphProcessor = class _GraphProcessor {
13991
15034
  // @ts-expect-error
13992
15035
  #nodesNotInCycle = void 0;
13993
15036
  #nodeAbortControllers = /* @__PURE__ */ new Map();
15037
+ #graphInputNodeValues = {};
13994
15038
  /** User input nodes that are pending user input. */
13995
15039
  #pendingUserInputs = void 0;
13996
15040
  get isRunning() {
@@ -14371,6 +15415,7 @@ var GraphProcessor = class _GraphProcessor {
14371
15415
  this.#abortSuccessfully = false;
14372
15416
  this.#nodeAbortControllers = /* @__PURE__ */ new Map();
14373
15417
  this.#loadedProjects = {};
15418
+ this.#graphInputNodeValues = {};
14374
15419
  }
14375
15420
  /** Main function for running a graph. Runs a graph and returns the outputs from the output nodes of the graph. */
14376
15421
  async processGraph(context, inputs = {}, contextValues = {}) {
@@ -14378,6 +15423,7 @@ var GraphProcessor = class _GraphProcessor {
14378
15423
  if (this.#running) {
14379
15424
  throw new Error("Cannot process graph while already processing");
14380
15425
  }
15426
+ console.info(`Process graph called. Context:${context}, Inputs: ${JSON.stringify(inputs)}, Context Values: ${JSON.stringify(contextValues)}`);
14381
15427
  this.#initProcessState();
14382
15428
  this.#context = context;
14383
15429
  this.#graphInputs = inputs;
@@ -14387,8 +15433,11 @@ var GraphProcessor = class _GraphProcessor {
14387
15433
  this.#emitter.emit("error", { error });
14388
15434
  });
14389
15435
  }
15436
+ console.info(`Process graph calling loadProjectReferences`);
14390
15437
  await this.#loadProjectReferences();
15438
+ console.info(`Process graph called loadProjectReferences`);
14391
15439
  this.#preprocessGraph();
15440
+ console.info(`Process graph called preprocessGraph`);
14392
15441
  if (!this.#isSubProcessor) {
14393
15442
  await this.#emitter.emit("start", {
14394
15443
  contextValues: this.#contextValues,
@@ -14611,7 +15660,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
14611
15660
  return;
14612
15661
  }
14613
15662
  const inputValues = this.#getInputValuesForNode(node);
14614
- if (this.#excludedDueToControlFlow(node, inputValues, (0, import_non_secure76.nanoid)(), "loop-not-broken")) {
15663
+ if (this.#excludedDueToControlFlow(node, inputValues, (0, import_non_secure78.nanoid)(), "loop-not-broken")) {
14615
15664
  this.#emitTraceEvent(`Node ${node.title} is excluded due to control flow`);
14616
15665
  return;
14617
15666
  }
@@ -14745,7 +15794,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
14745
15794
  return nodeData;
14746
15795
  }
14747
15796
  async #processNode(node) {
14748
- const processId = (0, import_non_secure76.nanoid)();
15797
+ const processId = (0, import_non_secure78.nanoid)();
14749
15798
  if (this.#abortController.signal.aborted) {
14750
15799
  this.#nodeErrored(node, new Error("Processing aborted"), processId);
14751
15800
  return processId;
@@ -14773,7 +15822,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
14773
15822
  return;
14774
15823
  }
14775
15824
  const splittingAmount = Math.min(
14776
- (0, import_lodash_es15.max)(values(inputValues).map((value) => Array.isArray(value == null ? void 0 : value.value) ? value == null ? void 0 : value.value.length : 1)) ?? 1,
15825
+ (0, import_lodash_es16.max)(values(inputValues).map((value) => Array.isArray(value == null ? void 0 : value.value) ? value == null ? void 0 : value.value.length : 1)) ?? 1,
14777
15826
  node.splitRunMax ?? 10
14778
15827
  );
14779
15828
  this.#emitter.emit("nodeStart", { node, inputs: inputValues, processId });
@@ -14816,7 +15865,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
14816
15865
  }
14817
15866
  } else {
14818
15867
  results = await Promise.all(
14819
- (0, import_lodash_es15.range)(0, splittingAmount).map(async (i) => {
15868
+ (0, import_lodash_es16.range)(0, splittingAmount).map(async (i) => {
14820
15869
  var _a2, _b2, _c2;
14821
15870
  const inputs = fromEntries(
14822
15871
  entries(inputValues).map(([port, value]) => [
@@ -14866,15 +15915,15 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
14866
15915
  }, {});
14867
15916
  this.#nodeResults.set(node.id, aggregateResults);
14868
15917
  this.#visitedNodes.add(node.id);
14869
- this.#totalRequestTokens += (0, import_lodash_es15.sum)(results.map((r) => {
15918
+ this.#totalRequestTokens += (0, import_lodash_es16.sum)(results.map((r) => {
14870
15919
  var _a2;
14871
15920
  return coerceTypeOptional((_a2 = r.output) == null ? void 0 : _a2["requestTokens"], "number") ?? 0;
14872
15921
  }));
14873
- this.#totalResponseTokens += (0, import_lodash_es15.sum)(results.map((r) => {
15922
+ this.#totalResponseTokens += (0, import_lodash_es16.sum)(results.map((r) => {
14874
15923
  var _a2;
14875
15924
  return coerceTypeOptional((_a2 = r.output) == null ? void 0 : _a2["responseTokens"], "number") ?? 0;
14876
15925
  }));
14877
- this.#totalCost += (0, import_lodash_es15.sum)(results.map((r) => {
15926
+ this.#totalCost += (0, import_lodash_es16.sum)(results.map((r) => {
14878
15927
  var _a2;
14879
15928
  return coerceTypeOptional((_a2 = r.output) == null ? void 0 : _a2["cost"], "number") ?? 0;
14880
15929
  }));
@@ -15094,7 +16143,8 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
15094
16143
  });
15095
16144
  });
15096
16145
  return results2;
15097
- }
16146
+ },
16147
+ graphInputNodeValues: this.#graphInputNodeValues
15098
16148
  };
15099
16149
  await this.#waitUntilUnpaused();
15100
16150
  const results = await instance.process(inputValues, context);
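Editor's note: with the addition above, the context object handed to each node's process() also carries graphInputNodeValues, the values resolved by Graph Input nodes during the current run. A small sketch of reading it from a custom node; the Record-of-DataValue shape is an assumption matching #graphInputNodeValues above.

// Inside a hypothetical custom node implementation.
async function process(inputs: Record<string, unknown>, context: any) {
  const query = context.graphInputNodeValues?.['query']; // assumed { type, value } DataValue
  return {
    output: { type: 'string', value: String(query?.value ?? '') },
  };
}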
@@ -15217,7 +16267,7 @@ ${erroredNodes.map(([nodeId]) => `- ${this.#nodesById[nodeId].title} (${nodeId})
15217
16267
  const connectionDefinition = outputDefinitions.find((def) => def.id === connection.outputId);
15218
16268
  return connectionDefinition != null;
15219
16269
  });
15220
- const outputNodes = (0, import_lodash_es15.uniqBy)(
16270
+ const outputNodes = (0, import_lodash_es16.uniqBy)(
15221
16271
  outputConnections.map((conn) => this.#nodesById[conn.inputNodeId]).filter(isNotNull),
15222
16272
  (x) => x.id
15223
16273
  );
@@ -15371,9 +16421,9 @@ var OpenAIEmbeddingGenerator = class {
15371
16421
  registerIntegration("embeddingGenerator", "openai", (context) => new OpenAIEmbeddingGenerator(context.settings));
15372
16422
 
15373
16423
  // src/recording/ExecutionRecorder.ts
15374
- var import_non_secure77 = require("nanoid/non-secure");
16424
+ var import_non_secure79 = require("nanoid/non-secure");
15375
16425
  var import_emittery3 = __toESM(require("emittery-0-13"), 1);
15376
- var import_lodash_es16 = require("lodash");
16426
+ var import_lodash_es17 = require("lodash");
15377
16427
  var toRecordedEventMap = {
15378
16428
  graphStart: ({ graph, inputs }) => ({ graphId: graph.metadata.id, inputs }),
15379
16429
  graphFinish: ({ graph, outputs }) => ({ graphId: graph.metadata.id, outputs }),
@@ -15470,16 +16520,16 @@ function toRecordedEvent(event, data) {
15470
16520
  function mapValuesDeep(obj, fn) {
15471
16521
  if (Array.isArray(obj)) {
15472
16522
  return obj.map((value) => {
15473
- if ((0, import_lodash_es16.isPlainObject)(value) || Array.isArray(value)) {
16523
+ if ((0, import_lodash_es17.isPlainObject)(value) || Array.isArray(value)) {
15474
16524
  return mapValuesDeep(value, fn);
15475
16525
  }
15476
16526
  return fn(value);
15477
16527
  });
15478
16528
  }
15479
- if ((0, import_lodash_es16.isPlainObject)(obj)) {
16529
+ if ((0, import_lodash_es17.isPlainObject)(obj)) {
15480
16530
  return Object.fromEntries(
15481
16531
  Object.entries(obj).map(([key, value]) => {
15482
- if ((0, import_lodash_es16.isPlainObject)(value) || Array.isArray(value)) {
16532
+ if ((0, import_lodash_es17.isPlainObject)(value) || Array.isArray(value)) {
15483
16533
  return [key, mapValuesDeep(value, fn)];
15484
16534
  }
15485
16535
  return [key, fn(value)];
@@ -15505,7 +16555,7 @@ var ExecutionRecorder = class _ExecutionRecorder {
15505
16555
  once = void 0;
15506
16556
  recordSocket(channel) {
15507
16557
  return new Promise((resolve) => {
15508
- this.recordingId = (0, import_non_secure77.nanoid)();
16558
+ this.recordingId = (0, import_non_secure79.nanoid)();
15509
16559
  const listener = (event) => {
15510
16560
  const { message, data } = JSON.parse(event.data);
15511
16561
  if (this.#includePartialOutputs === false && message === "partialOutput") {
@@ -15527,7 +16577,7 @@ var ExecutionRecorder = class _ExecutionRecorder {
15527
16577
  });
15528
16578
  }
15529
16579
  record(processor) {
15530
- this.recordingId = (0, import_non_secure77.nanoid)();
16580
+ this.recordingId = (0, import_non_secure79.nanoid)();
15531
16581
  processor.onAny((event, data) => {
15532
16582
  if (this.#includePartialOutputs === false && event === "partialOutput") {
15533
16583
  return;
@@ -15589,7 +16639,7 @@ var ExecutionRecorder = class _ExecutionRecorder {
15589
16639
  const asString = uint8ArrayToBase64Sync(val);
15590
16640
  const existingAsset = Object.entries(serialized.assets).find(([, asset]) => asset === asString);
15591
16641
  if (!existingAsset) {
15592
- const id = (0, import_non_secure77.nanoid)();
16642
+ const id = (0, import_non_secure79.nanoid)();
15593
16643
  serialized.assets[id] = asString;
15594
16644
  return `$ASSET:${id}`;
15595
16645
  } else {
@@ -15604,8 +16654,8 @@ var ExecutionRecorder = class _ExecutionRecorder {
15604
16654
  };
15605
16655
 
15606
16656
  // src/plugins/aidon/nodes/ChatAidonNode.ts
15607
- var import_lodash_es17 = require("lodash");
15608
- var import_ts_dedent70 = require("ts-dedent");
16657
+ var import_lodash_es18 = require("lodash");
16658
+ var import_ts_dedent71 = require("ts-dedent");
15609
16659
  var registry = globalRivetNodeRegistry;
15610
16660
  var ChatAidonNodeImpl = class extends ChatNodeImpl {
15611
16661
  create() {
@@ -15656,7 +16706,7 @@ var ChatAidonNodeImpl = class extends ChatNodeImpl {
15656
16706
  }
15657
16707
  return path;
15658
16708
  }
15659
- async callToolGet(parsedArgs, schemaDetail, path, data) {
16709
+ async callToolGet(schemaDetail, path, parsedArgs, data) {
15660
16710
  const queryParams = new URLSearchParams(
15661
16711
  parsedArgs.parameters
15662
16712
  ).toString();
@@ -15734,7 +16784,7 @@ var ChatAidonNodeImpl = class extends ChatNodeImpl {
15734
16784
  if (schemaDetail.requestInBody) {
15735
16785
  data = await this.callToolPost(schemaDetail, path, functionCall.arguments, data);
15736
16786
  } else {
15737
- data = await this.callToolGet(functionCall.arguments, schemaDetail, path, data);
16787
+ data = await this.callToolGet(schemaDetail, path, functionCall.arguments, data);
15738
16788
  }
15739
16789
  messages["value"].push({
15740
16790
  type: "function",
@@ -15742,7 +16792,7 @@ var ChatAidonNodeImpl = class extends ChatNodeImpl {
15742
16792
  message: JSON.stringify(data)
15743
16793
  });
15744
16794
  }
15745
- inputs = (0, import_lodash_es17.omit)(inputs, ["functions", "prompt"]);
16795
+ inputs = (0, import_lodash_es18.omit)(inputs, ["functions", "prompt"]);
15746
16796
  inputs["prompt"] = messages;
15747
16797
  outputs = await super.process(inputs, context);
15748
16798
  }
@@ -15786,7 +16836,7 @@ var createPluginNodeImpl = (chatNode2) => {
15786
16836
  },
15787
16837
  getUIData() {
15788
16838
  return {
15789
- infoBoxBody: import_ts_dedent70.dedent`
16839
+ infoBoxBody: import_ts_dedent71.dedent`
15790
16840
  Makes a call to an Aidon chat model. The settings contains many options for tweaking the model's behavior.
15791
16841
  `,
15792
16842
  infoBoxTitle: "Chat (Aidon) Node",
@@ -15810,6 +16860,21 @@ function chatAidonNode() {
15810
16860
  var aidonPlugin = {
15811
16861
  id: "aidon",
15812
16862
  name: "Aidon",
16863
+ configSpec: {
16864
+ aidonURL: {
16865
+ type: "string",
16866
+ label: "Aidon URL",
16867
+ description: "The URL for the Aidon application.",
16868
+ helperText: "Defaults to https://app.aidon.ai. URL for the Aidon application.",
16869
+ default: "https://app.aidon.ai"
16870
+ },
16871
+ aidonKey: {
16872
+ type: "secret",
16873
+ label: "Aidon API Key",
16874
+ description: "The API Key for the Aidon application.",
16875
+ helperText: "API Key for the Aidon application."
16876
+ }
16877
+ },
15813
16878
  register: (register) => {
15814
16879
  register(chatAidonNode());
15815
16880
  }
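Editor's note: the new configSpec above registers two plugin settings, aidonURL and aidonKey. A sketch of supplying them when running a graph; the pluginSettings shape (keyed by plugin id, then by config key) is an assumption about the processor settings and is not shown in this diff.

// Hypothetical run-time settings providing the Aidon plugin configuration.
const settings = {
  openAiKey: '',
  pluginSettings: {
    aidon: {
      aidonURL: 'https://app.aidon.ai',            // same value as the configSpec default above
      aidonKey: process.env.AIDON_API_KEY ?? '',   // secret; read from the environment here
    },
  },
};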
@@ -16001,6 +17066,22 @@ var anthropicModels = {
16001
17066
  completion: 15e-6
16002
17067
  },
16003
17068
  displayName: "Claude 3.7 Sonnet"
17069
+ },
17070
+ "claude-sonnet-4-20250514": {
17071
+ maxTokens: 2e5,
17072
+ cost: {
17073
+ prompt: 3e-6,
17074
+ completion: 375e-8
17075
+ },
17076
+ displayName: "Claude Sonnet 4"
17077
+ },
17078
+ "claude-opus-4-20250514": {
17079
+ maxTokens: 2e5,
17080
+ cost: {
17081
+ prompt: 15e-6,
17082
+ completion: 1875e-8
17083
+ },
17084
+ displayName: "Claude Opus 4"
16004
17085
  }
16005
17086
  };
16006
17087
  var anthropicModelOptions = Object.entries(anthropicModels).map(([id, { displayName }]) => ({
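Editor's note: the cost entries in anthropicModels read as USD per token (3e-6 is $3 per million prompt tokens). Under that reading, a rough per-call estimate for the new claude-sonnet-4-20250514 entry works out as below; the token counts are illustrative.

// Rough cost estimate from the per-token rates declared above.
const promptTokens = 1000;
const completionTokens = 500;
const estimatedCost = promptTokens * 3e-6 + completionTokens * 375e-8;
// 0.003 + 0.001875 = 0.004875 USD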
@@ -16011,6 +17092,7 @@ async function* streamChatCompletions2({
16011
17092
  apiEndpoint,
16012
17093
  apiKey,
16013
17094
  signal,
17095
+ additionalHeaders,
16014
17096
  ...rest
16015
17097
  }) {
16016
17098
  const defaultSignal = new AbortController().signal;
@@ -16020,7 +17102,8 @@ async function* streamChatCompletions2({
16020
17102
  "Content-Type": "application/json",
16021
17103
  "x-api-key": apiKey,
16022
17104
  "anthropic-version": "2023-06-01",
16023
- "anthropic-dangerous-direct-browser-access": "true"
17105
+ "anthropic-dangerous-direct-browser-access": "true",
17106
+ ...additionalHeaders
16024
17107
  },
16025
17108
  body: JSON.stringify({
16026
17109
  ...rest,
@@ -16055,6 +17138,7 @@ async function* streamMessageApi({
16055
17138
  apiKey,
16056
17139
  signal,
16057
17140
  beta,
17141
+ additionalHeaders,
16058
17142
  ...rest
16059
17143
  }) {
16060
17144
  const defaultSignal = new AbortController().signal;
@@ -16065,7 +17149,8 @@ async function* streamMessageApi({
16065
17149
  "x-api-key": apiKey,
16066
17150
  "anthropic-version": "2023-06-01",
16067
17151
  "anthropic-dangerous-direct-browser-access": "true",
16068
- ...beta ? { "anthropic-beta": beta } : {}
17152
+ ...beta ? { "anthropic-beta": beta } : {},
17153
+ ...additionalHeaders
16069
17154
  },
16070
17155
  body: JSON.stringify({
16071
17156
  ...rest,
@@ -16104,8 +17189,8 @@ var AnthropicError = class extends Error {
16104
17189
  };
16105
17190
 
16106
17191
  // src/plugins/anthropic/nodes/ChatAnthropicNode.ts
16107
- var import_non_secure78 = require("nanoid/non-secure");
16108
- var import_ts_dedent71 = require("ts-dedent");
17192
+ var import_non_secure80 = require("nanoid/non-secure");
17193
+ var import_ts_dedent72 = require("ts-dedent");
16109
17194
  var import_p_retry2 = __toESM(require("p-retry-4"), 1);
16110
17195
  var import_ts_pattern10 = require("ts-pattern");
16111
17196
 
@@ -16121,14 +17206,14 @@ var ChatAnthropicNodeImpl = {
16121
17206
  const chartNode = {
16122
17207
  type: "chatAnthropic",
16123
17208
  title: "Chat (Anthropic)",
16124
- id: (0, import_non_secure78.nanoid)(),
17209
+ id: (0, import_non_secure80.nanoid)(),
16125
17210
  visualData: {
16126
17211
  x: 0,
16127
17212
  y: 0,
16128
17213
  width: 275
16129
17214
  },
16130
17215
  data: {
16131
- model: "claude-3-7-sonnet-latest",
17216
+ model: "claude-sonnet-4-20250514",
16132
17217
  useModelInput: false,
16133
17218
  temperature: 0.5,
16134
17219
  useTemperatureInput: false,
@@ -16221,6 +17306,14 @@ var ChatAnthropicNodeImpl = {
16221
17306
  coerced: false
16222
17307
  });
16223
17308
  }
17309
+ if (data.useHeadersInput) {
17310
+ inputs.push({
17311
+ dataType: "object",
17312
+ id: "headers",
17313
+ title: "Headers",
17314
+ description: "Additional headers to send to the API."
17315
+ });
17316
+ }
16224
17317
  return inputs;
16225
17318
  },
16226
17319
  getOutputDefinitions(data) {
@@ -16257,7 +17350,7 @@ var ChatAnthropicNodeImpl = {
16257
17350
  getBody(data) {
16258
17351
  var _a;
16259
17352
  const modelName = data.overrideModel ? data.overrideModel : ((_a = anthropicModels[data.model]) == null ? void 0 : _a.displayName) ?? "Unknown Model";
16260
- return import_ts_dedent71.dedent`
17353
+ return import_ts_dedent72.dedent`
16261
17354
  ${modelName}
16262
17355
  ${data.useTopP ? `Top P: ${data.useTopPInput ? "(Using Input)" : data.top_p}` : `Temperature: ${data.useTemperatureInput ? "(Using Input)" : data.temperature}`}
16263
17356
  Max Tokens: ${data.maxTokens}
@@ -16367,6 +17460,14 @@ var ChatAnthropicNodeImpl = {
16367
17460
  dataKey: "overrideModel",
16368
17461
  useInputToggleDataKey: "useOverrideModelInput",
16369
17462
  helperMessage: "Overrides the AI model used for the chat node to this value."
17463
+ },
17464
+ {
17465
+ type: "keyValuePair",
17466
+ label: "Headers",
17467
+ dataKey: "headers",
17468
+ useInputToggleDataKey: "useHeadersInput",
17469
+ keyPlaceholder: "Header",
17470
+ helperMessage: "Additional headers to send to the API."
16370
17471
  }
16371
17472
  ]
16372
17473
  }
@@ -16374,7 +17475,7 @@ var ChatAnthropicNodeImpl = {
16374
17475
  },
16375
17476
  getUIData() {
16376
17477
  return {
16377
- infoBoxBody: import_ts_dedent71.dedent`
17478
+ infoBoxBody: import_ts_dedent72.dedent`
16378
17479
  Makes a call to an Anthropic chat model. The settings contains many options for tweaking the model's behavior.
16379
17480
  `,
16380
17481
  infoBoxTitle: "Chat (Anthropic) Node",
@@ -16433,6 +17534,18 @@ Assistant: ${content}`;
16433
17534
  addWarning(output, message);
16434
17535
  maxTokens = Math.floor((modelInfo.maxTokens - tokenCountEstimate) * 0.95);
16435
17536
  }
17537
+ const headersFromData = (data.headers ?? []).reduce(
17538
+ (acc, header) => {
17539
+ acc[header.key] = header.value;
17540
+ return acc;
17541
+ },
17542
+ {}
17543
+ );
17544
+ const additionalHeaders = data.useHeadersInput ? coerceTypeOptional(inputs["headers"], "object") ?? headersFromData : headersFromData;
17545
+ const allAdditionalHeaders = cleanHeaders({
17546
+ ...context.settings.chatNodeHeaders,
17547
+ ...additionalHeaders
17548
+ });
16436
17549
  try {
16437
17550
  return await (0, import_p_retry2.default)(
16438
17551
  async () => {
@@ -16455,7 +17568,7 @@ Assistant: ${content}`;
16455
17568
  messages,
16456
17569
  tools: tools ? tools.map((tool) => ({ name: tool.name, description: tool.description, input_schema: tool.parameters })) : void 0
16457
17570
  };
16458
- const useMessageApi = model.startsWith("claude-3");
17571
+ const useMessageApi = model.startsWith("claude-3") || model.startsWith("claude-sonnet") || model.startsWith("claude-opus");
16459
17572
  const cacheKey = JSON.stringify(useMessageApi ? messageOptions : completionOptions);
16460
17573
  if (data.cache) {
16461
17574
  const cached = cache2.get(cacheKey);
@@ -16474,6 +17587,7 @@ Assistant: ${content}`;
16474
17587
  apiKey: apiKey ?? "",
16475
17588
  signal: context.signal,
16476
17589
  beta: "prompt-caching-2024-07-31",
17590
+ additionalHeaders: allAdditionalHeaders,
16477
17591
  ...messageOptions
16478
17592
  });
16479
17593
  const responseParts = [];
@@ -16587,6 +17701,7 @@ Assistant: ${content}`;
16587
17701
  apiEndpoint,
16588
17702
  apiKey: apiKey ?? "",
16589
17703
  signal: context.signal,
17704
+ additionalHeaders: allAdditionalHeaders,
16590
17705
  ...completionOptions
16591
17706
  });
16592
17707
  const responseParts = [];
@@ -16869,8 +17984,8 @@ var anthropicPlugin = {
16869
17984
  var anthropic_default = anthropicPlugin;
16870
17985
 
16871
17986
  // src/plugins/autoevals/AutoEvalsNode.ts
16872
- var import_non_secure79 = require("nanoid/non-secure");
16873
- var import_ts_dedent72 = require("ts-dedent");
17987
+ var import_non_secure81 = require("nanoid/non-secure");
17988
+ var import_ts_dedent73 = require("ts-dedent");
16874
17989
  var import_autoevals = require("autoevals");
16875
17990
  var import_ts_pattern11 = require("ts-pattern");
16876
17991
  var options = [
@@ -16889,7 +18004,7 @@ var AutoEvalsNodeImpl = {
16889
18004
  const chartNode = {
16890
18005
  type: "autoevals",
16891
18006
  title: "Autoevals",
16892
- id: (0, import_non_secure79.nanoid)(),
18007
+ id: (0, import_non_secure81.nanoid)(),
16893
18008
  visualData: {
16894
18009
  x: 0,
16895
18010
  y: 0,
@@ -17004,7 +18119,7 @@ var AutoEvalsNodeImpl = {
17004
18119
  },
17005
18120
  getUIData() {
17006
18121
  return {
17007
- infoBoxBody: import_ts_dedent72.dedent`
18122
+ infoBoxBody: import_ts_dedent73.dedent`
17008
18123
  Evaluates the validity of a response using the autoevals library.
17009
18124
  `,
17010
18125
  infoBoxTitle: "Autoevals Node",
@@ -17084,8 +18199,8 @@ var autoevalsPlugin = {
17084
18199
  var autoevals_default = autoevalsPlugin;
17085
18200
 
17086
18201
  // src/plugins/assemblyAi/LemurQaNode.ts
17087
- var import_non_secure80 = require("nanoid/non-secure");
17088
- var import_ts_dedent73 = require("ts-dedent");
18202
+ var import_non_secure82 = require("nanoid/non-secure");
18203
+ var import_ts_dedent74 = require("ts-dedent");
17089
18204
 
17090
18205
  // src/plugins/assemblyAi/lemurHelpers.ts
17091
18206
  var import_assemblyai = require("assemblyai");
@@ -17205,7 +18320,7 @@ var LemurQaNodeImpl = {
17205
18320
  const chartNode = {
17206
18321
  type: "assemblyAiLemurQa",
17207
18322
  title: "LeMUR Question & Answers",
17208
- id: (0, import_non_secure80.nanoid)(),
18323
+ id: (0, import_non_secure82.nanoid)(),
17209
18324
  visualData: {
17210
18325
  x: 0,
17211
18326
  y: 0,
@@ -17271,7 +18386,7 @@ var LemurQaNodeImpl = {
17271
18386
  },
17272
18387
  getUIData() {
17273
18388
  return {
17274
- infoBoxBody: import_ts_dedent73.dedent`Use AssemblyAI LeMUR to ask questions about transcripts`,
18389
+ infoBoxBody: import_ts_dedent74.dedent`Use AssemblyAI LeMUR to ask questions about transcripts`,
17275
18390
  infoBoxTitle: "Use AssemblyAI LeMUR Question & Answer",
17276
18391
  contextMenuTitle: "LeMUR Q&A",
17277
18392
  group: ["AI", "AssemblyAI"]
@@ -17343,14 +18458,14 @@ function applyQuestionEditors(data, question) {
17343
18458
  var lemurQaNode = pluginNodeDefinition(LemurQaNodeImpl, "LeMUR Q&A");
17344
18459
 
17345
18460
  // src/plugins/assemblyAi/TranscribeAudioNode.ts
17346
- var import_non_secure81 = require("nanoid/non-secure");
17347
- var import_ts_dedent74 = require("ts-dedent");
18461
+ var import_non_secure83 = require("nanoid/non-secure");
18462
+ var import_ts_dedent75 = require("ts-dedent");
17348
18463
  var TranscribeAudioNodeImpl = {
17349
18464
  create() {
17350
18465
  const chartNode = {
17351
18466
  type: "assemblyAiTranscribeAudio",
17352
18467
  title: "Transcribe Audio",
17353
- id: (0, import_non_secure81.nanoid)(),
18468
+ id: (0, import_non_secure83.nanoid)(),
17354
18469
  visualData: {
17355
18470
  x: 0,
17356
18471
  y: 0,
@@ -17405,7 +18520,7 @@ var TranscribeAudioNodeImpl = {
17405
18520
  },
17406
18521
  getUIData() {
17407
18522
  return {
17408
- infoBoxBody: import_ts_dedent74.dedent`Use AssemblyAI to transcribe audio`,
18523
+ infoBoxBody: import_ts_dedent75.dedent`Use AssemblyAI to transcribe audio`,
17409
18524
  infoBoxTitle: "Transcribe Audio Node",
17410
18525
  contextMenuTitle: "Transcribe Audio",
17411
18526
  group: ["AI", "AssemblyAI"]
@@ -17464,15 +18579,15 @@ function getAdditionalParameters(data) {
17464
18579
  }
17465
18580
 
17466
18581
  // src/plugins/assemblyAi/LemurSummaryNode.ts
17467
- var import_non_secure82 = require("nanoid/non-secure");
17468
- var import_ts_dedent75 = require("ts-dedent");
18582
+ var import_non_secure84 = require("nanoid/non-secure");
18583
+ var import_ts_dedent76 = require("ts-dedent");
17469
18584
  var import_assemblyai3 = require("assemblyai");
17470
18585
  var LemurSummaryNodeImpl = {
17471
18586
  create() {
17472
18587
  const chartNode = {
17473
18588
  type: "assemblyAiLemurSummary",
17474
18589
  title: "LeMUR Summary",
17475
- id: (0, import_non_secure82.nanoid)(),
18590
+ id: (0, import_non_secure84.nanoid)(),
17476
18591
  visualData: {
17477
18592
  x: 0,
17478
18593
  y: 0,
@@ -17518,7 +18633,7 @@ var LemurSummaryNodeImpl = {
17518
18633
  },
17519
18634
  getUIData() {
17520
18635
  return {
17521
- infoBoxBody: import_ts_dedent75.dedent`Use AssemblyAI LeMUR Summary to summarize transcripts`,
18636
+ infoBoxBody: import_ts_dedent76.dedent`Use AssemblyAI LeMUR Summary to summarize transcripts`,
17522
18637
  infoBoxTitle: "Use AssemblyAI LeMUR Summary",
17523
18638
  contextMenuTitle: "LeMUR Summary",
17524
18639
  group: ["AI", "AssemblyAI"]
@@ -17542,15 +18657,15 @@ var LemurSummaryNodeImpl = {
17542
18657
  var lemurSummaryNode = pluginNodeDefinition(LemurSummaryNodeImpl, "LeMUR Summary");
17543
18658
 
17544
18659
  // src/plugins/assemblyAi/LemurTaskNode.ts
17545
- var import_non_secure83 = require("nanoid/non-secure");
17546
- var import_ts_dedent76 = require("ts-dedent");
18660
+ var import_non_secure85 = require("nanoid/non-secure");
18661
+ var import_ts_dedent77 = require("ts-dedent");
17547
18662
  var import_assemblyai4 = require("assemblyai");
17548
18663
  var LemurTaskNodeImpl = {
17549
18664
  create() {
17550
18665
  const chartNode = {
17551
18666
  type: "assemblyAiLemurTask",
17552
18667
  title: "LeMUR Task",
17553
- id: (0, import_non_secure83.nanoid)(),
18668
+ id: (0, import_non_secure85.nanoid)(),
17554
18669
  visualData: {
17555
18670
  x: 0,
17556
18671
  y: 0,
@@ -17596,7 +18711,7 @@ var LemurTaskNodeImpl = {
17596
18711
  },
17597
18712
  getUIData() {
17598
18713
  return {
17599
- infoBoxBody: import_ts_dedent76.dedent`Use AssemblyAI LeMUR Custom Task to ask anything.`,
18714
+ infoBoxBody: import_ts_dedent77.dedent`Use AssemblyAI LeMUR Custom Task to ask anything.`,
17600
18715
  infoBoxTitle: "Use AssemblyAI LeMUR Custom Task",
17601
18716
  contextMenuTitle: "LeMUR Custom Task",
17602
18717
  group: ["AI", "AssemblyAI"]
@@ -17622,14 +18737,14 @@ var LemurTaskNodeImpl = {
17622
18737
  var lemurTaskNode = pluginNodeDefinition(LemurTaskNodeImpl, "LeMUR Task");
17623
18738
 
17624
18739
  // src/plugins/assemblyAi/LemurActionItemsNode.ts
17625
- var import_non_secure84 = require("nanoid/non-secure");
17626
- var import_ts_dedent77 = require("ts-dedent");
18740
+ var import_non_secure86 = require("nanoid/non-secure");
18741
+ var import_ts_dedent78 = require("ts-dedent");
17627
18742
  var LemurActionItemsNodeImpl = {
17628
18743
  create() {
17629
18744
  const chartNode = {
17630
18745
  type: "assemblyAiLemurActionItems",
17631
18746
  title: "LeMUR Action Items",
17632
- id: (0, import_non_secure84.nanoid)(),
18747
+ id: (0, import_non_secure86.nanoid)(),
17633
18748
  visualData: {
17634
18749
  x: 0,
17635
18750
  y: 0,
@@ -17675,7 +18790,7 @@ var LemurActionItemsNodeImpl = {
17675
18790
  },
17676
18791
  getUIData() {
17677
18792
  return {
17678
- infoBoxBody: import_ts_dedent77.dedent`Use AssemblyAI LeMUR Action Items to extract action items`,
18793
+ infoBoxBody: import_ts_dedent78.dedent`Use AssemblyAI LeMUR Action Items to extract action items`,
17679
18794
  infoBoxTitle: "Use AssemblyAI LeMUR Action Items",
17680
18795
  contextMenuTitle: "LeMUR Action Items",
17681
18796
  group: ["AI", "AssemblyAI"]
@@ -17727,12 +18842,12 @@ var assemblyAiPlugin = {
17727
18842
  var assemblyAi_default = assemblyAiPlugin;
17728
18843
 
17729
18844
  // src/plugins/huggingface/nodes/ChatHuggingFace.ts
17730
- var import_non_secure85 = require("nanoid/non-secure");
18845
+ var import_non_secure87 = require("nanoid/non-secure");
17731
18846
  var import_inference = require("@huggingface/inference");
17732
18847
  var ChatHuggingFaceNodeImpl = {
17733
18848
  create() {
17734
18849
  return {
17735
- id: (0, import_non_secure85.nanoid)(),
18850
+ id: (0, import_non_secure87.nanoid)(),
17736
18851
  type: "chatHuggingFace",
17737
18852
  data: {
17738
18853
  model: "",
@@ -17925,7 +19040,7 @@ var ChatHuggingFaceNodeImpl = {
17925
19040
  const repetitionPenalty = getInputOrData(data, inputData, "repetitionPenalty", "number");
17926
19041
  const topP = getInputOrData(data, inputData, "topP", "number");
17927
19042
  const topK = getInputOrData(data, inputData, "topK", "number");
17928
- const hf = endpoint ? new import_inference.HfInferenceEndpoint(endpoint, accessToken) : new import_inference.HfInference(accessToken);
19043
+ const hf = endpoint ? new import_inference.InferenceClient(accessToken, { endpointUrl: endpoint }) : new import_inference.InferenceClient(accessToken);
17929
19044
  const generationStream = hf.textGenerationStream({
17930
19045
  inputs: prompt,
17931
19046
  model,
@@ -17962,13 +19077,13 @@ var ChatHuggingFaceNodeImpl = {
17962
19077
  var chatHuggingFaceNode = pluginNodeDefinition(ChatHuggingFaceNodeImpl, "Chat (Hugging Face)");
17963
19078
 
17964
19079
  // src/plugins/huggingface/nodes/TextToImageHuggingFace.ts
17965
- var import_non_secure86 = require("nanoid/non-secure");
19080
+ var import_non_secure88 = require("nanoid/non-secure");
17966
19081
  var import_inference2 = require("@huggingface/inference");
17967
- var import_ts_dedent78 = require("ts-dedent");
19082
+ var import_ts_dedent79 = require("ts-dedent");
17968
19083
  var TextToImageHuggingFaceNodeImpl = {
17969
19084
  create() {
17970
19085
  return {
17971
- id: (0, import_non_secure86.nanoid)(),
19086
+ id: (0, import_non_secure88.nanoid)(),
17972
19087
  type: "textToImageHuggingFace",
17973
19088
  data: {
17974
19089
  model: "",
@@ -18109,7 +19224,7 @@ var TextToImageHuggingFaceNodeImpl = {
18109
19224
  ];
18110
19225
  },
18111
19226
  getBody(data) {
18112
- return import_ts_dedent78.dedent`
19227
+ return import_ts_dedent79.dedent`
18113
19228
  Model: ${data.useModelInput ? "(Using Input)" : data.model}
18114
19229
  `;
18115
19230
  },
@@ -18123,18 +19238,21 @@ var TextToImageHuggingFaceNodeImpl = {
18123
19238
  const negativePrompt = getInputOrData(data, inputData, "negativePrompt") || void 0;
18124
19239
  const guidanceScale = getInputOrData(data, inputData, "guidanceScale", "number");
18125
19240
  const numInferenceSteps = getInputOrData(data, inputData, "numInferenceSteps", "number");
18126
- const hf = endpoint ? new import_inference2.HfInferenceEndpoint(endpoint, accessToken) : new import_inference2.HfInference(accessToken);
18127
- const image = await hf.textToImage({
18128
- inputs: prompt,
18129
- model,
18130
- parameters: {
18131
- width,
18132
- height,
18133
- negative_prompt: negativePrompt,
18134
- guidance_scale: guidanceScale,
18135
- num_inference_steps: numInferenceSteps
18136
- }
18137
- });
19241
+ const hf = endpoint ? new import_inference2.InferenceClient(accessToken, { endpointUrl: endpoint }) : new import_inference2.InferenceClient(accessToken);
19242
+ const image = await hf.textToImage(
19243
+ {
19244
+ inputs: prompt,
19245
+ model,
19246
+ parameters: {
19247
+ width,
19248
+ height,
19249
+ negative_prompt: negativePrompt,
19250
+ guidance_scale: guidanceScale,
19251
+ num_inference_steps: numInferenceSteps
19252
+ }
19253
+ },
19254
+ { outputType: "blob" }
19255
+ );
18138
19256
  return {
18139
19257
  ["output"]: {
18140
19258
  type: "image",
@@ -18285,7 +19403,7 @@ var pinecone_default = pineconePlugin;
18285
19403
 
18286
19404
  // src/plugins/gentrace/plugin.ts
18287
19405
  var import_core = require("@gentrace/core");
18288
- var import_lodash_es18 = require("lodash");
19406
+ var import_lodash_es19 = require("lodash");
18289
19407
  var apiKeyConfigSpec = {
18290
19408
  type: "secret",
18291
19409
  label: "Gentrace API Key",
@@ -20844,7 +21962,7 @@ ${additional.join("\n")}`;
20844
21962
  var runThreadNode = pluginNodeDefinition(RunThreadNodeImpl, "Run Thread");
20845
21963
 
20846
21964
  // src/plugins/openai/nodes/ThreadMessageNode.ts
20847
- var import_lodash_es19 = require("lodash");
21965
+ var import_lodash_es20 = require("lodash");
20848
21966
  var ThreadMessageNodeImpl = {
20849
21967
  create() {
20850
21968
  return {
@@ -20969,7 +22087,7 @@ var ThreadMessageNodeImpl = {
20969
22087
  if (data.useMetadataInput && inputData["metadata"]) {
20970
22088
  metadata = coerceTypeOptional(inputData["metadata"], "object");
20971
22089
  }
20972
- const inputMap = (0, import_lodash_es19.mapValues)(inputData, (input) => coerceType(input, "string"));
22090
+ const inputMap = (0, import_lodash_es20.mapValues)(inputData, (input) => coerceType(input, "string"));
20973
22091
  const interpolated = interpolate(text, inputMap);
20974
22092
  return {
20975
22093
  ["message"]: {
@@ -21017,7 +22135,7 @@ var openAIPlugin = {
21017
22135
  };
21018
22136
 
21019
22137
  // src/plugins/google/google.ts
21020
- var import_generative_ai = require("@google/generative-ai");
22138
+ var import_genai = require("@google/genai");
21021
22139
  var googleModelsDeprecated = {
21022
22140
  "gemini-pro": {
21023
22141
  maxTokens: 32760,
@@ -21037,12 +22155,20 @@ var googleModelsDeprecated = {
21037
22155
  }
21038
22156
  };
21039
22157
  var generativeAiGoogleModels = {
21040
- "gemini-2.0-flash-001": {
22158
+ "gemini-2.5-flash-preview-04-17": {
21041
22159
  maxTokens: 1048576,
21042
22160
  cost: {
21043
22161
  prompt: 0.15 / 1e3,
21044
22162
  completion: 0.6 / 1e3
21045
22163
  },
22164
+ displayName: "Gemini 2.5 Flash Preview"
22165
+ },
22166
+ "gemini-2.0-flash-001": {
22167
+ maxTokens: 1048576,
22168
+ cost: {
22169
+ prompt: 0.1 / 1e3,
22170
+ completion: 0.4 / 1e3
22171
+ },
21046
22172
  displayName: "Gemini 2.0 Flash"
21047
22173
  },
21048
22174
  "gemini-2.0-pro-exp-02-05": {
@@ -21142,36 +22268,42 @@ async function* streamGenerativeAi({
  topP,
  topK,
  signal,
- tools
+ tools,
+ thinkingBudget,
+ additionalHeaders
  }) {
  var _a, _b, _c, _d, _e;
- const { GoogleGenerativeAI } = await import("@google/generative-ai");
- const genAi = new GoogleGenerativeAI(apiKey);
- const genaiModel = genAi.getGenerativeModel({
+ const { GoogleGenAI } = await import("@google/genai");
+ const genAi = new GoogleGenAI({ apiKey });
+ const result = await genAi.models.generateContentStream({
  model,
- systemInstruction: systemPrompt,
- generationConfig: {
+ contents: prompt,
+ config: {
+ systemInstruction: systemPrompt,
  maxOutputTokens,
  temperature,
  topP,
- topK
- },
- tools
+ topK,
+ tools,
+ abortSignal: signal,
+ thinkingConfig: {
+ thinkingBudget
+ },
+ httpOptions: {
+ headers: {
+ ...additionalHeaders
+ }
+ }
+ }
  });
- const result = await genaiModel.generateContentStream(
- {
- contents: prompt
- },
- { signal }
- );
- for await (const chunk of result.stream) {
+ for await (const chunk of result) {
  const outChunk = {
  completion: void 0,
  finish_reason: void 0,
  function_calls: void 0,
  model
  };
- const functionCalls = chunk.functionCalls();
+ const functionCalls = chunk.functionCalls;
  if (functionCalls) {
  outChunk.function_calls = functionCalls;
  }
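This hunk swaps `@google/generative-ai` for the newer `@google/genai` SDK: generation settings move under `config`, the abort signal and extra headers ride along in the same request object, and `functionCalls` becomes a property rather than a method. A stripped-down sketch of the same call shape; the model name, key, and prompt are placeholders, and `chunk.text` is assumed to be the text accessor in the new SDK (only `functionCalls` is visible in this hunk):

```ts
import { GoogleGenAI } from "@google/genai";

const genAi = new GoogleGenAI({ apiKey: process.env.GOOGLE_API_KEY! });

const stream = await genAi.models.generateContentStream({
  model: "gemini-2.5-flash-preview-04-17",
  contents: [{ role: "user", parts: [{ text: "Say hello." }] }],
  config: {
    systemInstruction: "You are terse.",
    maxOutputTokens: 256,
    temperature: 0.5,
    thinkingConfig: { thinkingBudget: 1024 },       // omit for an automatic budget
    httpOptions: { headers: { "x-example": "1" } }, // extra headers, as wired above
  },
});

for await (const chunk of stream) {
  // Plain properties in the new SDK, not methods.
  if (chunk.functionCalls) console.log(chunk.functionCalls);
  if (chunk.text) process.stdout.write(chunk.text);
}
```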
@@ -21232,26 +22364,26 @@ async function* streamChatCompletions3({
  }
 
  // src/plugins/google/nodes/ChatGoogleNode.ts
- var import_non_secure87 = require("nanoid/non-secure");
- var import_ts_dedent79 = require("ts-dedent");
+ var import_non_secure89 = require("nanoid/non-secure");
+ var import_ts_dedent80 = require("ts-dedent");
  var import_p_retry3 = __toESM(require("p-retry-4"), 1);
  var import_ts_pattern12 = require("ts-pattern");
- var import_generative_ai2 = require("@google/generative-ai");
- var import_lodash_es20 = require("lodash");
+ var import_genai2 = require("@google/genai");
+ var import_lodash_es21 = require("lodash");
  var cache3 = /* @__PURE__ */ new Map();
  var ChatGoogleNodeImpl = {
  create() {
  const chartNode = {
  type: "chatGoogle",
  title: "Chat (Google)",
- id: (0, import_non_secure87.nanoid)(),
+ id: (0, import_non_secure89.nanoid)(),
  visualData: {
  x: 0,
  y: 0,
  width: 275
  },
  data: {
- model: "gemini-2.0-flash-001",
+ model: "gemini-2.5-flash-preview-04-17",
  useModelInput: false,
  temperature: 0.5,
  useTemperatureInput: false,
@@ -21265,7 +22397,9 @@ var ChatGoogleNodeImpl = {
  useMaxTokensInput: false,
  cache: false,
  useAsGraphPartialOutput: true,
- useToolCalling: false
+ useToolCalling: false,
+ thinkingBudget: void 0,
+ useThinkingBudgetInput: false
  }
  };
  return chartNode;
@@ -21323,11 +22457,27 @@ var ChatGoogleNodeImpl = {
  description: "Tools available for the model to call."
  });
  }
+ if (data.useThinkingBudgetInput) {
+ inputs.push({
+ dataType: "number",
+ id: "thinkingBudget",
+ title: "Thinking Budget",
+ description: "The token budget for the model to think before responding."
+ });
+ }
  inputs.push({
  dataType: ["chat-message", "chat-message[]"],
  id: "prompt",
  title: "Prompt"
  });
+ if (data.useHeadersInput) {
+ inputs.push({
+ dataType: "object",
+ id: "headers",
+ title: "Headers",
+ description: "Additional headers to send to the API."
+ });
+ }
  return inputs;
  },
  getOutputDefinitions(data) {
@@ -21361,10 +22511,11 @@ var ChatGoogleNodeImpl = {
  },
  getBody(data) {
  var _a;
- return import_ts_dedent79.dedent`
+ return import_ts_dedent80.dedent`
  ${((_a = generativeAiGoogleModels[data.model]) == null ? void 0 : _a.displayName) ?? `Google (${data.model})`}
  ${data.useTopP ? `Top P: ${data.useTopPInput ? "(Using Input)" : data.top_p}` : `Temperature: ${data.useTemperatureInput ? "(Using Input)" : data.temperature}`}
  Max Tokens: ${data.maxTokens}
+ Thinking Budget: ${data.thinkingBudget ?? "Automatic"}
  `;
  },
  getEditors() {
@@ -21409,6 +22560,17 @@ var ChatGoogleNodeImpl = {
  max: Number.MAX_SAFE_INTEGER,
  step: 1
  },
+ {
+ type: "number",
+ label: "Thinking Budget",
+ dataKey: "thinkingBudget",
+ allowEmpty: true,
+ step: 1,
+ min: 0,
+ max: Number.MAX_SAFE_INTEGER,
+ useInputToggleDataKey: "useThinkingBudgetInput",
+ helperMessage: "The token budget for the model to think before responding. Leave blank for automatic budget."
+ },
  {
  type: "toggle",
  label: "Enable Tool Calling",
@@ -21423,12 +22585,20 @@ var ChatGoogleNodeImpl = {
  type: "toggle",
  label: "Use for subgraph partial output",
  dataKey: "useAsGraphPartialOutput"
+ },
+ {
+ type: "keyValuePair",
+ label: "Headers",
+ dataKey: "headers",
+ useInputToggleDataKey: "useHeadersInput",
+ keyPlaceholder: "Header",
+ helperMessage: "Additional headers to send to the API."
  }
  ];
  },
  getUIData() {
  return {
- infoBoxBody: import_ts_dedent79.dedent`
+ infoBoxBody: import_ts_dedent80.dedent`
  Makes a call to an Google chat model. The settings contains many options for tweaking the model's behavior.
  `,
  infoBoxTitle: "Chat (Google) Node",
@@ -21444,6 +22614,7 @@ var ChatGoogleNodeImpl = {
  const temperature = getInputOrData(data, inputs, "temperature", "number");
  const topP = getInputOrData(data, inputs, "top_p", "number");
  const useTopP = getInputOrData(data, inputs, "useTopP", "boolean");
+ const thinkingBudget = getInputOrData(data, inputs, "thinkingBudget", "number");
  const { messages } = getChatGoogleNodeMessages(inputs);
  let prompt = await Promise.all(
  messages.map(async (message) => {
@@ -21542,8 +22713,8 @@ var ChatGoogleNodeImpl = {
  name: tool.name,
  description: tool.description,
  parameters: Object.keys(tool.parameters.properties).length === 0 ? void 0 : {
- type: import_generative_ai2.SchemaType.OBJECT,
- properties: (0, import_lodash_es20.mapValues)(tool.parameters.properties, (p) => ({
+ type: import_genai2.Type.OBJECT,
+ properties: (0, import_lodash_es21.mapValues)(tool.parameters.properties, (p) => ({
  // gemini doesn't support union property types, it uses openapi style not jsonschema, what a mess
  type: Array.isArray(p.type) ? p.type.filter((t) => t !== "null")[0] : p.type,
  description: p.description
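The hunk above converts the node's JSON-schema-style tool parameters into the `@google/genai` function-declaration shape, switching from `SchemaType.OBJECT` to the new `Type.OBJECT` enum and keeping only the first non-null member of any union type. A hedged sketch of that conversion for a single hypothetical tool (the tool itself and its fields are invented for illustration):

```ts
import { Type } from "@google/genai";
import { mapValues } from "lodash";

// Hypothetical tool in the node's JSON-schema-ish shape.
const tool = {
  name: "get_weather",
  description: "Look up current weather",
  parameters: {
    properties: {
      city: { type: "string", description: "City name" },
      units: { type: ["string", "null"], description: "metric or imperial" },
    },
  },
};

// Mirrors the conversion above: OBJECT root, per-property pass-through of the
// JSON-schema type with "null" stripped from unions, since Gemini's
// OpenAPI-style schema has no union types.
const functionDeclaration = {
  name: tool.name,
  description: tool.description,
  parameters: {
    type: Type.OBJECT,
    properties: mapValues(tool.parameters.properties, (p) => ({
      type: Array.isArray(p.type) ? p.type.filter((t) => t !== "null")[0] : p.type,
      description: p.description,
    })),
  },
};
```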
@@ -21567,6 +22738,18 @@ var ChatGoogleNodeImpl = {
  throw new Error("Google Application Credentials or Google API Key is not defined.");
  }
  }
+ const headersFromData = (data.headers ?? []).reduce(
+ (acc, header) => {
+ acc[header.key] = header.value;
+ return acc;
+ },
+ {}
+ );
+ const additionalHeaders = data.useHeadersInput ? coerceTypeOptional(inputs["headers"], "object") ?? headersFromData : headersFromData;
+ const allAdditionalHeaders = cleanHeaders({
+ ...context.settings.chatNodeHeaders,
+ ...additionalHeaders
+ });
  try {
  return await (0, import_p_retry3.default)(
  async () => {
@@ -21579,7 +22762,9 @@ var ChatGoogleNodeImpl = {
  maxOutputTokens: maxTokens,
  systemPrompt,
  topK: void 0,
- tools
+ tools,
+ thinkingBudget,
+ additionalHeaders: allAdditionalHeaders
  };
  const cacheKey = JSON.stringify(options2);
  if (data.cache) {
@@ -21604,7 +22789,9 @@ var ChatGoogleNodeImpl = {
  topK: void 0,
  apiKey,
  systemPrompt,
- tools
+ tools,
+ thinkingBudget,
+ additionalHeaders: allAdditionalHeaders
  });
  } else {
  chunks = streamChatCompletions3({
@@ -21622,6 +22809,15 @@ var ChatGoogleNodeImpl = {
  }
  const responseParts = [];
  const functionCalls = [];
+ let throttleLastCalledTime = Date.now();
+ const onPartialOutput = (output2) => {
+ var _a2;
+ const now = Date.now();
+ if (now - throttleLastCalledTime > (context.settings.throttleChatNode ?? 100)) {
+ (_a2 = context.onPartialOutputs) == null ? void 0 : _a2.call(context, output2);
+ throttleLastCalledTime = now;
+ }
+ };
  for await (const chunk of chunks) {
  if (chunk.completion) {
  responseParts.push(chunk.completion);
@@ -21641,8 +22837,9 @@
  }))
  };
  }
- (_a = context.onPartialOutputs) == null ? void 0 : _a.call(context, output);
+ onPartialOutput == null ? void 0 : onPartialOutput(output);
  }
+ (_a = context.onPartialOutputs) == null ? void 0 : _a.call(context, output);
  const endTime = Date.now();
  output["all-messages"] = {
  type: "chat-message[]",
@@ -21794,7 +22991,7 @@ var plugins = {
  };
 
  // src/integrations/DatasetProvider.ts
- var import_lodash_es21 = require("lodash");
+ var import_lodash_es22 = require("lodash");
  var InMemoryDatasetProvider = class {
  #datasets;
  constructor(datasets) {
@@ -21874,7 +23071,7 @@ var InMemoryDatasetProvider = class {
  return sorted.slice(0, k).map((r) => ({ ...r.row, distance: r.similarity }));
  }
  async exportDatasetsForProject(_projectId) {
- return (0, import_lodash_es21.cloneDeep)(this.#datasets);
+ return (0, import_lodash_es22.cloneDeep)(this.#datasets);
  }
  };
  var dotProductSimilarity = (a, b) => {
@@ -21889,11 +23086,13 @@ function nodeMatches(spec, event) {
  async function* getProcessorEvents(processor, spec) {
  const previousIndexes = /* @__PURE__ */ new Map();
  const usages = [];
+ let hasDelta = false;
  for await (const event of processor.events()) {
  if (event.type === "partialOutput") {
  if (spec.partialOutputs === true || nodeMatches(spec.partialOutputs, event)) {
  const currentOutput = coerceType(event.outputs["response"], "string");
  const delta = currentOutput.slice(previousIndexes.get(event.node.id) ?? 0);
+ hasDelta = true;
  yield {
  type: "partialOutput",
  nodeId: event.node.id,
@@ -21949,7 +23148,7 @@ async function* getProcessorEvents(processor, spec) {
  usages.push(usage);
  }
  }
- if (spec.nodeFinish === true || nodeMatches(spec.nodeFinish, event)) {
+ if ((spec.nodeFinish === true || nodeMatches(spec.nodeFinish, event)) && !(spec.removeFinalOutput && hasDelta)) {
  yield {
  type: "nodeFinish",
  outputs: event.outputs,
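`getProcessorEvents` now tracks whether any partial-output delta was yielded (`hasDelta`); when the streaming spec sets `removeFinalOutput`, the matching `nodeFinish` event is suppressed so a consumer that assembled the text from deltas does not receive the full output a second time. A hedged sketch of a spec using the flag; the field names come from the code above, but the complete spec type is assumed:

```ts
// Field names as they appear above; the full streaming-spec type is assumed.
const spec = {
  partialOutputs: true,     // stream deltas from matching nodes
  nodeFinish: true,         // normally also emit the finished outputs...
  removeFinalOutput: true,  // ...but skip nodeFinish when deltas were already streamed
};

// for await (const event of getProcessorEvents(processor, spec)) { ... }
```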
@@ -21960,6 +23159,23 @@ async function* getProcessorEvents(processor, spec) {
  }
  }
  }
+ var createOnStreamUserEvents = (eventList, handleUserEvent) => {
+ if (!(eventList == null ? void 0 : eventList.trim())) {
+ return void 0;
+ }
+ const events = eventList.split(",").map((e) => e.trim()).filter(Boolean);
+ if (!events.length) {
+ return void 0;
+ }
+ return Object.fromEntries(
+ events.map((event) => [
+ event,
+ async (data) => {
+ await handleUserEvent(event, data);
+ }
+ ])
+ );
+ };
  function getProcessorSSEStream(processor, spec) {
  const encoder = new TextEncoder();
  function sendEvent(controller, type, data) {
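The new `createOnStreamUserEvents` helper turns a comma-separated list of user-event names into a map of `eventName -> async handler`, each delegating to a single callback, and returns `undefined` for a blank or empty list. A usage sketch; the ambient declarations only restate what already exists in the bundle above:

```ts
// Assumed ambient declarations for the sketch; both exist in the bundle above.
declare const createOnStreamUserEvents: (
  eventList: string | undefined,
  handle: (event: string, data: unknown) => Promise<void>,
) => Record<string, (data: unknown) => Promise<void>> | undefined;
declare const processor: { onUserEvent(name: string, fn: (data: unknown) => Promise<void>): void };

// eventList typically comes from spec.userStreamEvents, e.g. "progress, status".
const handlers = createOnStreamUserEvents("progress, status", async (name, data) => {
  console.log(`user event ${name}:`, data);
});

if (handlers) {
  for (const [name, fn] of Object.entries(handlers)) {
    processor.onUserEvent(name, fn); // same registration loop used by the SSE streams below
  }
}
```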
@@ -21971,6 +23187,22 @@ data: ${JSON.stringify(data)}
  }
  return new ReadableStream({
  async start(controller) {
+ const userEventHandler = async (eventName, data) => {
+ const graphEvent = {
+ type: "event",
+ graphEvent: {
+ name: eventName,
+ message: coerceType(data, "string")
+ }
+ };
+ sendEvent(controller, "event", graphEvent);
+ };
+ const streamEvents = createOnStreamUserEvents(spec.userStreamEvents, userEventHandler);
+ if (streamEvents) {
+ for (const [name, fn] of Object.entries(streamEvents)) {
+ processor.onUserEvent(name, fn);
+ }
+ }
  try {
  for await (const event of getProcessorEvents(processor, spec)) {
  sendEvent(controller, event.type, event);
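With this change, user events raised during graph execution are forwarded through `getProcessorSSEStream` as frames of type `event` whose payload carries the event name and a string-coerced message. A hedged consumer sketch; the processor and spec values are placeholders, and the frame layout comment only restates what `sendEvent` writes:

```ts
// processor: an existing GraphProcessor; getProcessorSSEStream is the function above.
declare const processor: any;
declare function getProcessorSSEStream(processor: any, spec: any): ReadableStream<Uint8Array>;

const stream = getProcessorSSEStream(processor, {
  partialOutputs: true,
  nodeFinish: true,
  userStreamEvents: "progress", // names forwarded through the user-event handler above
});

const decoder = new TextDecoder();
const reader = stream.getReader();
for (;;) {
  const { value, done } = await reader.read();
  if (done) break;
  // Frames arrive as "event: <type>" / "data: <json>" pairs written by sendEvent().
  process.stdout.write(decoder.decode(value));
}
```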
@@ -21997,6 +23229,21 @@ function getSingleNodeStream(processor, arg) {
  async start(controller) {
  var _a;
  try {
+ const userEventHandler = async (eventName, data) => {
+ const payload = {
+ name: eventName,
+ message: coerceType(data, "string")
+ };
+ controller.enqueue(`event: ${JSON.stringify(payload)}
+
+ `);
+ };
+ const streamEvents = createOnStreamUserEvents(spec.userStreamEvents, userEventHandler);
+ if (streamEvents) {
+ for (const [name, fn] of Object.entries(streamEvents)) {
+ processor.onUserEvent(name, fn);
+ }
+ }
  for await (const event of getProcessorEvents(processor, spec)) {
  if (event.type === "partialOutput") {
  controller.enqueue(`data: ${JSON.stringify(event.delta)}
@@ -22125,6 +23372,7 @@ function coreCreateProcessor(project, options2) {
  nativeApi: options2.nativeApi,
  datasetProvider: options2.datasetProvider,
  audioProvider: options2.audioProvider,
+ mcpProvider: options2.mcpProvider,
  codeRunner: options2.codeRunner,
  projectPath: options2.projectPath,
  projectReferenceLoader: options2.projectReferenceLoader,
@@ -22223,6 +23471,10 @@ var Rivet = void 0;
  LoadDatasetNodeImpl,
  LoopControllerNodeImpl,
  LoopUntilNodeImpl,
+ MCPError,
+ MCPErrorType,
+ MCPGetPromptNodeImpl,
+ MCPToolCallNodeImpl,
  MatchNodeImpl,
  NodeImpl,
  NodeRegistration,
@@ -22295,6 +23547,7 @@ var Rivet = void 0;
  coreCreateProcessor,
  coreRunGraph,
  createDatasetNode,
+ createOnStreamUserEvents,
  cronNode,
  dataTypeDisplayNames,
  dataTypes,
@@ -22372,6 +23625,9 @@ var Rivet = void 0;
  looseDataValueToDataValue,
  looseDataValuesToDataValues,
  matchNode,
+ mcpDiscoveryNode,
+ mcpGetPromptNode,
+ mcpToolCallNode,
  newId,
  nodeDefinition,
  numberNode,