@atomoz/workflows-nodes 0.1.16 → 0.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -60,7 +60,9 @@ var HttpGetInputNode = {
 
  // src/nodes/inputs/http/get/function.ts
  var HttpGetInputNodeFunction = async (params) => {
- const { request, fieldValues } = params;
+ const { $field: _$field, $req, $inputs: _$inputs, $vars: _$vars } = params;
+ const request = $req || params.request;
+ const fieldValues = params.fieldValues || {};
  const { queryParams: configQueryParams, headers: configHeaders } = fieldValues || {};
  const actualData = {
  queryParams: request?.query || {},
@@ -187,7 +189,9 @@ var HttpPostInputNode = {
 
  // src/nodes/inputs/http/post/function.ts
  var HttpPostInputNodeFunction = (params) => {
- const { request, fieldValues } = params;
+ const { $field: _$field, $req, $inputs: _$inputs, $vars: _$vars } = params;
+ const request = $req || params.request;
+ const fieldValues = params.fieldValues || {};
  const { queryParams: configQueryParams, headers: configHeaders, body: configBody } = fieldValues || {};
  if (request.method !== "POST") {
  throw new Error("M\xE9todo HTTP inv\xE1lido");
@@ -233,7 +237,9 @@ var HttpPostInputNodeSchema = z3.object({
 
  // src/nodes/inputs/chat/chat.ts
  var ChatInputNodeFunction = (params) => {
- const { request, fieldValues } = params;
+ const { $field: _$field, $req, $inputs: _$inputs, $vars: _$vars } = params;
+ const request = $req || params.request;
+ const fieldValues = params.fieldValues || {};
  const { message: configMessage, chatId: configChatId } = fieldValues || {};
  const actualData = {
  message: request?.message ?? configMessage ?? "",
@@ -286,7 +292,8 @@ var ChatInputNode = {
 
  // src/nodes/inputs/manual/trigger.ts
  var ManualTriggerNodeFunction = (params) => {
- const { fieldValues } = params || {};
+ const { $field: _$field, $req: _$req, $inputs: _$inputs, $vars: _$vars } = params;
+ const fieldValues = params.fieldValues || {};
  return fieldValues || {};
  };
  var ManualTriggerNode = {
@@ -323,11 +330,84 @@ var ManualTriggerNode = {
  ]
  };
 
+ // src/nodes/inputs/cron/trigger.ts
+ var CronTriggerNodeFunction = (params) => {
+ const { $req } = params;
+ const triggerData = $req || {};
+ const fieldValues = params.fieldValues || {};
+ return {
+ triggeredAt: triggerData.triggeredAt || (/* @__PURE__ */ new Date()).toISOString(),
+ scheduleId: triggerData.scheduleId || null,
+ cronExpression: fieldValues.cronExpression || null,
+ timezone: fieldValues.timezone || "America/Sao_Paulo",
+ success: true
+ };
+ };
+ var CronTriggerNode = {
+ label: "Cron Trigger",
+ type: "CronTrigger",
+ category: "input",
+ icon: "\u23F0",
+ description: "Dispara fluxo automaticamente baseado em express\xE3o cron",
+ tags: {
+ execution: "async",
+ group: "Custom"
+ },
+ fields: [
+ {
+ id: "cronExpression",
+ label: "Express\xE3o Cron",
+ type: "string",
+ required: true,
+ placeholder: "0 9 * * * (todos os dias \xE0s 9h)"
+ },
+ {
+ id: "timezone",
+ label: "Timezone",
+ type: "select",
+ defaultValue: "America/Sao_Paulo",
+ options: [
+ { label: "S\xE3o Paulo (GMT-3)", value: "America/Sao_Paulo" },
+ { label: "UTC", value: "UTC" },
+ { label: "New York (GMT-5)", value: "America/New_York" },
+ { label: "Los Angeles (GMT-8)", value: "America/Los_Angeles" },
+ { label: "London (GMT+0)", value: "Europe/London" }
+ ]
+ },
+ {
+ id: "triggeredAt",
+ label: "Triggered At",
+ type: "string",
+ typeable: false,
+ handle: {
+ type: "output",
+ label: "Triggered At",
+ name: "triggeredAt",
+ fieldType: "string"
+ }
+ },
+ {
+ id: "scheduleId",
+ label: "Schedule ID",
+ type: "string",
+ typeable: false,
+ handle: {
+ type: "output",
+ label: "Schedule ID",
+ name: "scheduleId",
+ fieldType: "string"
+ }
+ }
+ ]
+ };
+
  // src/nodes/processors/concat.ts
  import { z as z4 } from "zod";
- var ConcatNodeFunction = (inputs) => {
- const input1 = inputs.input1 || "";
- const input2 = inputs.input2 || "";
+ var ConcatNodeFunction = (params) => {
+ const { $field: _$field, $req: _$req, $inputs: _$inputs, $vars: _$vars } = params;
+ const inputs_resolved = params.inputs || params || {};
+ const input1 = inputs_resolved.input1 || "";
+ const input2 = inputs_resolved.input2 || "";
  const result = `${input1}${input2}`;
  return {
  output: result,
@@ -507,8 +587,12 @@ var OutputNode = {
 
  // src/nodes/outputs/chat/output.ts
  var ChatOutputNodeFunction = async (params) => {
- const { inputs, fieldValues, stream, emitter } = params || {};
- const content = inputs?.data ?? fieldValues?.data ?? "";
+ const { $field: _$field, $req: _$req, $inputs: _$inputs, $vars: _$vars } = params;
+ const inputs_resolved = params.inputs || {};
+ const fieldValues = params.fieldValues || {};
+ const stream = params.stream;
+ const emitter = params.emitter;
+ const content = inputs_resolved?.data ?? fieldValues?.data ?? "";
  if (stream && emitter) {
  try {
  emitter.emitDone({ content });
@@ -670,7 +754,7 @@ import { createReactAgent } from "@langchain/langgraph/prebuilt";
  import { SystemMessage } from "@langchain/core/messages";
 
  // src/utils/llm-factory.ts
- import { GraphQLClient, gql } from "graphql-request";
+ import { gql } from "graphql-request";
  import { ChatGoogle } from "@langchain/google-gauth";
  import { ChatOpenAI } from "@langchain/openai";
  var GRAPHQL_ENDPOINT = process.env["GRAPHQL_URL"] || "http://localhost:3001/graphql";
@@ -695,44 +779,12 @@ async function createLLMFromModel(modelConfig, authToken, streaming = false) {
  throw new Error('Model config deve conter "model" e "integrationId"');
  }
  const { model, integrationId } = modelConfig;
- const client = new GraphQLClient(GRAPHQL_ENDPOINT, {
- headers: {
- Authorization: `Bearer ${authToken}`,
- "x-tenant-id": "65d62c52-0c09-473a-8895-359afbed3f5a"
- }
- });
- let integrationData;
- try {
- const response = await client.request(
- GET_INTEGRATIONS_QUERY,
- {
- where: {
- id: {
- eq: integrationId
- }
- }
- }
- );
- if (!response.getIntegrations?.data?.[0]) {
- throw new Error(`Integra\xE7\xE3o ${integrationId} n\xE3o encontrada`);
- }
- integrationData = response.getIntegrations.data[0];
- } catch (error) {
- console.error("Erro ao buscar integra\xE7\xE3o:", error);
- throw new Error(
- `Falha ao buscar integra\xE7\xE3o: ${error instanceof Error ? error.message : "Erro desconhecido"}`
- );
- }
- const apiKey = integrationData.data?.["token"] || integrationData.data?.["token"];
- if (!apiKey) {
- throw new Error(`API Key n\xE3o encontrada na integra\xE7\xE3o ${integrationId}`);
- }
- const provider = integrationData.data?.provider?.toLowerCase() || inferProviderFromModel(model);
+ const provider = "gemini";
  switch (provider) {
  case "gemini":
  return new ChatGoogle({
  model: "gemini-flash-latest",
- apiKey,
+ apiKey: "AIzaSyAWS9GhesWxG4uTdJRQbBziMB1diXtXtlI",
  streaming
  });
  case "openai":
@@ -756,21 +808,14 @@ async function createLLMFromModel(modelConfig, authToken, streaming = false) {
  );
  }
  }
- function inferProviderFromModel(model) {
- const modelLower = model.toLowerCase();
- if (modelLower.includes("gemini") || modelLower.includes("palm")) {
- return "gemini";
- }
- if (modelLower.includes("gpt") || modelLower.includes("o1") || modelLower.includes("o3")) {
- return "openai";
- }
- return "openrouter";
- }
 
  // src/nodes/ia/agent/function.ts
  var IaAgentNodeFunction = async (inputs) => {
- const { model, tools, systemMessage, name, message } = inputs.fieldValues;
+ const { $field: _$field, $req: _$req, $inputs: _$inputs, $vars: _$vars } = inputs;
+ const { model, tools, systemMessage, name, message } = inputs.fieldValues || {};
  const authToken = inputs.authToken;
+ const stream = Boolean(inputs?.stream);
+ const emitter = inputs?.emitter;
  if (!name) {
  throw new Error("Agent 'name' is required. Please provide a unique name for the agent in the node properties.");
  }
@@ -792,8 +837,7 @@ IMPORTANT: You must base your response on the last message in the conversation h
  if (!authToken) {
  throw new Error("Auth token is required to instantiate LLM from integration");
  }
- const streaming = Boolean(inputs?.stream);
- llmInstance = await createLLMFromModel(model, authToken, streaming);
+ llmInstance = await createLLMFromModel(model, authToken, stream);
  } else if (typeof model?.bindTools === "function") {
  llmInstance = model;
  } else {
@@ -809,22 +853,70 @@ IMPORTANT: You must base your response on the last message in the conversation h
  if (message) {
  try {
  const { HumanMessage: HumanMessage2 } = await import("@langchain/core/messages");
- const result = await agent.invoke({
- messages: [new HumanMessage2(message)]
- });
- if (result?.messages && result.messages.length > 0) {
- const lastMessage = result.messages[result.messages.length - 1];
- const content = lastMessage?.content;
- if (typeof content === "string") {
- output = content;
- } else if (Array.isArray(content)) {
- output = content.map((part) => {
- if (typeof part === "string") return part;
- if (part?.type === "text") return part.text;
- return "";
- }).filter(Boolean).join("\n");
- } else {
- output = "";
+ if (stream && emitter) {
+ const streamIterator = await agent.stream({
+ messages: [new HumanMessage2(message)]
+ });
+ let lastMessages = [];
+ const sentContents = /* @__PURE__ */ new Set();
+ for await (const step of streamIterator) {
+ if (step && typeof step === "object") {
+ for (const [key, value] of Object.entries(step)) {
+ if (value && typeof value === "object" && "messages" in value) {
+ const messages = value.messages;
+ if (Array.isArray(messages)) {
+ lastMessages = messages;
+ for (const msg of messages) {
+ const content = msg?.content;
+ const contentStr = typeof content === "string" ? content : Array.isArray(content) ? content.map((p) => p.type === "text" ? p.text : "").filter(Boolean).join("") : "";
+ if (contentStr && !sentContents.has(contentStr)) {
+ sentContents.add(contentStr);
+ if (emitter?.emitDelta) {
+ emitter.emitDelta({
+ content: contentStr,
+ actor: name,
+ isAgent: true,
+ isTool: false
+ });
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ if (lastMessages.length > 0) {
+ const lastMessage = lastMessages[lastMessages.length - 1];
+ const content = lastMessage?.content;
+ if (typeof content === "string") {
+ output = content;
+ } else if (Array.isArray(content)) {
+ output = content.map((part) => {
+ if (typeof part === "string") return part;
+ if (part?.type === "text") return part.text;
+ return "";
+ }).filter(Boolean).join("\n");
+ }
+ }
+ } else {
+ const result = await agent.invoke({
+ messages: [new HumanMessage2(message)]
+ });
+ if (result?.messages && result.messages.length > 0) {
+ const lastMessage = result.messages[result.messages.length - 1];
+ const content = lastMessage?.content;
+ if (typeof content === "string") {
+ output = content;
+ } else if (Array.isArray(content)) {
+ output = content.map((part) => {
+ if (typeof part === "string") return part;
+ if (part?.type === "text") return part.text;
+ return "";
+ }).filter(Boolean).join("\n");
+ } else {
+ output = "";
+ }
  }
  }
  } catch (error) {
@@ -973,8 +1065,9 @@ var extractFinalResponse = (messages) => {
  }
  return "No response generated.";
  };
- var AiSupervisorNodeFunction = async (fieldValues) => {
- const outer = fieldValues ?? {};
+ var AiSupervisorNodeFunction = async (params) => {
+ const { $field: _$field, $req: _$req, $inputs: _$inputs, $vars: _$vars } = params;
+ const outer = params ?? {};
  const inner = outer && typeof outer === "object" && outer.fieldValues && typeof outer.fieldValues === "object" ? outer.fieldValues : {};
  const model = inner.model ?? outer.model;
  const agents = inner.agents ?? outer.agents;
@@ -1194,7 +1287,9 @@ var schemas = {
  };
 
  // src/nodes/ia/tool/function.ts
- var AiToolNodeFunction = async (fieldValues) => {
+ var AiToolNodeFunction = async (params) => {
+ const { $field: _$field, $req: _$req, $inputs: _$inputs, $vars: _$vars } = params;
+ const fieldValues = params.fieldValues || params;
  const { name, description, nodeFunction, nodeType, originalNodeData, workflowService, currentResults } = fieldValues;
  const schema = schemas[nodeType] || CustomToolSchema;
  const dynamicTool = tool(
@@ -1237,9 +1332,12 @@ var IaMessageNodeSchema = z8.object({
  message: z8.string().describe("User message to send to the LLM")
  });
  var IaMessageNodeFunction = async (inputs) => {
+ const { $field: _$field, $req: _$req, $inputs: _$inputs_var, $vars: _$vars } = inputs;
  const fieldValues = inputs.fieldValues || inputs;
  const { model, systemMessage, message } = fieldValues;
  const authToken = inputs.authToken;
+ const stream = Boolean(inputs?.stream);
+ const emitter = inputs?.emitter;
  if (!model) {
  throw new Error("Model is required");
  }
@@ -1249,10 +1347,9 @@ var IaMessageNodeFunction = async (inputs) => {
  let llmInstance;
  if (model?.model && model?.integrationId) {
  if (!authToken) {
- throw new Error("Auth token is required to instantiate LLM from integration2 ");
+ throw new Error("Auth token is required to instantiate LLM from integration");
  }
- const streaming = Boolean(inputs?.stream);
- llmInstance = await createLLMFromModel(model, authToken, streaming);
+ llmInstance = await createLLMFromModel(model, authToken, stream);
  } else {
  llmInstance = model;
  }
@@ -1262,6 +1359,29 @@ var IaMessageNodeFunction = async (inputs) => {
  }
  messages.push(["human", message]);
  try {
+ if (stream && emitter) {
+ let fullContent = "";
+ const streamResponse = await llmInstance.stream(messages);
+ for await (const chunk of streamResponse) {
+ const chunkContent = typeof chunk.content === "string" ? chunk.content : chunk.content?.text || "";
+ if (chunkContent) {
+ fullContent += chunkContent;
+ if (emitter?.emitDelta) {
+ emitter.emitDelta({
+ content: chunkContent,
+ actor: "IaMessageNode",
+ isAgent: false,
+ isTool: false
+ });
+ }
+ }
+ }
+ return {
+ output: fullContent,
+ response: fullContent,
+ fullResponse: { content: fullContent }
+ };
+ }
  const response = await llmInstance.invoke(messages);
  return {
  output: response.content,
@@ -1493,19 +1613,75 @@ var WhatsappMessageTriggerNode = {
 
  // src/nodes/processors/custom-code.ts
  var NodeFunction = (params) => {
- const { inputValue, fieldValues } = params;
- const { customCode } = fieldValues;
- if (!customCode) {
- return inputValue;
+ let input = params?.inputValue ?? params?.input;
+ const context = params && params.fieldValues ? params.fieldValues : params || {};
+ let customCode = context?.customCode ?? params?.customCode;
+ if (input === void 0 && Array.isArray(context?.fields)) {
+ const firstInputField = context.fields.find((f) => f?.handle?.type === "input");
+ if (firstInputField && firstInputField.id) {
+ const key = String(firstInputField.id);
+ if (params && Object.prototype.hasOwnProperty.call(params, key)) input = params[key];
+ else if (context && Object.prototype.hasOwnProperty.call(context, key)) input = context[key];
+ else if (firstInputField.value !== void 0) input = firstInputField.value;
+ }
+ }
+ const looksLikeCode = (code) => {
+ if (typeof code !== "string") return false;
+ const c = code.trim();
+ if (c.length < 3) return false;
+ return /(return\s+|=>|function\s*\(|;|\n|\{|\})/.test(c);
+ };
+ if (typeof customCode === "string" && !looksLikeCode(customCode)) {
+ if (input === void 0) input = customCode;
+ customCode = "";
+ }
+ if (!customCode || typeof customCode === "string" && customCode.trim() === "") {
+ return input;
  }
  try {
- const customFunction = new Function("input", "context", "request", "params", customCode);
- const result = customFunction(inputValue, fieldValues, params.request, params);
+ const $inputs = params?.results || {};
+ const $vars = context && context.variables || params?.variables || void 0;
+ const __placeholders = params?.__codePlaceholders ?? context?.__codePlaceholders;
+ const customFunction = new Function("input", "context", "request", "params", "$inputs", "$vars", "__placeholders", customCode);
+ const result = customFunction(input, context, params?.request, params, $inputs, $vars, __placeholders);
  return result;
  } catch (error) {
  throw new Error(`Erro ao executar c\xF3digo customizado: ${error instanceof Error ? error.message : "Erro desconhecido"}`);
  }
  };
+ var CustomNodeFunction = (params) => {
+ const context = params && params.fieldValues ? params.fieldValues : params || {};
+ const customCode = context?.customCode;
+ if (!customCode || typeof customCode === "string" && customCode.trim() === "") {
+ throw new Error("CustomNode sem c\xF3digo configurado");
+ }
+ const fields = Array.isArray(context?.fields) ? context.fields : [];
+ const $field = {};
+ fields.forEach((field) => {
+ const fieldId = field?.id;
+ if (!fieldId) return;
+ let value;
+ if (params && Object.prototype.hasOwnProperty.call(params, fieldId)) {
+ value = params[fieldId];
+ } else if (field.handle && field.handle.name && params && Object.prototype.hasOwnProperty.call(params, field.handle.name)) {
+ value = params[field.handle.name];
+ } else if (field.value !== void 0) {
+ value = field.value;
+ } else if (field.defaultValue !== void 0) {
+ value = field.defaultValue;
+ }
+ $field[fieldId] = value;
+ });
+ try {
+ const $inputs = params?.results || {};
+ const $vars = context && context.variables || params?.variables || void 0;
+ const customFunction = new Function("$field", "context", "request", "params", "$inputs", "$vars", customCode);
+ const result = customFunction($field, context, params?.request, params, $inputs, $vars);
+ return result;
+ } catch (error) {
+ throw new Error(`Erro ao executar CustomNode: ${error instanceof Error ? error.message : "Erro desconhecido"}`);
+ }
+ };
  var CustomCodeNode = {
  label: "Custom Code",
  type: "CustomCodeNode",
@@ -1518,7 +1694,13 @@ var CustomCodeNode = {
  label: "C\xF3digo Customizado",
  type: "code",
  required: false,
- placeholder: '// Seu c\xF3digo JavaScript aqui\n// Use "input" para acessar o valor de entrada\n// Use "context" para acessar os dados do n\xF3\nreturn input;',
+ placeholder: '// Seu c\xF3digo JavaScript aqui\n// Use "input" para acessar o valor de entrada\n// Use "context" para acessar os dados do n\xF3\nreturn input;'
+ },
+ {
+ id: "input",
+ label: "Input",
+ type: "any",
+ required: false,
  handle: {
  type: "input",
  label: "Input",
@@ -1547,6 +1729,7 @@ var CustomCodeNode = {
  var nodes = [
  ChatInputNode,
  ManualTriggerNode,
+ CronTriggerNode,
  HttpGetInputNode,
  // HttpPostInputNode,
  // TransformNode,
@@ -1665,6 +1848,7 @@ var nodeFunctions = {
  HttpGetInput: HttpGetInputNodeFunction,
  HttpPostInput: HttpPostInputNodeFunction,
  ManualTrigger: ManualTriggerNodeFunction,
+ CronTrigger: CronTriggerNodeFunction,
  HttpOutput: HttpOutputNodeFunction,
  ConcatNode: ConcatNodeFunction,
  IaMessageNode: IaMessageNodeFunction,
@@ -1673,7 +1857,8 @@ var nodeFunctions = {
  AiSupervisorNode: AiSupervisorNodeFunction,
  WhatsappNode: WhatsappStartChatFunction,
  WhatsappSendMessageNode: WhatsappSendMessageFunction,
- CustomCodeNode: NodeFunction
+ CustomCodeNode: NodeFunction,
+ CustomNode: CustomNodeFunction
  };
  var node_functions_default = nodeFunctions;
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@atomoz/workflows-nodes",
- "version": "0.1.16",
+ "version": "0.1.17",
  "description": "Atomoz Workflows - Node Library",
  "type": "module",
  "main": "./dist/index.js",
@@ -35,17 +35,20 @@
  "build": "tsup index.ts --dts --format esm,cjs --target node18 --out-dir dist --clean --tsconfig tsconfig.build.json",
  "dev": "tsup index.ts --dts --format esm,cjs --target node18 --out-dir dist --watch --tsconfig tsconfig.build.json"
  },
+ "peerDependencies": {
+ "zod": ">=3.0.0 || >=4.0.0"
+ },
  "dependencies": {
  "@langchain/core": "^0.3.66",
  "@langchain/google-gauth": "^0.2.16",
  "@langchain/langgraph": "^0.4.3",
  "@langchain/langgraph-supervisor": "^0.0.17",
  "@langchain/openai": "^0.6.3",
- "graphql-request": "^7.2.0",
- "zod": "^4.0.14"
+ "graphql-request": "^7.2.0"
  },
  "devDependencies": {
  "tsup": "^8.2.4",
- "typescript": "^5.6.3"
+ "typescript": "^5.6.3",
+ "zod": "^4.0.14"
  }
- }
+ }