@cognigy/rest-api-client 2025.15.0 → 2025.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/CHANGELOG.md +10 -0
  2. package/build/apigroups/MetricsAPIGroup_2_0.js +10 -0
  3. package/build/apigroups/ResourcesAPIGroup_2_0.js +6 -0
  4. package/build/apigroups/SimulationAPIGroup_2_0.js +4 -0
  5. package/build/shared/charts/descriptors/logic/if/if.js +2 -2
  6. package/build/shared/charts/descriptors/logic/switch/switch.js +30 -21
  7. package/build/shared/charts/descriptors/message/question/question.js +3 -3
  8. package/build/shared/charts/descriptors/message/question/utils/validateQuestionAnswer.js +2 -2
  9. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +29 -22
  10. package/build/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js +4 -4
  11. package/build/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +24 -18
  12. package/build/shared/charts/descriptors/transcripts/addTranscriptStep.js +3 -3
  13. package/build/shared/interfaces/generativeAI/IGenerativeAIModels.js +1 -0
  14. package/build/shared/interfaces/resources/IGetAiAgentJobsTools.js +3 -0
  15. package/build/shared/interfaces/resources/IKnowledgeDescriptor.js +38 -5
  16. package/build/shared/interfaces/resources/ILargeLanguageModel.js +15 -1
  17. package/build/shared/interfaces/restAPI/metrics/callCounter/v3.0/ICallCounterPreAggregatedValue_3_0.js +3 -0
  18. package/build/shared/interfaces/restAPI/metrics/callCounter/v3.0/IGetCallCounterOrganisationRest_3_0.js +3 -0
  19. package/build/shared/interfaces/restAPI/metrics/callCounter/v3.0/IGetCallCounterRest_3_0.js +3 -0
  20. package/build/shared/interfaces/restAPI/metrics/callCounter/v3.0/index.js +3 -0
  21. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IAiAgentJobNodeWithTools_2_0.js +65 -0
  22. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IGetAiAgentJobAndToolsRest_2_0 .js +4 -0
  23. package/build/shared/interfaces/restAPI/resources/knowledgeStore/v2.0/IIndexKnowledgeDescriptorsRest_2_0.js +3 -0
  24. package/build/shared/interfaces/restAPI/resources/knowledgeStore/v2.0/extension/IRunKnowledgeExtensionRest_2_0.js +3 -0
  25. package/build/shared/interfaces/restAPI/resources/knowledgeStore/v2.0/index.js +16 -0
  26. package/build/shared/interfaces/restAPI/simulation/simulationRunBatch/IStopSimulationRunBatchRest_2_0.js +3 -0
  27. package/build/shared/interfaces/security/ICallCounterPreAggregatedValue.js +3 -0
  28. package/dist/esm/apigroups/MetricsAPIGroup_2_0.js +10 -0
  29. package/dist/esm/apigroups/ResourcesAPIGroup_2_0.js +6 -0
  30. package/dist/esm/apigroups/SimulationAPIGroup_2_0.js +4 -0
  31. package/dist/esm/shared/charts/descriptors/logic/if/if.js +2 -2
  32. package/dist/esm/shared/charts/descriptors/logic/switch/switch.js +30 -21
  33. package/dist/esm/shared/charts/descriptors/message/question/question.js +3 -3
  34. package/dist/esm/shared/charts/descriptors/message/question/utils/validateQuestionAnswer.js +4 -3
  35. package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +28 -21
  36. package/dist/esm/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js +4 -4
  37. package/dist/esm/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +31 -25
  38. package/dist/esm/shared/charts/descriptors/transcripts/addTranscriptStep.js +3 -3
  39. package/dist/esm/shared/interfaces/generativeAI/IGenerativeAIModels.js +1 -0
  40. package/dist/esm/shared/interfaces/resources/IGetAiAgentJobsTools.js +2 -0
  41. package/dist/esm/shared/interfaces/resources/IKnowledgeDescriptor.js +37 -5
  42. package/dist/esm/shared/interfaces/resources/ILargeLanguageModel.js +13 -0
  43. package/dist/esm/shared/interfaces/restAPI/metrics/callCounter/v3.0/ICallCounterPreAggregatedValue_3_0.js +2 -0
  44. package/dist/esm/shared/interfaces/restAPI/metrics/callCounter/v3.0/IGetCallCounterOrganisationRest_3_0.js +2 -0
  45. package/dist/esm/shared/interfaces/restAPI/metrics/callCounter/v3.0/IGetCallCounterRest_3_0.js +2 -0
  46. package/dist/esm/shared/interfaces/restAPI/metrics/callCounter/v3.0/index.js +2 -0
  47. package/dist/esm/shared/interfaces/restAPI/resources/aiAgent/v2.0/IAiAgentJobNodeWithTools_2_0.js +65 -0
  48. package/dist/esm/shared/interfaces/restAPI/resources/aiAgent/v2.0/IGetAiAgentJobAndToolsRest_2_0 .js +3 -0
  49. package/dist/esm/shared/interfaces/restAPI/resources/knowledgeStore/v2.0/IIndexKnowledgeDescriptorsRest_2_0.js +2 -0
  50. package/dist/esm/shared/interfaces/restAPI/resources/knowledgeStore/v2.0/extension/IRunKnowledgeExtensionRest_2_0.js +2 -0
  51. package/dist/esm/shared/interfaces/restAPI/resources/knowledgeStore/v2.0/index.js +2 -1
  52. package/dist/esm/shared/interfaces/restAPI/simulation/simulationRunBatch/IStopSimulationRunBatchRest_2_0.js +2 -0
  53. package/dist/esm/shared/interfaces/security/ICallCounterPreAggregatedValue.js +2 -0
  54. package/package.json +2 -2
  55. package/types/index.d.ts +257 -23
package/CHANGELOG.md CHANGED
@@ -1,3 +1,13 @@
+ # 2025.16.0
+ Released: August 05th, 2025
+
+ Released state of package up to date with Cognigy.AI v2025.16.0
+
+ # 2025.15.1
+ Released: July 24th, 2025
+
+ Released state of package up to date with Cognigy.AI v2025.15.1
+
  # 2025.15.0
  Released: July 22nd, 2025
 
package/build/apigroups/MetricsAPIGroup_2_0.js CHANGED
@@ -64,6 +64,16 @@ function MetricsAPIGroup_2_0(instance) {
  return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/projects/${projectId}/conversationcounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
  },
  getConversationCounterOrganisation: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/conversationcounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options),
+ getPreAggregatedConversationCounter: (_a, options) => {
+ var { projectId } = _a, args = __rest(_a, ["projectId"]);
+ return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v3.0/projects/${projectId}/conversationcounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
+ },
+ getPreAggregatedConversationCounterOrganisation: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)(`/new/v3.0/conversationcounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options),
+ getPreAggregatedCallCounter: (_a, options) => {
+ var { projectId } = _a, args = __rest(_a, ["projectId"]);
+ return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v3.0/projects/${projectId}/callcounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
+ },
+ getPreAggregatedCallCounterOrganisation: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)(`/new/v3.0/callcounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options),
  getKnowledgeQueryCounter: (_a, options) => {
  var { projectId } = _a, args = __rest(_a, ["projectId"]);
  return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/projects/${projectId}/knowledgequerycounter?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
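The four `getPreAggregated*` methods above are the functional additions to the metrics API group; everything else is unchanged context. A minimal usage sketch follows, assuming a configured client that exposes these methods as shown in the hunk. The interface below is a local stand-in, and any extra query parameters (for example a date range) are illustrative assumptions, not taken from this diff.

```ts
// Local stand-in types; the real typings ship with the package in types/index.d.ts.
type CounterQuery = Record<string, string | number | boolean | undefined>;

interface MetricsApiV3 {
  getPreAggregatedConversationCounter(args: { projectId: string } & CounterQuery, options?: unknown): Promise<unknown>;
  getPreAggregatedConversationCounterOrganisation(args: CounterQuery, options?: unknown): Promise<unknown>;
  getPreAggregatedCallCounter(args: { projectId: string } & CounterQuery, options?: unknown): Promise<unknown>;
  getPreAggregatedCallCounterOrganisation(args: CounterQuery, options?: unknown): Promise<unknown>;
}

async function loadPreAggregatedCounters(api: MetricsApiV3, projectId: string) {
  // Project-scoped calls hit /new/v3.0/projects/:projectId/{conversation,call}counter;
  // every property besides projectId is serialized into the query string.
  const conversations = await api.getPreAggregatedConversationCounter({ projectId });
  const calls = await api.getPreAggregatedCallCounter({ projectId });
  // Organisation-wide variants use /new/v3.0/{conversation,call}counter without a project scope.
  const orgCalls = await api.getPreAggregatedCallCounterOrganisation({});
  return { conversations, calls, orgCalls };
}
```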
package/build/apigroups/ResourcesAPIGroup_2_0.js CHANGED
@@ -559,6 +559,7 @@ const ResourcesAPIGroup_2_0 = (instance) => {
  validateAiAgentName: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)("/new/v2.0/aiagents/validatename", "POST", self)(args, options),
  getAiAgentHiringTemplates: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)("/new/v2.0/aiagents/hire/templates", "GET", self)(args, options),
  hireAiAgent: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)("/new/v2.0/aiagents/hire", "POST", self)(args, options),
+ getAiAgentJobsAndTools: ({ aiAgentId }, options) => (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/aiagents/${aiAgentId}/jobs`, "GET", self)(undefined, options),
  generateNodeOutput(_a, options) {
  var { flowId } = _a, args = __rest(_a, ["flowId"]);
  return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/flows/${flowId}/chart/nodes/output/generate`, "POST", self)(args, options);
@@ -591,6 +592,11 @@ const ResourcesAPIGroup_2_0 = (instance) => {
  var { knowledgeStoreId } = _a, args = __rest(_a, ["knowledgeStoreId"]);
  return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/knowledgestores/${knowledgeStoreId}`, "PATCH", self)(args, options);
  },
+ runKnowledgeExtension: (_a, options) => {
+ var { knowledgeStoreId } = _a, args = __rest(_a, ["knowledgeStoreId"]);
+ return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/knowledgestores/${knowledgeStoreId}/extensions/run`, "POST", self)(args, options);
+ },
+ indexKnowledgeDescriptors: ({ knowledgeStoreId }, options) => (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/knowledgestores/${knowledgeStoreId}/descriptors`, "GET", self)(undefined, options),
  indexKnowledgeSources: (_a, options) => {
  var { knowledgeStoreId } = _a, args = __rest(_a, ["knowledgeStoreId"]);
  return (0, GenericAPIFn_1.GenericAPIFn)(`/new/v2.0/knowledgestores/${knowledgeStoreId}/sources?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
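The new resource methods above cover AI Agent jobs and knowledge stores. The sketch below only mirrors the argument shapes visible in the hunk (`aiAgentId`, `knowledgeStoreId`, plus a pass-through body for `runKnowledgeExtension`); the interface is a local stand-in and response types are left opaque.

```ts
interface ResourcesApiAdditions {
  getAiAgentJobsAndTools(args: { aiAgentId: string }, options?: unknown): Promise<unknown>;
  runKnowledgeExtension(args: { knowledgeStoreId: string } & Record<string, unknown>, options?: unknown): Promise<unknown>;
  indexKnowledgeDescriptors(args: { knowledgeStoreId: string }, options?: unknown): Promise<unknown>;
}

async function exerciseNewResourceEndpoints(api: ResourcesApiAdditions, aiAgentId: string, knowledgeStoreId: string) {
  // GET /new/v2.0/aiagents/:aiAgentId/jobs: jobs together with their tool nodes.
  const jobsAndTools = await api.getAiAgentJobsAndTools({ aiAgentId });
  // GET /new/v2.0/knowledgestores/:knowledgeStoreId/descriptors: list knowledge descriptors.
  const descriptors = await api.indexKnowledgeDescriptors({ knowledgeStoreId });
  // POST /new/v2.0/knowledgestores/:knowledgeStoreId/extensions/run: remaining
  // properties are forwarded as the request body (exact payload not part of this diff).
  const extensionRun = await api.runKnowledgeExtension({ knowledgeStoreId });
  return { jobsAndTools, descriptors, extensionRun };
}
```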
package/build/apigroups/SimulationAPIGroup_2_0.js CHANGED
@@ -44,6 +44,10 @@ function SimulationAPIGroup_2_0(instance) {
  var { simulationReference, simulationRunBatchReference } = _a, args = __rest(_a, ["simulationReference", "simulationRunBatchReference"]);
  return (0, GenericAPIFn_1.GenericAPIFn)(`/testing/beta/simulations/${simulationReference}/batches/${simulationRunBatchReference}?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
  },
+ stopSimulationRunBatch: (_a, options) => {
+ var { simulationReference, simulationRunBatchReference } = _a, args = __rest(_a, ["simulationReference", "simulationRunBatchReference"]);
+ return (0, GenericAPIFn_1.GenericAPIFn)(`/testing/beta/simulations/${simulationReference}/batches/${simulationRunBatchReference}/stop?${(0, query_1.stringifyQuery)(args)}`, "POST", self)(undefined, options);
+ },
  indexSimulationRuns: (_a, options) => {
  var { simulationReference, simulationRunBatchReference } = _a, args = __rest(_a, ["simulationReference", "simulationRunBatchReference"]);
  return (0, GenericAPIFn_1.GenericAPIFn)(`/testing/beta/simulations/${simulationReference}/batches/${simulationRunBatchReference}/runs?${(0, query_1.stringifyQuery)(args)}`, "GET", self)(undefined, options);
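`stopSimulationRunBatch` is the single addition to the simulation API group. A brief hedged sketch with a stand-in interface and an opaque response type:

```ts
interface SimulationApiAdditions {
  stopSimulationRunBatch(
    args: { simulationReference: string; simulationRunBatchReference: string } & Record<string, unknown>,
    options?: unknown,
  ): Promise<unknown>;
}

async function stopBatch(api: SimulationApiAdditions, simulationReference: string, simulationRunBatchReference: string) {
  // POST /testing/beta/simulations/:sim/batches/:batch/stop; any extra properties
  // are serialized into the query string, matching the hunk above.
  return api.stopSimulationRunBatch({ simulationReference, simulationRunBatchReference });
}
```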
package/build/shared/charts/descriptors/logic/if/if.js CHANGED
@@ -55,13 +55,13 @@ exports.IF = (0, createNodeDescriptor_1.createNodeDescriptor)({
  switch (type) {
  case "rule":
  {
- isConditionTrue = api.evaluateRule(rule);
+ isConditionTrue = await api.evaluateRule(rule);
  }
  break;
  case "condition":
  default:
  {
- isConditionTrue = api.parseCognigyScriptCondition(condition);
+ isConditionTrue = await api.parseCognigyScriptCondition(condition);
  }
  break;
  }
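The only change in the IF node is that `api.evaluateRule` and `api.parseCognigyScriptCondition` are now awaited, i.e. CognigyScript evaluation has become asynchronous in this release. A hedged sketch of the calling convention follows; the stand-in interface and the `Promise<boolean>` resolution type are assumptions for illustration, not taken from the package typings.

```ts
// Stand-in for the node API; only the two methods visible in the hunk are modelled.
interface ConditionApi {
  evaluateRule(rule: unknown): Promise<boolean>;
  parseCognigyScriptCondition(condition: string): Promise<boolean>;
}

async function isConditionTrue(api: ConditionApi, type: "rule" | "condition", rule: unknown, condition: string): Promise<boolean> {
  // Without the await, the assignment would receive a pending Promise, which is
  // always truthy, so the IF node would always take the "true" branch.
  return type === "rule" ? await api.evaluateRule(rule) : await api.parseCognigyScriptCondition(condition);
}
```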
package/build/shared/charts/descriptors/logic/switch/switch.js CHANGED
@@ -190,32 +190,41 @@ exports.SWITCH = (0, createNodeDescriptor_1.createNodeDescriptor)({
  * by mistake, then CS has already been parsed,
  * causing the parser return empty string.
  */
- parsedOperator = (_a = api.parseCognigyScriptText(`{{${operator}}}`)) !== null && _a !== void 0 ? _a : operator;
+ parsedOperator = (_a = (await api.parseCognigyScriptText(`{{${operator}}}`))) !== null && _a !== void 0 ? _a : operator;
  if (parsedOperator === "") {
  parsedOperator = operator;
  }
  }
- const matchedCase = children === null || children === void 0 ? void 0 : children.find((child) => {
- var _a, _b;
- if (child.type !== "case") {
- return;
+ let matchedCase = undefined;
+ if (children) {
+ for (const child of children) {
+ if (child.type !== "case") {
+ continue;
+ }
+ const check = async () => {
+ var _a, _b;
+ if (useStrict) {
+ return (await api.parseCognigyScriptText(`${(_a = child.config.case.value) === null || _a === void 0 ? void 0 : _a.trim()}`)) === parsedOperator;
+ }
+ else {
+ /**
+ * We cast the case
+ * and the operator to strings to avoid issues
+ * where e.g. the case contains a string but the
+ * operator is a number. We do not support switching on
+ * objects
+ */
+ const parsedCognigyScriptText = `${await api.parseCognigyScriptText(`${(_b = child.config.case.value) === null || _b === void 0 ? void 0 : _b.trim()}`)}`;
+ const parsedOperatorString = `${parsedOperator}`;
+ return parsedCognigyScriptText === parsedOperatorString;
+ }
+ };
+ if (await check()) {
+ matchedCase = child;
+ break;
+ }
  }
- if (useStrict) {
- return api.parseCognigyScriptText(`${(_a = child.config.case.value) === null || _a === void 0 ? void 0 : _a.trim()}`) === parsedOperator;
- }
- else {
- /**
- * We cast the case
- * and the operator to strings to avoid issues
- * where e.g. the case contains a string but the
- * operator is a number. We do not support switching on
- * objects
- */
- const parsedCognigyScriptText = `${api.parseCognigyScriptText(`${(_b = child.config.case.value) === null || _b === void 0 ? void 0 : _b.trim()}`)}`;
- const parsedOperatorString = `${parsedOperator}`;
- return parsedCognigyScriptText === parsedOperatorString;
- }
- });
+ }
  if (matchedCase) {
  api.setNextNode(matchedCase.id);
  return;
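The switch-case refactor above is a consequence of the same async change: `Array.prototype.find` only accepts a synchronous predicate, so once `parseCognigyScriptText` returns a Promise the old callback would return a Promise, which is always truthy, and the first case child would match unconditionally. The explicit loop awaits each check in order and stops at the first real match. A small generic sketch of that pattern (names are illustrative, not from the package):

```ts
// Awaiting an async predicate per element; the first element whose awaited
// predicate is true is returned, mirroring the matchedCase loop above.
async function findAsync<T>(items: readonly T[], predicate: (item: T) => Promise<boolean>): Promise<T | undefined> {
  for (const item of items) {
    if (await predicate(item)) {
      return item;
    }
  }
  return undefined;
}

// The broken variant for contrast: find() treats the returned Promise as truthy,
// so it always yields the first element regardless of the predicate result.
// const wrong = items.find(async (item) => await predicate(item));
```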
package/build/shared/charts/descriptors/message/question/question.js CHANGED
@@ -1899,7 +1899,7 @@ DO NOT talk about other topics. Do not offer general assistance.`,
  // set input.result, so we can use it for validation
  input.result = result;
  // Verify that answer is valid based on some other conditions defined in the function
- const isValid = (0, validateQuestionAnswer_1.validateQuestionAnswer)(cognigy, config);
+ const isValid = await (0, validateQuestionAnswer_1.validateQuestionAnswer)(cognigy, config);
  if (!isValid) {
  input.result = null;
  }
@@ -2057,7 +2057,7 @@ DO NOT talk about other topics. Do not offer general assistance.`,
  // if a result location was specified, try to get the result from that location
  // if the location returns a falsey value, the answer is invalid
  if (resultLocation && result) {
- result = api.parseCognigyScriptResultLocation(resultLocation);
+ result = await api.parseCognigyScriptResultLocation(resultLocation);
  // If we want detailed results, augment the result object accordingly
  if (storeDetailedResults && result !== null && result !== undefined) {
  result = {
@@ -2410,7 +2410,7 @@ DO NOT talk about other topics. Do not offer general assistance.`,
  // #region 5.2.3 Reprompt
  // check if there is an extra condition defined for reprompts and check whether it was truthy
  if (sayReprompt && repromptCondition) {
- const repromptConditionResult = !!api.parseCognigyScriptCondition(repromptCondition);
+ const repromptConditionResult = !!await api.parseCognigyScriptCondition(repromptCondition);
  !repromptConditionResult && api.logDebugMessage(`UI__DEBUG_MODE__QUESTION__MESSAGE_6`, "Skipping Reprompt Message");
  sayReprompt = repromptConditionResult;
  }
package/build/shared/charts/descriptors/message/question/utils/validateQuestionAnswer.js CHANGED
@@ -1,13 +1,13 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.validateQuestionAnswer = void 0;
- const validateQuestionAnswer = (cognigy, config) => {
+ const validateQuestionAnswer = async (cognigy, config) => {
  const { additionalValidation, escalateIntentsAction, escalateIntentsThreshold, escalateIntentsValidIntents } = config;
  const { input, api } = cognigy;
  let isValid = true;
  // check if there is an extra condition defined and check whether it was truthy
  if (additionalValidation) {
- const additionalValidationResult = !!api.parseCognigyScriptCondition(additionalValidation);
+ const additionalValidationResult = !!await api.parseCognigyScriptCondition(additionalValidation);
  !additionalValidationResult && api.logDebugMessage(`UI__DEBUG_MODE__QUESTION__MESSAGE_7`, "Invalid Answer");
  isValid = additionalValidationResult;
  }
package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js CHANGED
@@ -11,7 +11,7 @@ var __rest = (this && this.__rest) || function (s, e) {
  return t;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.AI_AGENT_JOB = void 0;
+ exports.AI_AGENT_JOB = exports.AI_AGENT_TOOLS_WHITELIST = void 0;
  /* Custom modules */
  const createNodeDescriptor_1 = require("../../../createNodeDescriptor");
  const crypto_1 = require("crypto");
@@ -23,6 +23,7 @@ const generateSearchPrompt_1 = require("./helpers/generateSearchPrompt");
  const getUserMemory_1 = require("./helpers/getUserMemory");
  const createToolDefinitions_1 = require("./helpers/createToolDefinitions");
  const transcripts_1 = require("../../../../interfaces/transcripts/transcripts");
+ exports.AI_AGENT_TOOLS_WHITELIST = ["aiAgentJobDefault", "aiAgentJobTool", "aiAgentJobMCPTool"];
  exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  type: "aiAgentJob",
  defaultLabel: "AI Agent",
@@ -31,7 +32,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  collapsable: true,
  placement: {
  children: {
- whitelist: ["aiAgentJobDefault", "aiAgentJobTool", "aiAgentJobMCPTool"],
+ whitelist: exports.AI_AGENT_TOOLS_WHITELIST,
  },
  },
  },
@@ -860,7 +861,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  ],
  tags: ["ai", "aiAgent"],
  function: async ({ cognigy, config, childConfigs, nodeId }) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23;
  const { api, context, input, profile, flowReferenceId } = cognigy;
  const { aiAgent, llmProviderReferenceId, name: jobName, description: jobDescription, instructions: jobInstructions, outputImmediately, toolChoice, useStrict, memoryType, selectedProfileFields, memoryContextInjection, knowledgeSearchBehavior, knowledgeSearchTags, knowledgeSearchTagsFilterOp, knowledgeSearchAiAgentKnowledge, knowledgeSearchJobKnowledge, knowledgeSearchJobStore, knowledgeSearchGenerateSearchPrompt, knowledgeSearchTopK, timeoutInMs, maxTokens, temperature, logErrorToSystem, storeErrorInInput, errorHandling, errorHandlingGotoTarget, errorMessage, debugConfig, debugLogTokenCount, debugLogSystemPrompt, debugLogToolDefinitions, debugResult, debugLogLLMLatency, storeLocation, contextKey, inputKey, streamStoreCopyInInput, streamStopTokens, processImages, transcriptImageHandling, sessionParams } = config;
  try {
@@ -910,7 +911,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  throw new Error(`[VG2] Error on AI Agent Job node. Error message: ${error.message}`);
  }
  }
- const _22 = profile, { profileId, accepted_gdpr, prevent_data_collection, privacy_policy } = _22, cleanedProfile = __rest(_22, ["profileId", "accepted_gdpr", "prevent_data_collection", "privacy_policy"]);
+ const _24 = profile, { profileId, accepted_gdpr, prevent_data_collection, privacy_policy } = _24, cleanedProfile = __rest(_24, ["profileId", "accepted_gdpr", "prevent_data_collection", "privacy_policy"]);
  const userMemory = (0, getUserMemory_1.getUserMemory)(memoryType, selectedProfileFields, aiAgent, cleanedProfile);
  /**
  * ----- Knowledge Search Section -----
@@ -1225,14 +1226,20 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  const mainToolCall = llmResult.toolCalls[0];
  let isMcpToolCall = false;
  // Find the child node with the toolId of the tool call
- let toolChild = childConfigs.find(child => { var _a, _b; return child.type === "aiAgentJobTool" && ((_a = child.config) === null || _a === void 0 ? void 0 : _a.toolId) && api.parseCognigyScriptText((_b = child.config) === null || _b === void 0 ? void 0 : _b.toolId) === mainToolCall.function.name; });
+ let toolChild = undefined;
+ for (const child of childConfigs) {
+ if (child.type === "aiAgentJobTool" && ((_5 = child.config) === null || _5 === void 0 ? void 0 : _5.toolId) && await api.parseCognigyScriptText((_6 = child.config) === null || _6 === void 0 ? void 0 : _6.toolId) === mainToolCall.function.name) {
+ toolChild = child;
+ break;
+ }
+ }
  if (!toolChild && toolMap.has(mainToolCall.function.name)) {
  // If the tool call is from an MCP tool, set the next node to the corresponding child node
  toolChild = childConfigs.find(child => child.id === toolMap.get(mainToolCall.function.name));
  isMcpToolCall = true;
  }
  if (mainToolCall.function.name !== "retrieve_knowledge" && toolChild === undefined) {
- (_5 = api.logDebugError) === null || _5 === void 0 ? void 0 : _5.call(api, `UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__BODY <b>${mainToolCall.function.name}</b>`, "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__HEADER");
+ (_7 = api.logDebugError) === null || _7 === void 0 ? void 0 : _7.call(api, `UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__BODY <b>${mainToolCall.function.name}</b>`, "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__HEADER");
  }
  // Add last tool call to session state for loading it from Tool Answer Node
  api.updateSessionStateValues({
@@ -1240,21 +1247,21 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  flow: flowReferenceId,
  node: nodeId,
  } }, (isMcpToolCall && {
- mcpServerUrl: (_6 = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _6 === void 0 ? void 0 : _6.mcpServerUrl,
- timeout: (_7 = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _7 === void 0 ? void 0 : _7.timeout,
+ mcpServerUrl: (_8 = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _8 === void 0 ? void 0 : _8.mcpServerUrl,
+ timeout: (_9 = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _9 === void 0 ? void 0 : _9.timeout,
  mcpToolNode: toolChild === null || toolChild === void 0 ? void 0 : toolChild.id,
  })), { toolCall: mainToolCall }),
  });
  // if there are any parameters/arguments, add them to the input slots
  if (mainToolCall.function.arguments) {
- input.aiAgent = Object.assign(Object.assign({}, input.aiAgent), { toolArgs: Object.assign(Object.assign({}, (_9 = (_8 = input.aiAgent) === null || _8 === void 0 ? void 0 : _8.toolArgs) !== null && _9 !== void 0 ? _9 : {}), mainToolCall.function.arguments) });
+ input.aiAgent = Object.assign(Object.assign({}, input.aiAgent), { toolArgs: Object.assign(Object.assign({}, (_11 = (_10 = input.aiAgent) === null || _10 === void 0 ? void 0 : _10.toolArgs) !== null && _11 !== void 0 ? _11 : {}), mainToolCall.function.arguments) });
  }
  // Debug Message for Tool Calls, configured in the Tool Node
- if ((_10 = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _10 === void 0 ? void 0 : _10.debugMessage) {
- const toolId = isMcpToolCall ? mainToolCall.function.name : api.parseCognigyScriptText(toolChild.config.toolId);
+ if ((_12 = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _12 === void 0 ? void 0 : _12.debugMessage) {
+ const toolId = isMcpToolCall ? mainToolCall.function.name : await api.parseCognigyScriptText(toolChild.config.toolId);
  const messageLines = [`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER:</b> ${toolId}`];
  // Arguments / Parameters Slots
- const slots = ((_11 = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _11 === void 0 ? void 0 : _11.arguments) && Object.keys(mainToolCall.function.arguments);
+ const slots = ((_13 = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _13 === void 0 ? void 0 : _13.arguments) && Object.keys(mainToolCall.function.arguments);
  const hasSlots = slots && slots.length > 0;
  messageLines.push(`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__SLOTS</b>${hasSlots ? "" : " -"}`);
  if (hasSlots) {
@@ -1269,7 +1276,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  messageLines.push(`- ${slot}: ${slotValueAsString}`);
  });
  }
- (_12 = api.logDebugMessage) === null || _12 === void 0 ? void 0 : _12.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
+ (_14 = api.logDebugMessage) === null || _14 === void 0 ? void 0 : _14.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
  }
  if (toolChild) {
  api.setNextNode(toolChild.id);
@@ -1294,11 +1301,11 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  }
  // Optionally output the result immediately
  if (llmResult.result && outputImmediately && !llmPromptOptions.stream) {
- await ((_13 = api.output) === null || _13 === void 0 ? void 0 : _13.call(api, llmResult.result, {}));
+ await ((_15 = api.output) === null || _15 === void 0 ? void 0 : _15.call(api, llmResult.result, {}));
  }
  else if (llmResult.finishReason && llmPromptOptions.stream) {
  // send the finishReason as last output for a stream
- (_14 = api.output) === null || _14 === void 0 ? void 0 : _14.call(api, "", {
+ (_16 = api.output) === null || _16 === void 0 ? void 0 : _16.call(api, "", {
  _cognigy: {
  _preventTranscript: true,
  _messageId,
@@ -1321,7 +1328,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  }
  // Add response to Cognigy Input/Context for further usage
  if (storeLocation === "context") {
- (_15 = api.addToContext) === null || _15 === void 0 ? void 0 : _15.call(api, contextKey, llmResult, "simple");
+ (_17 = api.addToContext) === null || _17 === void 0 ? void 0 : _17.call(api, contextKey, llmResult, "simple");
  }
  else if (storeLocation === "input") {
  api.addToInput(inputKey, llmResult);
@@ -1334,14 +1341,14 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  const errorDetails = {
  name: (error === null || error === void 0 ? void 0 : error.name) || "Error",
  code: (error === null || error === void 0 ? void 0 : error.code) || (error === null || error === void 0 ? void 0 : error.httpStatusCode),
- message: (error === null || error === void 0 ? void 0 : error.message) || ((_16 = error.originalErrorDetails) === null || _16 === void 0 ? void 0 : _16.message),
+ message: (error === null || error === void 0 ? void 0 : error.message) || ((_18 = error.originalErrorDetails) === null || _18 === void 0 ? void 0 : _18.message),
  };
- (_17 = api.emitEvent) === null || _17 === void 0 ? void 0 : _17.call(api, "nodeError", { nodeId, flowId: flowReferenceId, errorMessage: error });
+ (_19 = api.emitEvent) === null || _19 === void 0 ? void 0 : _19.call(api, "nodeError", { nodeId, flowId: flowReferenceId, errorMessage: error });
  if (logErrorToSystem) {
- (_18 = api.log) === null || _18 === void 0 ? void 0 : _18.call(api, "error", JSON.stringify(errorDetails));
+ (_20 = api.log) === null || _20 === void 0 ? void 0 : _20.call(api, "error", JSON.stringify(errorDetails));
  }
  if (errorHandling !== "stop") {
- (_19 = api.logDebugError) === null || _19 === void 0 ? void 0 : _19.call(api, errorDetails.message + (errorDetails.code ? ` (error code: ${errorDetails.code})` : ""), errorDetails.name);
+ (_21 = api.logDebugError) === null || _21 === void 0 ? void 0 : _21.call(api, errorDetails.message + (errorDetails.code ? ` (error code: ${errorDetails.code})` : ""), errorDetails.name);
  }
  if (storeErrorInInput) {
  input.aiAgent = input.aiAgent || {};
@@ -1350,7 +1357,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  if (errorHandling === "continue") {
  // output the timeout message
  if (errorMessage) {
- await ((_20 = api.output) === null || _20 === void 0 ? void 0 : _20.call(api, errorMessage, null));
+ await ((_22 = api.output) === null || _22 === void 0 ? void 0 : _22.call(api, errorMessage, null));
  }
  // Set default node as next node
  const defaultChild = childConfigs.find(child => child.type === "aiAgentJobDefault");
@@ -1362,7 +1369,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  if (!errorHandlingGotoTarget) {
  throw new Error("GoTo Target is required");
  }
- if (!((_21 = api.checkThink) === null || _21 === void 0 ? void 0 : _21.call(api, nodeId))) {
+ if (!((_23 = api.checkThink) === null || _23 === void 0 ? void 0 : _23.call(api, nodeId))) {
  api.resetNextNodes();
  await api.executeFlow({
  flowNode: {
package/build/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js CHANGED
@@ -36,11 +36,11 @@ const createToolDefinitions = async (childConfigs, api, useStrict) => {
  }
  const toolId = child.config.toolId;
  if ((child.type === "aiAgentJobTool" || child.type === "llmPromptTool") &&
- (!child.config.condition || !!api.parseCognigyScriptCondition(child.config.condition))) {
+ (!child.config.condition || !!await api.parseCognigyScriptCondition(child.config.condition))) {
  if (!toolId) {
  throw new Error(`Tool ID is missing in Tool Node configuration.`);
  }
- const parsedToolId = api.parseCognigyScriptText(toolId);
+ const parsedToolId = await api.parseCognigyScriptText(toolId);
  if (!(0, createSystemMessage_1.validateToolId)(parsedToolId)) {
  throw new Error(`Tool ID ${parsedToolId} is not valid. Please use only alphanumeric characters, dashes and underscores.`);
  }
@@ -53,7 +53,7 @@ const createToolDefinitions = async (childConfigs, api, useStrict) => {
  type: "function",
  function: {
  name: parsedToolId,
- description: api.parseCognigyScriptText(child.config.description),
+ description: await api.parseCognigyScriptText(child.config.description),
  },
  };
  if (useStrict) {
@@ -65,7 +65,7 @@ const createToolDefinitions = async (childConfigs, api, useStrict) => {
  tools.push(tool);
  }
  if ((child.type === "aiAgentJobMCPTool" || child.type === "llmPromptMCPTool") &&
- (!child.config.condition || !!api.parseCognigyScriptCondition(child.config.condition))) {
+ (!child.config.condition || !!await api.parseCognigyScriptCondition(child.config.condition))) {
  if (!child.config.mcpServerUrl) {
  throw new Error(`MCP Server URL is missing in Tool Node configuration.`);
  }
package/build/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js CHANGED
@@ -638,7 +638,7 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  },
  tags: ["ai", "llm", "gpt", "generative ai", "openai", "azure", "prompt"],
  function: async ({ cognigy, config, childConfigs, nodeId }) => {
- var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
  const { api, input, flowReferenceId } = cognigy;
  const { temperature, maxTokens, topP, presencePenalty, frequencyPenalty, useStop, stop, storeLocation, contextKey, inputKey, timeout, streamStopTokens, streamStopTokenOverrides, debugLogTokenCount, debugLogRequestAndCompletion, debugLogLLMLatency, debugLogToolDefinitions, llmProviderReferenceId, usePromptMode, chatTranscriptSteps, responseFormat, streamStoreCopyInInput, seed, immediateOutput, customModelOptions, customRequestOptions, errorHandling = "continue", // default behavior for LLM Prompt node was, continue its execution even though an error occurred (deviating it from the SEO node) & do not output an error message on UI explicitly. However, error is always stored in the input or context object. We can use an extra "say" node to output it.
  errorHandlingGotoTarget, errorMessage, logErrorToSystem, processImages, transcriptImageHandling, toolChoice, useStrict } = config;
@@ -830,14 +830,20 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  const mainToolCall = llmResult.toolCalls[0];
  let isMcpToolCall = false;
  // Find the child node with the toolId of the tool call
- let toolChild = childConfigs.find(child => { var _a, _b; return child.type === "llmPromptTool" && ((_a = child.config) === null || _a === void 0 ? void 0 : _a.toolId) && api.parseCognigyScriptText((_b = child.config) === null || _b === void 0 ? void 0 : _b.toolId) === mainToolCall.function.name; });
+ let toolChild = undefined;
+ for (const child of childConfigs) {
+ if (child.type === "llmPromptTool" && ((_e = child.config) === null || _e === void 0 ? void 0 : _e.toolId) && await api.parseCognigyScriptText((_f = child.config) === null || _f === void 0 ? void 0 : _f.toolId) === mainToolCall.function.name) {
+ toolChild = child;
+ break;
+ }
+ }
  if (!toolChild && toolMap.has(mainToolCall.function.name)) {
  // If the tool call is from an MCP tool, set the next node to the corresponding child node
  toolChild = childConfigs.find(child => child.id === toolMap.get(mainToolCall.function.name));
  isMcpToolCall = true;
  }
  if (mainToolCall.function.name !== "retrieve_knowledge" && toolChild === undefined) {
- (_e = api.logDebugError) === null || _e === void 0 ? void 0 : _e.call(api, `UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__BODY <b>${mainToolCall.function.name}</b>`, "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__HEADER");
+ (_g = api.logDebugError) === null || _g === void 0 ? void 0 : _g.call(api, `UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__BODY <b>${mainToolCall.function.name}</b>`, "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__ERROR__HEADER");
  }
  // Add last tool call to session state for loading it from Tool Answer Node
  api.updateSessionStateValues({
@@ -845,20 +851,20 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  flow: flowReferenceId,
  node: nodeId,
  } }, (isMcpToolCall && {
- mcpServerUrl: (_f = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _f === void 0 ? void 0 : _f.mcpServerUrl,
- timeout: (_g = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _g === void 0 ? void 0 : _g.timeout,
+ mcpServerUrl: (_h = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _h === void 0 ? void 0 : _h.mcpServerUrl,
+ timeout: (_j = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _j === void 0 ? void 0 : _j.timeout,
  mcpToolNode: toolChild === null || toolChild === void 0 ? void 0 : toolChild.id,
  })), { toolCall: mainToolCall }),
  });
  // if there are any parameters/arguments, add them to the input slots
  if (mainToolCall.function.arguments) {
- input.llmPrompt = Object.assign(Object.assign({}, input.llmPrompt), { toolArgs: Object.assign(Object.assign({}, (_j = (_h = input.llmPrompt) === null || _h === void 0 ? void 0 : _h.toolArgs) !== null && _j !== void 0 ? _j : {}), mainToolCall.function.arguments) });
+ input.llmPrompt = Object.assign(Object.assign({}, input.llmPrompt), { toolArgs: Object.assign(Object.assign({}, (_l = (_k = input.llmPrompt) === null || _k === void 0 ? void 0 : _k.toolArgs) !== null && _l !== void 0 ? _l : {}), mainToolCall.function.arguments) });
  }
  // Debug Message for Tool Calls, configured in the Tool Node
- if ((_k = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _k === void 0 ? void 0 : _k.debugMessage) {
- const messageLines = [`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER:</b> ${api.parseCognigyScriptText(toolChild.config.toolId)}`];
+ if ((_m = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _m === void 0 ? void 0 : _m.debugMessage) {
+ const messageLines = [`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER:</b> ${await api.parseCognigyScriptText(toolChild.config.toolId)}`];
  // Arguments / Parameters Slots
- const slots = ((_l = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _l === void 0 ? void 0 : _l.arguments) && Object.keys(mainToolCall.function.arguments);
+ const slots = ((_o = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _o === void 0 ? void 0 : _o.arguments) && Object.keys(mainToolCall.function.arguments);
  const hasSlots = slots && slots.length > 0;
  messageLines.push(`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__SLOTS</b>${hasSlots ? "" : " -"}`);
  if (hasSlots) {
@@ -873,7 +879,7 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  messageLines.push(`- ${slot}: ${slotValueAsString}`);
  });
  }
- (_m = api.logDebugMessage) === null || _m === void 0 ? void 0 : _m.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
+ (_p = api.logDebugMessage) === null || _p === void 0 ? void 0 : _p.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
  }
  if (toolChild) {
  api.setNextNode(toolChild.id);
@@ -898,11 +904,11 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  // we stringify objects (e.g. results coming from JSON Mode)
  // so that the transcript only contains text
  const resultToOutput = typeof ((llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult) === "object" ? JSON.stringify((llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult, undefined, 2) : (llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult;
- await ((_o = api.output) === null || _o === void 0 ? void 0 : _o.call(api, resultToOutput, {}));
+ await ((_q = api.output) === null || _q === void 0 ? void 0 : _q.call(api, resultToOutput, {}));
  }
  else if (llmResult.finishReason && llmPromptOptions.stream) {
  // send the finishReason as last output for a stream
- (_p = api.output) === null || _p === void 0 ? void 0 : _p.call(api, "", {
+ (_r = api.output) === null || _r === void 0 ? void 0 : _r.call(api, "", {
  _cognigy: {
  _preventTranscript: true,
  _messageId,
@@ -925,7 +931,7 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  }
  // Add response to Cognigy Input/Context for further usage
  if (storeLocation === "context") {
- (_q = api.addToContext) === null || _q === void 0 ? void 0 : _q.call(api, contextKey, llmResult, "simple");
+ (_s = api.addToContext) === null || _s === void 0 ? void 0 : _s.call(api, contextKey, llmResult, "simple");
  }
  else if (storeLocation === "input") {
  api.addToInput(inputKey, llmResult);
@@ -938,19 +944,19 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  const errorDetailsBase = {
  name: error === null || error === void 0 ? void 0 : error.name,
  code: (error === null || error === void 0 ? void 0 : error.code) || (error === null || error === void 0 ? void 0 : error.httpStatusCode),
- message: (error === null || error === void 0 ? void 0 : error.message) || ((_r = error.originalErrorDetails) === null || _r === void 0 ? void 0 : _r.message),
+ message: (error === null || error === void 0 ? void 0 : error.message) || ((_t = error.originalErrorDetails) === null || _t === void 0 ? void 0 : _t.message),
  };
  const errorDetails = Object.assign(Object.assign({}, errorDetailsBase), { originalErrorDetails: error === null || error === void 0 ? void 0 : error.originalErrorDetails });
  // return the requestId if it exist in the error obj.
- if ((_s = error.meta) === null || _s === void 0 ? void 0 : _s.requestId) {
+ if ((_u = error.meta) === null || _u === void 0 ? void 0 : _u.requestId) {
  errorDetails["meta"] = {
- requestId: (_t = error.meta) === null || _t === void 0 ? void 0 : _t.requestId
+ requestId: (_v = error.meta) === null || _v === void 0 ? void 0 : _v.requestId
  };
  }
  if (logErrorToSystem) {
- (_u = api.log) === null || _u === void 0 ? void 0 : _u.call(api, "error", JSON.stringify(errorDetailsBase));
+ (_w = api.log) === null || _w === void 0 ? void 0 : _w.call(api, "error", JSON.stringify(errorDetailsBase));
  }
- (_v = api.logDebugError) === null || _v === void 0 ? void 0 : _v.call(api, errorDetailsBase, "UI__DEBUG_MODE__LLM_PROMPT__ERROR");
+ (_x = api.logDebugError) === null || _x === void 0 ? void 0 : _x.call(api, errorDetailsBase, "UI__DEBUG_MODE__LLM_PROMPT__ERROR");
  await handleServiceError(errorDetails);
  return;
  }
package/build/shared/charts/descriptors/transcripts/addTranscriptStep.js CHANGED
@@ -328,7 +328,7 @@ exports.ADD_TRANSCRIPT_STEP = (0, createNodeDescriptor_1.createNodeDescriptor)({
  },
  tags: ["service", "transcripts"],
  function: async ({ cognigy, config }) => {
- const { role, agentType, text, data, name, id, input, toolCallId, content, header, message, metadata } = config;
+ const { role, text, data, name, id, input, toolCallId, assistantType, content, header, message, metadata } = config;
  const { api } = cognigy;
  let log = null;
  switch (role) {
@@ -344,7 +344,7 @@ exports.ADD_TRANSCRIPT_STEP = (0, createNodeDescriptor_1.createNodeDescriptor)({
  };
  break;
  case transcripts_1.TranscriptRole.ASSISTANT:
- if (agentType === transcripts_1.TranscriptEntryType.OUTPUT) {
+ if (assistantType === transcripts_1.TranscriptEntryType.OUTPUT) {
  log = {
  role: transcripts_1.TranscriptRole.ASSISTANT,
  type: transcripts_1.TranscriptEntryType.OUTPUT,
@@ -355,7 +355,7 @@ exports.ADD_TRANSCRIPT_STEP = (0, createNodeDescriptor_1.createNodeDescriptor)({
  }
  };
  }
- else if (agentType === transcripts_1.TranscriptEntryType.TOOL_CALL) {
+ else if (assistantType === transcripts_1.TranscriptEntryType.TOOL_CALL) {
  log = {
  role: transcripts_1.TranscriptRole.ASSISTANT,
  type: transcripts_1.TranscriptEntryType.TOOL_CALL,
package/build/shared/interfaces/generativeAI/IGenerativeAIModels.js CHANGED
@@ -53,6 +53,7 @@ exports.generativeAIModels = [
  "pixtral-large-latest",
  "mistral-medium-latest",
  "mistral-small-latest",
+ "text-davinci-003",
  ...exports.embeddingModels,
  ];
  exports.generativeAIProviders = [
package/build/shared/interfaces/resources/IGetAiAgentJobsTools.js ADDED
@@ -0,0 +1,3 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ //# sourceMappingURL=IGetAiAgentJobsTools.js.map
package/build/shared/interfaces/resources/IKnowledgeDescriptor.js CHANGED
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.knowledgeDescriptorSchema = exports.knowledgeFieldSchema = exports.knowledgeFieldTypes = void 0;
+ exports.buildConfigValidationSchema = exports.knowledgeDescriptorSchema = exports.knowledgeFieldSchema = exports.knowledgeFieldTypes = void 0;
  const INodeDescriptorSet_1 = require("./INodeDescriptorSet");
  exports.knowledgeFieldTypes = [
  "text",
@@ -20,13 +20,13 @@ exports.knowledgeFieldTypes = [
  "daterange",
  "connection",
  "condition",
- "description",
+ "description"
  ];
  exports.knowledgeFieldSchema = {
  title: "knowledgeFieldSchema",
  type: "object",
  additionalProperties: false,
- properties: Object.assign(Object.assign({}, INodeDescriptorSet_1.nodeFieldSchema.properties), { type: { type: "string", enum: [...exports.knowledgeFieldTypes] }, key: { type: "string", minLength: 1, maxLength: 200 } }),
+ properties: Object.assign(Object.assign({}, INodeDescriptorSet_1.nodeFieldSchema.properties), { type: { type: "string", enum: [...exports.knowledgeFieldTypes] }, key: { type: "string", minLength: 1, maxLength: 200 } })
  };
  const { type, summary, defaultLabel, sections, form } = INodeDescriptorSet_1.nodeDescriptorSchema.properties;
  exports.knowledgeDescriptorSchema = {
@@ -39,7 +39,40 @@ exports.knowledgeDescriptorSchema = {
  summary,
  sections,
  form,
- fields: { type: "array", items: exports.knowledgeFieldSchema },
- },
+ fields: { type: "array", items: exports.knowledgeFieldSchema }
+ }
+ };
+ const filterNonConfigFields = ({ type }) => !["description"].includes(type);
+ const buildConfigValidationSchema = (fields) => ({
+ type: "object",
+ additionalProperties: false,
+ required: (fields || []).filter(filterNonConfigFields).map(({ key }) => key),
+ properties: Object.assign({}, (fields || []).filter(filterNonConfigFields).reduce((result, field) => (Object.assign(Object.assign({}, result), { [field.key]: mapFieldToSchema(field) })), {}))
+ });
+ exports.buildConfigValidationSchema = buildConfigValidationSchema;
+ const mapFieldToSchema = ({ type, params }) => {
+ switch (type) {
+ case "checkbox":
+ case "toggle":
+ return { type: "boolean" };
+ case "number":
+ case "slider":
+ return { type: "number" };
+ case "textArray":
+ return {
+ type: "array",
+ minLength: (params === null || params === void 0 ? void 0 : params.required) ? 1 : 0
+ };
+ case "json":
+ return {
+ type: ["object", "array"],
+ additionalProperties: true
+ };
+ default:
+ return {
+ type: "string",
+ minLength: (params === null || params === void 0 ? void 0 : params.required) ? 1 : 0
+ };
+ }
  };
  //# sourceMappingURL=IKnowledgeDescriptor.js.map
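The new `buildConfigValidationSchema` turns a knowledge descriptor's field list into a JSON Schema for its config object: `description` fields are filtered out, every remaining key becomes required, and `mapFieldToSchema` picks the value type per field type. A hedged usage sketch follows; validating with Ajv and importing the helper from the package root are assumptions of this sketch (the hunk only shows it being exported from the IKnowledgeDescriptor module), and the field list is illustrative.

```ts
import Ajv from "ajv";
// Assumption: the helper is re-exported from the package root; otherwise import it
// from the IKnowledgeDescriptor module shown above.
import { buildConfigValidationSchema } from "@cognigy/rest-api-client";

// Illustrative field list (shape inferred from knowledgeFieldSchema above).
const fields = [
  { type: "text", key: "collectionName", params: { required: true } },
  { type: "toggle", key: "overwrite" },
  { type: "description", key: "hint" }, // filtered out by filterNonConfigFields
] as any[];

const schema = buildConfigValidationSchema(fields);
// Expected shape per the hunk above:
// { type: "object", additionalProperties: false,
//   required: ["collectionName", "overwrite"],
//   properties: { collectionName: { type: "string", minLength: 1 },
//                 overwrite: { type: "boolean" } } }

const validate = new Ajv().compile(schema);
console.log(validate({ collectionName: "faq", overwrite: false })); // true
console.log(validate({ overwrite: true })); // false: collectionName is required
```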