@cognigy/rest-api-client 2025.19.0 → 2025.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/CHANGELOG.md +5 -0
  2. package/build/apigroups/InsightsAPIGroup_2_0.js +10 -10
  3. package/build/apigroups/MetricsAPIGroup_2_0.js +4 -0
  4. package/build/shared/charts/descriptors/nlu/fuzzySearch.js +6 -6
  5. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +81 -21
  6. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJobCallMCPTool.js +7 -5
  7. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJobMCPTool.js +8 -1
  8. package/build/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js +2 -0
  9. package/build/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +78 -18
  10. package/build/shared/charts/descriptors/service/llmPrompt/llmPromptMCPTool.js +8 -1
  11. package/build/shared/interfaces/IOrganisation.js +2 -0
  12. package/build/shared/interfaces/license.js +9 -1
  13. package/build/shared/interfaces/resources/IAuditEvent.js +2 -1
  14. package/build/shared/interfaces/resources/knowledgeStore/IKnowledgeSource.js +1 -1
  15. package/build/shared/interfaces/restAPI/metrics/logs/v2.0/ITailLogEntriesRest_2_0.js +3 -0
  16. package/dist/esm/apigroups/InsightsAPIGroup_2_0.js +10 -10
  17. package/dist/esm/apigroups/MetricsAPIGroup_2_0.js +4 -0
  18. package/dist/esm/shared/charts/descriptors/nlu/fuzzySearch.js +6 -6
  19. package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +81 -21
  20. package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJobCallMCPTool.js +7 -5
  21. package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJobMCPTool.js +8 -1
  22. package/dist/esm/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js +2 -0
  23. package/dist/esm/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +85 -25
  24. package/dist/esm/shared/charts/descriptors/service/llmPrompt/llmPromptMCPTool.js +8 -1
  25. package/dist/esm/shared/interfaces/IOrganisation.js +2 -0
  26. package/dist/esm/shared/interfaces/license.js +9 -1
  27. package/dist/esm/shared/interfaces/resources/IAuditEvent.js +2 -1
  28. package/dist/esm/shared/interfaces/resources/knowledgeStore/IKnowledgeSource.js +1 -1
  29. package/dist/esm/shared/interfaces/restAPI/metrics/logs/v2.0/ITailLogEntriesRest_2_0.js +2 -0
  30. package/package.json +1 -1
  31. package/types/index.d.ts +58 -2
@@ -395,6 +395,57 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
395
395
  description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__INCLUDE_ALL_OUTPUT_TYPES__DESCRIPTION",
396
396
  defaultValue: true,
397
397
  },
398
+ {
399
+ key: "advancedLogging",
400
+ label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__ADVANCED_LOGGING__LABEL",
401
+ type: "toggle",
402
+ description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__ADVANCED_LOGGING__DESCRIPTION",
403
+ defaultValue: false,
404
+ },
405
+ {
406
+ key: "loggingWebhookUrl",
407
+ label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_WEBHOOK_URL__LABEL",
408
+ type: "cognigyText",
409
+ description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_WEBHOOK_URL__DESCRIPTION",
410
+ defaultValue: "",
411
+ condition: {
412
+ key: "advancedLogging",
413
+ value: true
414
+ }
415
+ },
416
+ {
417
+ key: "loggingCustomData",
418
+ label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CUSTOM_LOGGING_DATA__LABEL",
419
+ type: "cognigyText",
420
+ description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CUSTOM_LOGGING_DATA__DESCRIPTION",
421
+ defaultValue: "",
422
+ condition: {
423
+ key: "advancedLogging",
424
+ value: true
425
+ }
426
+ },
427
+ {
428
+ key: "loggingHeaders",
429
+ type: "keyValuePairs",
430
+ label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_HEADERS__LABEL",
431
+ description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_HEADERS__DESCRIPTION",
432
+ defaultValue: "{}",
433
+ condition: {
434
+ key: "advancedLogging",
435
+ value: true
436
+ }
437
+ },
438
+ {
439
+ key: "conditionForLogging",
440
+ label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CONDITION_FOR_LOGGING__LABEL",
441
+ type: "cognigyText",
442
+ description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CONDITION_FOR_LOGGING__DESCRIPTION",
443
+ defaultValue: "",
444
+ condition: {
445
+ key: "advancedLogging",
446
+ value: true
447
+ }
448
+ },
398
449
  {
399
450
  key: "customModelOptions",
400
451
  label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__CUSTOM_MODEL_OPTIONS__LABEL",
@@ -595,7 +646,12 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
595
646
  "debugLogTokenCount",
596
647
  "debugLogRequestAndCompletion",
597
648
  "debugLogLLMLatency",
598
- "debugLogToolDefinitions"
649
+ "debugLogToolDefinitions",
650
+ "advancedLogging",
651
+ "loggingWebhookUrl",
652
+ "loggingCustomData",
653
+ "conditionForLogging",
654
+ "loggingHeaders",
599
655
  ]
600
656
  },
601
657
  {
@@ -633,10 +689,10 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
633
689
  },
634
690
  tags: ["ai", "llm", "gpt", "generative ai", "openai", "azure", "prompt"],
635
691
  function: ({ cognigy, config, childConfigs, nodeId }) => __awaiter(void 0, void 0, void 0, function* () {
636
- var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
692
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y;
637
693
  const { api, input, flowReferenceId } = cognigy;
638
694
  const { temperature, maxTokens, topP, presencePenalty, frequencyPenalty, useStop, stop, storeLocation, contextKey, inputKey, timeout, streamStopTokens, streamStopTokenOverrides, debugLogTokenCount, debugLogRequestAndCompletion, debugLogLLMLatency, debugLogToolDefinitions, llmProviderReferenceId, usePromptMode, chatTranscriptSteps, responseFormat, streamStoreCopyInInput, seed, immediateOutput, customModelOptions, customRequestOptions, errorHandling = "continue", // default behavior for LLM Prompt node was, continue its execution even though an error occurred (deviating it from the SEO node) & do not output an error message on UI explicitly. However, error is always stored in the input or context object. We can use an extra "say" node to output it.
639
- errorHandlingGotoTarget, errorMessage, useTextAlternativeForLLM, logErrorToSystem, processImages, transcriptImageHandling, toolChoice, useStrict } = config;
695
+ errorHandlingGotoTarget, errorMessage, useTextAlternativeForLLM, advancedLogging, loggingWebhookUrl, loggingCustomData, loggingHeaders, conditionForLogging, logErrorToSystem, processImages, transcriptImageHandling, toolChoice, useStrict } = config;
640
696
  let prompt = config.prompt || "";
641
697
  const { traceId } = input;
642
698
  // check if custom variables are used and if they have a length modifier
@@ -661,17 +717,17 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
661
717
  }
662
718
  // handle errors from external services, depending on the settings
663
719
  const handleServiceError = (error) => __awaiter(void 0, void 0, void 0, function* () {
664
- var _y, _z, _0, _1, _2, _3;
720
+ var _z, _0, _1, _2, _3, _4;
665
721
  const compactError = {
666
722
  name: error === null || error === void 0 ? void 0 : error.name,
667
723
  code: error === null || error === void 0 ? void 0 : error.code,
668
724
  message: (error === null || error === void 0 ? void 0 : error.message) || error
669
725
  };
670
726
  // return the requestId if it exist in the error obj.
671
- if ((_y = error === null || error === void 0 ? void 0 : error.meta) === null || _y === void 0 ? void 0 : _y.requestId) {
672
- compactError["requestId"] = (_z = error === null || error === void 0 ? void 0 : error.meta) === null || _z === void 0 ? void 0 : _z.requestId;
727
+ if ((_z = error === null || error === void 0 ? void 0 : error.meta) === null || _z === void 0 ? void 0 : _z.requestId) {
728
+ compactError["requestId"] = (_0 = error === null || error === void 0 ? void 0 : error.meta) === null || _0 === void 0 ? void 0 : _0.requestId;
673
729
  }
674
- if ((_0 = error === null || error === void 0 ? void 0 : error.originalErrorDetails) === null || _0 === void 0 ? void 0 : _0.code) {
730
+ if ((_1 = error === null || error === void 0 ? void 0 : error.originalErrorDetails) === null || _1 === void 0 ? void 0 : _1.code) {
675
731
  compactError.code = error.originalErrorDetails.code;
676
732
  }
677
733
  const errorResponse = {
@@ -680,7 +736,7 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
680
736
  // add error to context or input
681
737
  switch (storeLocation) {
682
738
  case "context":
683
- (_1 = api.addToContext) === null || _1 === void 0 ? void 0 : _1.call(api, contextKey, errorResponse, "simple");
739
+ (_2 = api.addToContext) === null || _2 === void 0 ? void 0 : _2.call(api, contextKey, errorResponse, "simple");
684
740
  break;
685
741
  default:
686
742
  api.addToInput(inputKey, errorResponse);
@@ -688,7 +744,7 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
688
744
  if (errorHandling === "continue") {
689
745
  // output the timeout message
690
746
  if (errorMessage) {
691
- yield ((_2 = api.output) === null || _2 === void 0 ? void 0 : _2.call(api, errorMessage, null));
747
+ yield ((_3 = api.output) === null || _3 === void 0 ? void 0 : _3.call(api, errorMessage, null));
692
748
  }
693
749
  // Continue with default node as next node
694
750
  const defaultChild = childConfigs === null || childConfigs === void 0 ? void 0 : childConfigs.find(child => child.type === "llmPromptDefault");
@@ -715,7 +771,7 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
715
771
  absorbContext: false
716
772
  }
717
773
  };
718
- yield ((_3 = GO_TO.function) === null || _3 === void 0 ? void 0 : _3.call(GO_TO, gotoParams));
774
+ yield ((_4 = GO_TO.function) === null || _4 === void 0 ? void 0 : _4.call(GO_TO, gotoParams));
719
775
  }
720
776
  else {
721
777
  throw new InternalServerError(error === null || error === void 0 ? void 0 : error.message, { traceId });
@@ -730,10 +786,11 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
730
786
  * Retrieve the tool definitions from the child nodes
731
787
  */
732
788
  const { toolIds, toolNames, toolMap, tools } = yield createToolDefinitions(childConfigs, api, useStrict);
789
+ const enableAdvancedLogging = advancedLogging && loggingWebhookUrl && (conditionForLogging === "" || !!conditionForLogging);
733
790
  /**
734
791
  * Generate Prompt Options
735
792
  */
736
- const llmPromptOptions = Object.assign(Object.assign(Object.assign({ prompt,
793
+ const llmPromptOptions = Object.assign(Object.assign(Object.assign(Object.assign({ prompt,
737
794
  temperature,
738
795
  maxTokens,
739
796
  topP,
@@ -756,7 +813,9 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
756
813
  }
757
814
  }, streamStopTokens: streamStopTokens || [".", "!", "?", "\\n"], streamStopTokenOverrides, preventNewLineRemoval: isStreamingChannel ? true : false,
758
815
  // set to true in order to get token usage
759
- detailedResults: true, seed: Number(seed) ? Number(seed) : undefined }, (tools.length > 0 && { tools })), (tools.length > 0 && { toolChoice: toolChoice })), { customModelOptions,
816
+ detailedResults: true, seed: Number(seed) ? Number(seed) : undefined }, (tools.length > 0 && { tools })), (tools.length > 0 && { toolChoice: toolChoice })), (enableAdvancedLogging && {
817
+ logging: Object.assign(Object.assign({ webhookUrl: loggingWebhookUrl }, (loggingCustomData && { customData: loggingCustomData })), (loggingHeaders && { headers: loggingHeaders }))
818
+ })), { customModelOptions,
760
819
  customRequestOptions });
761
820
  if (useStop) {
762
821
  llmPromptOptions["stop"] = stop;
@@ -848,19 +907,20 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
848
907
  node: nodeId,
849
908
  } }, (isMcpToolCall && {
850
909
  mcpServerUrl: (_h = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _h === void 0 ? void 0 : _h.mcpServerUrl,
851
- timeout: (_j = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _j === void 0 ? void 0 : _j.timeout,
910
+ mcpHeaders: (_j = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _j === void 0 ? void 0 : _j.mcpHeaders,
911
+ timeout: (_k = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _k === void 0 ? void 0 : _k.timeout,
852
912
  mcpToolNode: toolChild === null || toolChild === void 0 ? void 0 : toolChild.id,
853
913
  })), { toolCall: mainToolCall }),
854
914
  });
855
915
  // if there are any parameters/arguments, add them to the input slots
856
916
  if (mainToolCall.function.arguments) {
857
- input.llmPrompt = Object.assign(Object.assign({}, input.llmPrompt), { toolArgs: Object.assign(Object.assign({}, (_l = (_k = input.llmPrompt) === null || _k === void 0 ? void 0 : _k.toolArgs) !== null && _l !== void 0 ? _l : {}), mainToolCall.function.arguments) });
917
+ input.llmPrompt = Object.assign(Object.assign({}, input.llmPrompt), { toolArgs: Object.assign(Object.assign({}, (_m = (_l = input.llmPrompt) === null || _l === void 0 ? void 0 : _l.toolArgs) !== null && _m !== void 0 ? _m : {}), mainToolCall.function.arguments) });
858
918
  }
859
919
  // Debug Message for Tool Calls, configured in the Tool Node
860
- if ((_m = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _m === void 0 ? void 0 : _m.debugMessage) {
920
+ if ((_o = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _o === void 0 ? void 0 : _o.debugMessage) {
861
921
  const messageLines = [`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER:</b> ${yield api.parseCognigyScriptText(toolChild.config.toolId)}`];
862
922
  // Arguments / Parameters Slots
863
- const slots = ((_o = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _o === void 0 ? void 0 : _o.arguments) && Object.keys(mainToolCall.function.arguments);
923
+ const slots = ((_p = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _p === void 0 ? void 0 : _p.arguments) && Object.keys(mainToolCall.function.arguments);
864
924
  const hasSlots = slots && slots.length > 0;
865
925
  messageLines.push(`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__SLOTS</b>${hasSlots ? "" : " -"}`);
866
926
  if (hasSlots) {
@@ -875,7 +935,7 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
875
935
  messageLines.push(`- ${slot}: ${slotValueAsString}`);
876
936
  });
877
937
  }
878
- (_p = api.logDebugMessage) === null || _p === void 0 ? void 0 : _p.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
938
+ (_q = api.logDebugMessage) === null || _q === void 0 ? void 0 : _q.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
879
939
  }
880
940
  if (toolChild) {
881
941
  api.setNextNode(toolChild.id);
@@ -900,11 +960,11 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
900
960
  // we stringify objects (e.g. results coming from JSON Mode)
901
961
  // so that the transcript only contains text
902
962
  const resultToOutput = typeof ((llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult) === "object" ? JSON.stringify((llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult, undefined, 2) : (llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult;
903
- yield ((_q = api.output) === null || _q === void 0 ? void 0 : _q.call(api, resultToOutput, {}));
963
+ yield ((_r = api.output) === null || _r === void 0 ? void 0 : _r.call(api, resultToOutput, {}));
904
964
  }
905
965
  else if (llmResult.finishReason && llmPromptOptions.stream) {
906
966
  // send the finishReason as last output for a stream
907
- (_r = api.output) === null || _r === void 0 ? void 0 : _r.call(api, "", {
967
+ (_s = api.output) === null || _s === void 0 ? void 0 : _s.call(api, "", {
908
968
  _cognigy: {
909
969
  _preventTranscript: true,
910
970
  _messageId,
@@ -927,7 +987,7 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
927
987
  }
928
988
  // Add response to Cognigy Input/Context for further usage
929
989
  if (storeLocation === "context") {
930
- (_s = api.addToContext) === null || _s === void 0 ? void 0 : _s.call(api, contextKey, llmResult, "simple");
990
+ (_t = api.addToContext) === null || _t === void 0 ? void 0 : _t.call(api, contextKey, llmResult, "simple");
931
991
  }
932
992
  else if (storeLocation === "input") {
933
993
  api.addToInput(inputKey, llmResult);
@@ -940,19 +1000,19 @@ export const LLM_PROMPT_V2 = createNodeDescriptor({
940
1000
  const errorDetailsBase = {
941
1001
  name: error === null || error === void 0 ? void 0 : error.name,
942
1002
  code: (error === null || error === void 0 ? void 0 : error.code) || (error === null || error === void 0 ? void 0 : error.httpStatusCode),
943
- message: (error === null || error === void 0 ? void 0 : error.message) || ((_t = error.originalErrorDetails) === null || _t === void 0 ? void 0 : _t.message),
1003
+ message: (error === null || error === void 0 ? void 0 : error.message) || ((_u = error.originalErrorDetails) === null || _u === void 0 ? void 0 : _u.message),
944
1004
  };
945
1005
  const errorDetails = Object.assign(Object.assign({}, errorDetailsBase), { originalErrorDetails: error === null || error === void 0 ? void 0 : error.originalErrorDetails });
946
1006
  // return the requestId if it exist in the error obj.
947
- if ((_u = error.meta) === null || _u === void 0 ? void 0 : _u.requestId) {
1007
+ if ((_v = error.meta) === null || _v === void 0 ? void 0 : _v.requestId) {
948
1008
  errorDetails["meta"] = {
949
- requestId: (_v = error.meta) === null || _v === void 0 ? void 0 : _v.requestId
1009
+ requestId: (_w = error.meta) === null || _w === void 0 ? void 0 : _w.requestId
950
1010
  };
951
1011
  }
952
1012
  if (logErrorToSystem) {
953
- (_w = api.log) === null || _w === void 0 ? void 0 : _w.call(api, "error", JSON.stringify(errorDetailsBase));
1013
+ (_x = api.log) === null || _x === void 0 ? void 0 : _x.call(api, "error", JSON.stringify(errorDetailsBase));
954
1014
  }
955
- (_x = api.logDebugError) === null || _x === void 0 ? void 0 : _x.call(api, errorDetailsBase, "UI__DEBUG_MODE__LLM_PROMPT__ERROR");
1015
+ (_y = api.logDebugError) === null || _y === void 0 ? void 0 : _y.call(api, errorDetailsBase, "UI__DEBUG_MODE__LLM_PROMPT__ERROR");
956
1016
  yield handleServiceError(errorDetails);
957
1017
  return;
958
1018
  }
@@ -160,6 +160,13 @@ export const LLM_PROMPT_MCP_TOOL = createNodeDescriptor({
160
160
  value: "blacklist",
161
161
  },
162
162
  },
163
+ {
164
+ key: "mcpHeaders",
165
+ type: "keyValuePairs",
166
+ label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_MCP_TOOL__FIELDS__HEADERS__LABEL",
167
+ description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_MCP_TOOL__FIELDS__HEADERS__DESCRIPTION",
168
+ defaultValue: "{}",
169
+ },
163
170
  ],
164
171
  sections: [
165
172
  {
@@ -172,7 +179,7 @@ export const LLM_PROMPT_MCP_TOOL = createNodeDescriptor({
172
179
  key: "advanced",
173
180
  label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__SECTIONS__ADVANCED__LABEL",
174
181
  defaultCollapsed: true,
175
- fields: ["cacheTools", "condition", "toolFilter", "whitelist", "blacklist"],
182
+ fields: ["cacheTools", "condition", "toolFilter", "whitelist", "blacklist", "mcpHeaders"],
176
183
  },
177
184
  ],
178
185
  form: [
@@ -56,12 +56,14 @@ export const organisationDataSchema = {
56
56
  stepEventsTTLInMinutes: { type: "integer", minimum: 0 },
57
57
  disabled: { type: "boolean" },
58
58
  name: { type: "string", minLength: 1, format: "resource-name" },
59
+ businessUnitId: { type: "string" },
59
60
  passwordPolicy: organisationPasswordPolicySchema,
60
61
  quotaMaxChannelsPerProject: { type: "integer", minimum: 0 },
61
62
  quotaMaxMessagesPerDay: { type: "integer", minimum: 0 },
62
63
  quotaMaxProjects: { type: "integer", minimum: 0 },
63
64
  quotaMaxUsers: { type: "integer", minimum: 0 },
64
65
  quotaMaxKnowledgeChunks: { type: "integer", minimum: 0 },
66
+ aiOpsCenterEnabled: { type: "boolean" },
65
67
  sessionStateTTLInMinutes: { type: "integer", minimum: 0 },
66
68
  billingTimezone: { type: "string", format: "timezone" },
67
69
  dataPrivacySettings: organisationDataPrivacySettingsSchema,
@@ -13,6 +13,13 @@
13
13
  * - willExpireSoon
14
14
  * - expiredRenewRequired
15
15
  * - expired
16
+ * systemCapabilities:
17
+ * type: object
18
+ * properties:
19
+ * aiOpsCenterEnabled:
20
+ * type: boolean
21
+ * quotaMaxKnowledgeChunks:
22
+ * type: number
16
23
  */
17
24
  export const licenseStates = [
18
25
  "invalid",
@@ -22,11 +29,12 @@ export const licenseStates = [
22
29
  "expiredRenewRequired",
23
30
  "expired" /* The license is expired. The system can no longer be used (ui, api) */
24
31
  ];
25
- export const licenseCapabilitiesFromKey = ["ui", "handover", "liveAgent", "knowledge"];
32
+ export const licenseCapabilitiesFromKey = ["ui", "handover", "liveAgent", "knowledge", "aiOpsCenter"];
26
33
  export const licenseCapabilitiesMapping = {
27
34
  ui: "uiLicensed",
28
35
  handover: "handoverLicensed",
29
36
  liveAgent: "liveAgentLicensed",
30
37
  knowledge: "knowledgeLicensed",
38
+ aiOpsCenter: "aiOpsCenterLicensed"
31
39
  };
32
40
  //# sourceMappingURL=license.js.map
@@ -107,7 +107,8 @@ export const actionTypes = [
107
107
  "processKnowledgeSourceFile",
108
108
  "setupObservationConfig",
109
109
  "updateObservationConfig",
110
- "resolveAiOpsCenterError"
110
+ "resolveAiOpsCenterError",
111
+ "odataRequest",
111
112
  ];
112
113
  export const auditEventSchema = {
113
114
  title: "auditEventSchema",
@@ -1,7 +1,7 @@
1
1
  /* Interfaces & Types */
2
2
  import { entityMetaSchema } from "../IEntityMeta";
3
3
  export const knowledgeSourceStatus = ["ready", "ingesting", "disabled", "failure"];
4
- export const knowledgeSourceType = ["pdf", "txt", "docx", "pptx", "ctxt", "url", "manual", "jpeg", "jpg", "png", "bmp", "heif", "tiff"];
4
+ export const knowledgeSourceType = ["pdf", "txt", "docx", "pptx", "ctxt", "url", "manual", "jpeg", "jpg", "png", "bmp", "heif", "tiff", "extension"];
5
5
  export const knowledgeSourceDataSchema = {
6
6
  title: "knowledgeSourceDataSchema",
7
7
  type: "object",
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=ITailLogEntriesRest_2_0.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@cognigy/rest-api-client",
3
- "version": "2025.19.0",
3
+ "version": "2025.21.0",
4
4
  "description": "Cognigy REST-Client",
5
5
  "main": "build/index.js",
6
6
  "module": "dist/esm/index.js",
package/types/index.d.ts CHANGED
@@ -10107,6 +10107,7 @@ export interface ISessionStateWithoutMeta {
10107
10107
  };
10108
10108
  mcpServerUrl?: string;
10109
10109
  mcpToolNode?: string;
10110
+ mcpHeaders?: Record<string, string>;
10110
10111
  timeout?: number;
10111
10112
  };
10112
10113
  tokenUsage?: TSessionUsageInformation;
@@ -10397,6 +10398,21 @@ export interface IRunGenerativeAIPromptOptions {
10397
10398
  * Option to prevent the replacing of \n with " " in streamed outputs
10398
10399
  */
10399
10400
  preventNewLineRemoval?: boolean;
10401
+ /**
10402
+ * Optional logging configuration
10403
+ */
10404
+ logging?: {
10405
+ /** Webhook URL to receive request/response payloads */
10406
+ webhookUrl?: string;
10407
+ /** Custom data to be sent alongside the logging payloads */
10408
+ customData?: unknown;
10409
+ /**
10410
+ * Optional headers to be sent with the webhook request
10411
+ */
10412
+ headers?: Record<string, unknown> | JSON;
10413
+ /** Any additional fields are forwarded into logging meta */
10414
+ [key: string]: unknown;
10415
+ };
10400
10416
  }
10401
10417
  export interface IAzureOpenAIProviderFieldsV2 {
10402
10418
  apiKey: string;
@@ -11431,6 +11447,7 @@ export interface INodeExecutionAPI extends Omit<IActions, "parseCognigyScriptCon
11431
11447
  mcpServerUrl: string;
11432
11448
  timeout: number;
11433
11449
  cacheTools: boolean;
11450
+ mcpHeaders?: Record<string, string>;
11434
11451
  }) => Promise<any[]>;
11435
11452
  executeMcpTool: (params: {
11436
11453
  toolName: string;
@@ -11439,6 +11456,7 @@ export interface INodeExecutionAPI extends Omit<IActions, "parseCognigyScriptCon
11439
11456
  };
11440
11457
  mcpServerUrl: string;
11441
11458
  timeout: number;
11459
+ mcpHeaders?: Record<string, string>;
11442
11460
  }) => Promise<{
11443
11461
  [x: string]: unknown;
11444
11462
  }>;
@@ -18286,7 +18304,8 @@ declare const knowledgeSourceType: readonly [
18286
18304
  "png",
18287
18305
  "bmp",
18288
18306
  "heif",
18289
- "tiff"
18307
+ "tiff",
18308
+ "extension"
18290
18309
  ];
18291
18310
  export declare type TKnowledgeSourceType = typeof knowledgeSourceType[number];
18292
18311
  export interface IKnowledgeSourceMetaData {
@@ -20239,6 +20258,8 @@ export interface IIndexLogEntriesRestData_2_0 extends IRestPagination<ILogEntryI
20239
20258
  type?: Array<TLogLevel>;
20240
20259
  userId?: string;
20241
20260
  flowName?: string;
20261
+ startDate?: string;
20262
+ endDate?: string;
20242
20263
  }
20243
20264
  export interface IIndexLogEntriesRestReturnValue_2_0 extends ICursorBasedPaginationReturnValue<ILogEntryIndexItem_2_0> {
20244
20265
  }
@@ -20279,6 +20300,19 @@ export interface IReadLogEntryRestData_2_0 extends IReadLogEntryRestDataParams_2
20279
20300
  }
20280
20301
  export interface IReadLogEntryRestReturnValue_2_0 extends ILogEntry_2_0 {
20281
20302
  }
20303
+ export interface ITailLogEntriesRestDataParams_2_0 extends IProjectScope {
20304
+ }
20305
+ export interface ITailLogEntriesRestData_2_0 extends IRestPagination<ILogEntryIndexItem_2_0>, ITailLogEntriesRestDataParams_2_0 {
20306
+ type?: TLogLevel[];
20307
+ userId?: string;
20308
+ flowName?: string;
20309
+ }
20310
+ export interface ITailLogEntriesRestReturnValue_2_0 {
20311
+ items: ILogEntryIndexItem_2_0[];
20312
+ total: number;
20313
+ nextCursor: string | null;
20314
+ previousCursor: string | null;
20315
+ }
20282
20316
  /**
20283
20317
  * @openapi
20284
20318
  *
@@ -20926,6 +20960,7 @@ export interface MetricsAPIGroup_2_0 {
20926
20960
  removeProfileData: TRestAPIOperation<IRemoveProfileDataRestData_2_0, IRemoveProfileDataRestReturnValue_2_0>;
20927
20961
  removeContactIdFromProfile: TRestAPIOperation<IRemoveContactIdFromProfileRestData_2_0, IRemoveContactIdFromProfileRestReturnValue_2_0>;
20928
20962
  indexLogEntries: TRestAPIOperation<IIndexLogEntriesRestData_2_0, IIndexLogEntriesRestReturnValue_2_0>;
20963
+ tailLogEntries: TRestAPIOperation<ITailLogEntriesRestData_2_0, ITailLogEntriesRestReturnValue_2_0>;
20929
20964
  readLogEntry: TRestAPIOperation<IReadLogEntryRestData_2_0, IReadLogEntryRestReturnValue_2_0>;
20930
20965
  indexTrainerRecords: TRestAPIOperation<IIndexTrainerRecordsRestData_2_0, IIndexTrainerRecordsRestReturnValue_2_0>;
20931
20966
  readTrainerRecord: TRestAPIOperation<IReadTrainerRecordRestData_2_0, IReadTrainerRecordRestReturnValue_2_0>;
@@ -21336,7 +21371,8 @@ declare const actionTypes: readonly [
21336
21371
  "processKnowledgeSourceFile",
21337
21372
  "setupObservationConfig",
21338
21373
  "updateObservationConfig",
21339
- "resolveAiOpsCenterError"
21374
+ "resolveAiOpsCenterError",
21375
+ "odataRequest"
21340
21376
  ];
21341
21377
  export declare type TActionType = typeof actionTypes[number];
21342
21378
  /**
@@ -21568,6 +21604,10 @@ declare const licenseStates: readonly [
21568
21604
  export declare type TLicenseState = typeof licenseStates[number];
21569
21605
  export interface IGetSystemLicenseStateRestReturnValue_2_0 {
21570
21606
  state: TLicenseState;
21607
+ systemCapabilities?: {
21608
+ aiOpsCenterEnabled?: boolean;
21609
+ quotaMaxKnowledgeChunks?: number;
21610
+ };
21571
21611
  }
21572
21612
  export interface ISetSystemLicenseRestDataBody_2_0 {
21573
21613
  email: string;
@@ -22294,6 +22334,11 @@ export interface ISetupVoiceGatewayRestData_2_0 extends ISetupVoiceGatewayRestDa
22294
22334
  * $ref: '#/components/schemas/TTimezone'
22295
22335
  * dataPrivacySettings:
22296
22336
  * $ref: '#/components/schemas/IOrganisationDataPrivacySettings_2_0'
22337
+ * aiOpsCenterEnabled:
22338
+ * type: boolean
22339
+ * businessUnitId:
22340
+ * type: string
22341
+ * description: The business unit ID
22297
22342
  * IOrganisation_2_0:
22298
22343
  * allOf:
22299
22344
  * - $ref: '#/components/schemas/IOrganisationData_2_0'
@@ -22315,6 +22360,8 @@ export interface IOrganisation_2_0 {
22315
22360
  _id: TMongoId;
22316
22361
  /** The name of this organisation, e.g. 'cognigy' */
22317
22362
  name: string;
22363
+ /** The business unit ID */
22364
+ businessUnitId?: string;
22318
22365
  /** Flag whether this organisation is currently disabled */
22319
22366
  disabled: boolean;
22320
22367
  /** Optional quota information */
@@ -22331,6 +22378,7 @@ export interface IOrganisation_2_0 {
22331
22378
  stepEventsTTLInMinutes: number;
22332
22379
  billingTimezone: string;
22333
22380
  dataPrivacySettings: IOrganisationDataPrivacySettings_2_0;
22381
+ aiOpsCenterEnabled: boolean;
22334
22382
  }
22335
22383
  /**
22336
22384
  * @openapi
@@ -22411,6 +22459,9 @@ export interface IOrganisationDataPrivacySettings_2_0 {
22411
22459
  * properties:
22412
22460
  * _id:
22413
22461
  * $ref: '#/components/schemas/TMongoId'
22462
+ * businessUnitId:
22463
+ * type: string
22464
+ * description: The business unit ID
22414
22465
  * name:
22415
22466
  * type: string
22416
22467
  * description: The name of this organisation
@@ -22441,12 +22492,16 @@ export interface IOrganisationDataPrivacySettings_2_0 {
22441
22492
  * $ref: '#/components/schemas/TTimezone'
22442
22493
  * dataPrivacySettings:
22443
22494
  * $ref: '#/components/schemas/IOrganisationDataPrivacySettings_2_0'
22495
+ * aiOpsCenterEnabled:
22496
+ * type: boolean
22444
22497
  */
22445
22498
  export interface IOrganisationIndexItem_2_0 {
22446
22499
  /** The mongo object id of this organisation */
22447
22500
  _id: TMongoId;
22448
22501
  /** The name of this organisation, e.g. 'cognigy' */
22449
22502
  name: string;
22503
+ /** The business unit ID */
22504
+ businessUnitId?: string;
22450
22505
  /** Flag whether this organisation is currently disabled */
22451
22506
  disabled: boolean;
22452
22507
  /** Optional quota information */
@@ -22462,6 +22517,7 @@ export interface IOrganisationIndexItem_2_0 {
22462
22517
  conversationTTLInMinutes: number;
22463
22518
  liveAgentAccount: number;
22464
22519
  dataPrivacySettings: IOrganisationDataPrivacySettings_2_0;
22520
+ aiOpsCenterEnabled: boolean;
22465
22521
  }
22466
22522
  export interface IIndexOrganisationsRestData_2_0 extends IRestPagination<IOrganisationIndexItem_2_0> {
22467
22523
  }