@cognigy/rest-api-client 2025.18.1 → 2025.20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/build/apigroups/InsightsAPIGroup_2_0.js +10 -10
- package/build/apigroups/MetricsAPIGroup_2_0.js +4 -0
- package/build/authentication/OAuth2/OAuth2Authentication.js +2 -0
- package/build/connector/AxiosAdapter.js +4 -1
- package/build/shared/charts/descriptors/nlu/fuzzySearch.js +6 -6
- package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +81 -21
- package/build/shared/charts/descriptors/service/aiAgent/aiAgentJobCallMCPTool.js +7 -5
- package/build/shared/charts/descriptors/service/aiAgent/aiAgentJobMCPTool.js +8 -1
- package/build/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js +2 -0
- package/build/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +78 -18
- package/build/shared/charts/descriptors/service/llmPrompt/llmPromptMCPTool.js +8 -1
- package/build/shared/charts/descriptors/voice/mappers/transfer.mapper.js +20 -6
- package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js +39 -3
- package/build/shared/interfaces/IOrganisation.js +1 -0
- package/build/shared/interfaces/handover.js +3 -1
- package/build/shared/interfaces/messageAPI/endpoints.js +2 -0
- package/build/shared/interfaces/resources/IAuditEvent.js +2 -1
- package/build/shared/interfaces/resources/knowledgeStore/IKnowledgeSource.js +1 -1
- package/build/shared/interfaces/restAPI/metrics/logs/v2.0/ITailLogEntriesRest_2_0.js +3 -0
- package/dist/esm/apigroups/InsightsAPIGroup_2_0.js +10 -10
- package/dist/esm/apigroups/MetricsAPIGroup_2_0.js +4 -0
- package/dist/esm/authentication/OAuth2/OAuth2Authentication.js +2 -0
- package/dist/esm/connector/AxiosAdapter.js +4 -1
- package/dist/esm/shared/charts/descriptors/nlu/fuzzySearch.js +6 -6
- package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +81 -21
- package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJobCallMCPTool.js +7 -5
- package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJobMCPTool.js +8 -1
- package/dist/esm/shared/charts/descriptors/service/aiAgent/helpers/createToolDefinitions.js +2 -0
- package/dist/esm/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +85 -25
- package/dist/esm/shared/charts/descriptors/service/llmPrompt/llmPromptMCPTool.js +8 -1
- package/dist/esm/shared/charts/descriptors/voice/mappers/transfer.mapper.js +20 -6
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/transfer.js +39 -3
- package/dist/esm/shared/interfaces/IOrganisation.js +1 -0
- package/dist/esm/shared/interfaces/handover.js +3 -1
- package/dist/esm/shared/interfaces/messageAPI/endpoints.js +2 -0
- package/dist/esm/shared/interfaces/resources/IAuditEvent.js +2 -1
- package/dist/esm/shared/interfaces/resources/knowledgeStore/IKnowledgeSource.js +1 -1
- package/dist/esm/shared/interfaces/restAPI/metrics/logs/v2.0/ITailLogEntriesRest_2_0.js +2 -0
- package/package.json +1 -1
- package/types/index.d.ts +873 -816
- package/build/test.js +0 -39
- package/dist/esm/shared/interfaces/restAPI/management/authentication/ICreateJWTToken.js +0 -1
- package/dist/esm/test.js +0 -39

@@ -408,6 +408,57 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
         description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__INCLUDE_ALL_OUTPUT_TYPES__DESCRIPTION",
         defaultValue: true,
     },
+    {
+        key: "advancedLogging",
+        label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__ADVANCED_LOGGING__LABEL",
+        type: "toggle",
+        description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__ADVANCED_LOGGING__DESCRIPTION",
+        defaultValue: false,
+    },
+    {
+        key: "loggingWebhookUrl",
+        label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_WEBHOOK_URL__LABEL",
+        type: "cognigyText",
+        description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_WEBHOOK_URL__DESCRIPTION",
+        defaultValue: "",
+        condition: {
+            key: "advancedLogging",
+            value: true
+        }
+    },
+    {
+        key: "loggingCustomData",
+        label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CUSTOM_LOGGING_DATA__LABEL",
+        type: "cognigyText",
+        description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CUSTOM_LOGGING_DATA__DESCRIPTION",
+        defaultValue: "",
+        condition: {
+            key: "advancedLogging",
+            value: true
+        }
+    },
+    {
+        key: "loggingHeaders",
+        type: "keyValuePairs",
+        label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_HEADERS__LABEL",
+        description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__LOGGING_HEADERS__DESCRIPTION",
+        defaultValue: "{}",
+        condition: {
+            key: "advancedLogging",
+            value: true
+        }
+    },
+    {
+        key: "conditionForLogging",
+        label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CONDITION_FOR_LOGGING__LABEL",
+        type: "cognigyText",
+        description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__FIELDS__CONDITION_FOR_LOGGING__DESCRIPTION",
+        defaultValue: "",
+        condition: {
+            key: "advancedLogging",
+            value: true
+        }
+    },
     {
         key: "customModelOptions",
         label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__CUSTOM_MODEL_OPTIONS__LABEL",

@@ -608,7 +659,12 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
         "debugLogTokenCount",
         "debugLogRequestAndCompletion",
         "debugLogLLMLatency",
-        "debugLogToolDefinitions"
+        "debugLogToolDefinitions",
+        "advancedLogging",
+        "loggingWebhookUrl",
+        "loggingCustomData",
+        "conditionForLogging",
+        "loggingHeaders",
     ]
 },
 {

@@ -646,10 +702,10 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
 },
 tags: ["ai", "llm", "gpt", "generative ai", "openai", "azure", "prompt"],
 function: async ({ cognigy, config, childConfigs, nodeId }) => {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y;
     const { api, input, flowReferenceId } = cognigy;
     const { temperature, maxTokens, topP, presencePenalty, frequencyPenalty, useStop, stop, storeLocation, contextKey, inputKey, timeout, streamStopTokens, streamStopTokenOverrides, debugLogTokenCount, debugLogRequestAndCompletion, debugLogLLMLatency, debugLogToolDefinitions, llmProviderReferenceId, usePromptMode, chatTranscriptSteps, responseFormat, streamStoreCopyInInput, seed, immediateOutput, customModelOptions, customRequestOptions, errorHandling = "continue", // default behavior for LLM Prompt node was, continue its execution even though an error occurred (deviating it from the SEO node) & do not output an error message on UI explicitly. However, error is always stored in the input or context object. We can use an extra "say" node to output it.
-    errorHandlingGotoTarget, errorMessage, useTextAlternativeForLLM, logErrorToSystem, processImages, transcriptImageHandling, toolChoice, useStrict } = config;
+    errorHandlingGotoTarget, errorMessage, useTextAlternativeForLLM, advancedLogging, loggingWebhookUrl, loggingCustomData, loggingHeaders, conditionForLogging, logErrorToSystem, processImages, transcriptImageHandling, toolChoice, useStrict } = config;
     let prompt = config.prompt || "";
     const { traceId } = input;
     // check if custom variables are used and if they have a length modifier

@@ -743,10 +799,11 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
      * Retrieve the tool definitions from the child nodes
      */
     const { toolIds, toolNames, toolMap, tools } = await (0, createToolDefinitions_1.createToolDefinitions)(childConfigs, api, useStrict);
+    const enableAdvancedLogging = advancedLogging && loggingWebhookUrl && (conditionForLogging === "" || !!conditionForLogging);
     /**
      * Generate Prompt Options
      */
-    const llmPromptOptions = Object.assign(Object.assign(Object.assign({ prompt,
+    const llmPromptOptions = Object.assign(Object.assign(Object.assign(Object.assign({ prompt,
         temperature,
         maxTokens,
         topP,

@@ -769,7 +826,9 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
         }
     }, streamStopTokens: streamStopTokens || [".", "!", "?", "\\n"], streamStopTokenOverrides, preventNewLineRemoval: isStreamingChannel ? true : false,
     // set to true in order to get token usage
-    detailedResults: true, seed: Number(seed) ? Number(seed) : undefined }, (tools.length > 0 && { tools })), (tools.length > 0 && { toolChoice: toolChoice })), {
+    detailedResults: true, seed: Number(seed) ? Number(seed) : undefined }, (tools.length > 0 && { tools })), (tools.length > 0 && { toolChoice: toolChoice })), (enableAdvancedLogging && {
+        logging: Object.assign(Object.assign({ webhookUrl: loggingWebhookUrl }, (loggingCustomData && { customData: loggingCustomData })), (loggingHeaders && { headers: loggingHeaders }))
+    })), { customModelOptions,
         customRequestOptions });
     if (useStop) {
         llmPromptOptions["stop"] = stop;

@@ -861,19 +920,20 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
         node: nodeId,
     } }, (isMcpToolCall && {
     mcpServerUrl: (_h = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _h === void 0 ? void 0 : _h.mcpServerUrl,
-
+    mcpHeaders: (_j = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _j === void 0 ? void 0 : _j.mcpHeaders,
+    timeout: (_k = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _k === void 0 ? void 0 : _k.timeout,
     mcpToolNode: toolChild === null || toolChild === void 0 ? void 0 : toolChild.id,
 })), { toolCall: mainToolCall }),
 });
 // if there are any parameters/arguments, add them to the input slots
 if (mainToolCall.function.arguments) {
-    input.llmPrompt = Object.assign(Object.assign({}, input.llmPrompt), { toolArgs: Object.assign(Object.assign({}, (
+    input.llmPrompt = Object.assign(Object.assign({}, input.llmPrompt), { toolArgs: Object.assign(Object.assign({}, (_m = (_l = input.llmPrompt) === null || _l === void 0 ? void 0 : _l.toolArgs) !== null && _m !== void 0 ? _m : {}), mainToolCall.function.arguments) });
 }
 // Debug Message for Tool Calls, configured in the Tool Node
-if ((
+if ((_o = toolChild === null || toolChild === void 0 ? void 0 : toolChild.config) === null || _o === void 0 ? void 0 : _o.debugMessage) {
     const messageLines = [`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER:</b> ${await api.parseCognigyScriptText(toolChild.config.toolId)}`];
     // Arguments / Parameters Slots
-    const slots = ((
+    const slots = ((_p = mainToolCall === null || mainToolCall === void 0 ? void 0 : mainToolCall.function) === null || _p === void 0 ? void 0 : _p.arguments) && Object.keys(mainToolCall.function.arguments);
     const hasSlots = slots && slots.length > 0;
     messageLines.push(`<b>UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__SLOTS</b>${hasSlots ? "" : " -"}`);
     if (hasSlots) {

@@ -888,7 +948,7 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
         messageLines.push(`- ${slot}: ${slotValueAsString}`);
     });
     }
-    (
+    (_q = api.logDebugMessage) === null || _q === void 0 ? void 0 : _q.call(api, messageLines.join("\n"), "UI__DEBUG_MODE__AI_AGENT_JOB__TOOL_CALL__DEBUG_MESSAGE__HEADER");
 }
 if (toolChild) {
     api.setNextNode(toolChild.id);

@@ -913,11 +973,11 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
     // we stringify objects (e.g. results coming from JSON Mode)
     // so that the transcript only contains text
     const resultToOutput = typeof ((llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult) === "object" ? JSON.stringify((llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult, undefined, 2) : (llmResult === null || llmResult === void 0 ? void 0 : llmResult.result) || llmResult;
-    await ((
+    await ((_r = api.output) === null || _r === void 0 ? void 0 : _r.call(api, resultToOutput, {}));
 }
 else if (llmResult.finishReason && llmPromptOptions.stream) {
     // send the finishReason as last output for a stream
-    (
+    (_s = api.output) === null || _s === void 0 ? void 0 : _s.call(api, "", {
         _cognigy: {
             _preventTranscript: true,
             _messageId,

@@ -940,7 +1000,7 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
 }
 // Add response to Cognigy Input/Context for further usage
 if (storeLocation === "context") {
-    (
+    (_t = api.addToContext) === null || _t === void 0 ? void 0 : _t.call(api, contextKey, llmResult, "simple");
 }
 else if (storeLocation === "input") {
     api.addToInput(inputKey, llmResult);

@@ -953,19 +1013,19 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const errorDetailsBase = {
     name: error === null || error === void 0 ? void 0 : error.name,
     code: (error === null || error === void 0 ? void 0 : error.code) || (error === null || error === void 0 ? void 0 : error.httpStatusCode),
-    message: (error === null || error === void 0 ? void 0 : error.message) || ((
+    message: (error === null || error === void 0 ? void 0 : error.message) || ((_u = error.originalErrorDetails) === null || _u === void 0 ? void 0 : _u.message),
 };
 const errorDetails = Object.assign(Object.assign({}, errorDetailsBase), { originalErrorDetails: error === null || error === void 0 ? void 0 : error.originalErrorDetails });
 // return the requestId if it exist in the error obj.
-if ((
+if ((_v = error.meta) === null || _v === void 0 ? void 0 : _v.requestId) {
     errorDetails["meta"] = {
-        requestId: (
+        requestId: (_w = error.meta) === null || _w === void 0 ? void 0 : _w.requestId
     };
 }
 if (logErrorToSystem) {
-    (
+    (_x = api.log) === null || _x === void 0 ? void 0 : _x.call(api, "error", JSON.stringify(errorDetailsBase));
 }
-(
+(_y = api.logDebugError) === null || _y === void 0 ? void 0 : _y.call(api, errorDetailsBase, "UI__DEBUG_MODE__LLM_PROMPT__ERROR");
 await handleServiceError(errorDetails);
 return;
 }
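
Taken together, the LLM Prompt v2 hunks above add an opt-in "Advanced Logging" feature: five new editor fields (advancedLogging, loggingWebhookUrl, loggingCustomData, loggingHeaders, conditionForLogging) plus a `logging` block that is merged into the prompt options only when the toggle is on and a webhook URL is configured. A minimal TypeScript sketch of that gating, restated from the compiled code above (the interface and function names are illustrative, not part of the package):

```ts
// Sketch only: mirrors the compiled gating logic shown in the diff above.
// The types and field values are illustrative, not part of the package API.
interface AdvancedLoggingConfig {
    advancedLogging: boolean;
    loggingWebhookUrl: string;
    loggingCustomData?: string;
    loggingHeaders?: Record<string, string>;
    conditionForLogging?: string;
}

function buildLoggingOptions(config: AdvancedLoggingConfig) {
    const { advancedLogging, loggingWebhookUrl, loggingCustomData, loggingHeaders, conditionForLogging } = config;
    // Same gate as the compiled code: toggle on, webhook URL present,
    // and the logging condition either empty or truthy.
    const enableAdvancedLogging =
        advancedLogging && loggingWebhookUrl && (conditionForLogging === "" || !!conditionForLogging);
    if (!enableAdvancedLogging) return {};
    return {
        logging: {
            webhookUrl: loggingWebhookUrl,
            ...(loggingCustomData && { customData: loggingCustomData }),
            ...(loggingHeaders && { headers: loggingHeaders }),
        },
    };
}
```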
@@ -163,6 +163,13 @@ exports.LLM_PROMPT_MCP_TOOL = (0, createNodeDescriptor_1.createNodeDescriptor)({
             value: "blacklist",
         },
     },
+    {
+        key: "mcpHeaders",
+        type: "keyValuePairs",
+        label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_MCP_TOOL__FIELDS__HEADERS__LABEL",
+        description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_MCP_TOOL__FIELDS__HEADERS__DESCRIPTION",
+        defaultValue: "{}",
+    },
 ],
 sections: [
     {

@@ -175,7 +182,7 @@ exports.LLM_PROMPT_MCP_TOOL = (0, createNodeDescriptor_1.createNodeDescriptor)({
         key: "advanced",
         label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_JOB__SECTIONS__ADVANCED__LABEL",
         defaultCollapsed: true,
-        fields: ["cacheTools", "condition", "toolFilter", "whitelist", "blacklist"],
+        fields: ["cacheTools", "condition", "toolFilter", "whitelist", "blacklist", "mcpHeaders"],
     },
 ],
 form: [
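
The MCP Tool node for the LLM Prompt gains an `mcpHeaders` key-value field (default `"{}"`), listed in its Advanced section; the LLM Prompt hunks above forward `toolChild.config.mcpHeaders` and `toolChild.config.timeout` alongside the MCP server URL when a tool call targets an MCP tool. A hedged sketch of the resulting payload shape (property names come from the diff; the interface name and sample values are assumptions):

```ts
// Assumed shape, assembled from the properties visible in the diff above.
interface McpToolCallPayload {
    mcpServerUrl?: string;
    mcpHeaders?: Record<string, string>; // from the new "mcpHeaders" keyValuePairs field
    timeout?: number;
    mcpToolNode?: string; // id of the MCP Tool child node
}

// Illustrative example of what a configured tool child might contribute:
const example: McpToolCallPayload = {
    mcpServerUrl: "https://example.com/mcp", // hypothetical URL
    mcpHeaders: { Authorization: "Bearer <token>" },
    timeout: 30000,
    mcpToolNode: "node-id",
};
```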
@@ -5,7 +5,7 @@ exports.prepareTransferParams = exports.transfer = void 0;
 const helper_1 = require("../../../descriptors/voicegateway2/utils/helper");
 const helper_2 = require("../utils/helper");
 exports.transfer = {
-    handleInput(endpointType, params, isGenericNode = false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia) {
+    handleInput(endpointType, params, isGenericNode = false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, mediaPath, anchorMedia) {
         try {
             switch (endpointType) {
                 case "bandwidth":

@@ -24,14 +24,14 @@ exports.transfer = {
                     return this.handleAudioCodesInput((0, exports.prepareTransferParams)(params), endpointType);
                 case "voiceGateway2":
                 default:
-                    return this.handleVGInput((0, exports.prepareTransferParams)(params), recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia);
+                    return this.handleVGInput((0, exports.prepareTransferParams)(params), recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, mediaPath, anchorMedia);
             }
         }
         catch (error) {
             throw Error(error.message);
         }
     },
-    handleVGInput(transferParam, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia) {
+    handleVGInput(transferParam, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, mediaPath, anchorMedia) {
         const { transferType, transferTarget, transferReason, referredBy, useTransferSipHeaders, transferSipHeaders, dialMusic, dialTranscriptionWebhook, dialCallerId, amdEnabled, amdRedirectOnMachineDetected, amdRedirectText, dialTimeout, timeLimit, sttLabel } = transferParam;
         const payload = {
             _voiceGateway2: {

@@ -55,6 +55,19 @@
                 if (timeLimit && timeLimit > 0) {
                     dialVerb.timeLimit = timeLimit;
                 }
+                if (process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true") {
+                    if (typeof anchorMedia === "boolean") {
+                        dialVerb.anchorMedia = anchorMedia;
+                    }
+                }
+                else {
+                    if (mediaPath) {
+                        dialVerb.mediaPath = mediaPath;
+                    }
+                    else if (typeof anchorMedia === "boolean") {
+                        dialVerb.mediaPath = anchorMedia ? "fullMedia" : "partialMedia";
+                    }
+                }
                 if (amdEnabled) {
                     dialVerb.amd = {
                         actionHook: "amd"

@@ -75,6 +88,9 @@
                 };
                 /* By default we set the target to phone */
                 dialVerb.target = [phoneTarget];
+                if (process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true") {
+                    delete dialVerb.mediaPath;
+                }
                 /* If targets includes an @ we set the target to sip */
                 if (transferTarget === null || transferTarget === void 0 ? void 0 : transferTarget.includes("@")) {
                     dialVerb.target = [sipTarget];

@@ -137,12 +153,10 @@
                 }
                 dialVerb.callerId = dialCallerId;
                 }
-                if (anchorMedia) {
-                    dialVerb.anchorMedia = anchorMedia;
-                }
                 payload._voiceGateway2.json["dial"] = dialVerb;
                 break;
             case "refer":
+                mediaPath = undefined;
             default:
                 const referVerb = {
                     "referTo": (0, helper_1.cleanTarget)(transferTarget, false),
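
These mapper hunks replace the boolean `anchorMedia` flag on the dial verb with a `mediaPath` value, gated by the `FEATURE_DISABLE_VG_MEDIA_PATH` environment variable. The precedence implemented above, restated as a small TypeScript sketch (helper name and types are illustrative, not package API):

```ts
type MediaPath = "fullMedia" | "partialMedia" | "noMedia";

// Restates the dial-verb logic from the hunk above (illustrative helper).
function resolveDialMedia(mediaPath: MediaPath | undefined, anchorMedia: boolean | undefined) {
    if (process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true") {
        // Legacy behaviour: keep the boolean anchorMedia flag on the dial verb.
        return typeof anchorMedia === "boolean" ? { anchorMedia } : {};
    }
    if (mediaPath) {
        return { mediaPath };
    }
    if (typeof anchorMedia === "boolean") {
        // Backwards compatibility: map the old boolean onto the new enum.
        return { mediaPath: (anchorMedia ? "fullMedia" : "partialMedia") as MediaPath };
    }
    return {};
}
```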
@@ -81,6 +81,33 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
             value: "dial"
         }
     },
+    {
+        key: "mediaPath",
+        label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__LABEL",
+        type: "select",
+        description: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__DESCRIPTION",
+        defaultValue: "partialMedia",
+        params: {
+            options: [
+                {
+                    label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__OPTIONS__FULL_MEDIA__LABEL",
+                    value: "fullMedia"
+                },
+                {
+                    label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__OPTIONS__PARTIAL_MEDIA__LABEL",
+                    value: "partialMedia"
+                },
+                {
+                    label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__OPTIONS__NO_MEDIA__LABEL",
+                    value: "noMedia"
+                }
+            ]
+        },
+        condition: {
+            key: "transferType",
+            value: "dial"
+        }
+    },
     {
         key: "useTransferSipHeaders",
         label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__USE_TRANSFER_SIP_HEADERS__LABEL",

@@ -462,7 +489,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
         defaultCollapsed: true,
         fields: [
             "referredBy",
-            "anchorMedia",
+            process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true" ? "anchorMedia" : "mediaPath",
             "useTransferSipHeaders",
             "transferSipHeaders",
             "enableAnsweringMachineDetection"

@@ -526,7 +553,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     summary: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__SUMMARY",
     function: async ({ cognigy, config, }) => {
         const { api, input } = cognigy;
-        const { transferType, transferTarget, referredBy,
+        const { transferType, transferTarget, referredBy, mediaPath, useTransferSipHeaders, transferSipHeaders = {}, transferReason, dialMusic, dialTranscriptionWebhook, dialCallerId, recognitionChannel, sttVendor, sttLanguage, sttDisablePunctuation, dialTimeout, enableTimeLimit, timeLimit, amdEnabled, amdRedirectOnMachineDetected, amdRedirectText, sttLabel, googleModel, sttDeepgramModel, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, agentAssistEnabled, agentAssistHeadersKey = customHeaderDefaultValue, anchorMedia } = config;
         const transferParams = {
             transferType,
             transferReason,

@@ -544,6 +571,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
             timeLimit,
             sttLabel,
         };
+        let media = mediaPath;
         try {
             if (input.channel === "adminconsole") {
                 let textWarningAdminChannel = "Transferring a call is not supported in the Interaction Panel, please use the VoiceGateway endpoint.";

@@ -556,6 +584,14 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
             if (!enableTimeLimit) {
                 delete transferParams.timeLimit;
             }
+            if (transferType === "dial" && typeof anchorMedia === "boolean" && (mediaPath === null || mediaPath === undefined) && process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "false") {
+                if (anchorMedia) {
+                    media = "fullMedia";
+                }
+                else {
+                    media = "partialMedia";
+                }
+            }
             if (agentAssistEnabled && dialTranscriptionWebhook) {
                 try {
                     const agentAssistConfigId = api.getAgentAssistConfigId();

@@ -591,7 +627,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
             transferParams.useTransferSipHeaders = false;
             api.log("error", "Invalid JSON in Transfer SIP Headers");
         }
-        const payload = transfer_mapper_1.transfer.handleInput("voiceGateway2", transferParams, false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia);
+        const payload = transfer_mapper_1.transfer.handleInput("voiceGateway2", transferParams, false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, media, anchorMedia);
         await api.say(null, {
             _cognigy: payload,
         });
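
At the node level, the new `mediaPath` select (default `partialMedia`, shown only for `dial` transfers) takes the place of `anchorMedia` in the Advanced section unless `FEATURE_DISABLE_VG_MEDIA_PATH` is `"true"`, and flows that still only carry the boolean `anchorMedia` are mapped onto the new enum before `handleInput` is called. A hedged example with illustrative values:

```ts
// Illustrative only: an older dial-transfer config saved before mediaPath existed.
const legacyConfig = { transferType: "dial", anchorMedia: true, mediaPath: undefined };

// With FEATURE_DISABLE_VG_MEDIA_PATH === "false", the node falls back to the old flag:
// media = anchorMedia ? "fullMedia" : "partialMedia", i.e. "fullMedia" here,
// and passes media (plus the original anchorMedia) on to transfer.handleInput(...).
const media = legacyConfig.mediaPath ?? (legacyConfig.anchorMedia ? "fullMedia" : "partialMedia");
```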
@@ -59,6 +59,7 @@ exports.organisationDataSchema = {
     stepEventsTTLInMinutes: { type: "integer", minimum: 0 },
     disabled: { type: "boolean" },
     name: { type: "string", minLength: 1, format: "resource-name" },
+    businessUnitId: { type: "string" },
     passwordPolicy: exports.organisationPasswordPolicySchema,
     quotaMaxChannelsPerProject: { type: "integer", minimum: 0 },
     quotaMaxMessagesPerDay: { type: "integer", minimum: 0 },
@@ -122,6 +122,7 @@ exports.handoverSettingsSchema = {
             redactTranscriptTileMessages: { type: "boolean" },
             enableAgentCopilotAuthentication: { type: "boolean" },
             agentCopilotAuthentication: { type: "string" },
+            oAuth2Connection: { type: "string" },
         }
     }
 }

@@ -154,7 +155,8 @@ exports.foreignSessionDataSchema = {
         clientPollTimeout: { type: "number" },
         accessToken: { type: "string" },
         lastEventId: { type: "string" },
-        apiKey: { type: "string" }
+        apiKey: { type: "string" },
+        endHandoverOnParticipantChange: { type: "boolean" }
     }
 };
 exports.rceSettingsSchema = {
@@ -41,6 +41,7 @@ exports.endpointTypes = [
     "zoomContactCenter",
 ];
 const transferTypes = ["dial", "sip:refer"];
+const mediaPathTypes = ["fullMedia", "partialMedia", "noMedia"];
 exports.webchatPersistentMenuSchema = {
     title: "webchatPersistentMenuSchema",
     type: "object",

@@ -154,6 +155,7 @@ const callFailoverSettingsSchema = {
     deepgramEndpointing: { type: "boolean" },
     deepgramEndpointingValue: { type: "number" },
     dialTranscribeDeepgramTier: { type: "string" },
+    mediaPath: { type: "string", enum: [...mediaPathTypes] },
     anchorMedia: { type: "boolean" }
 }
 };
@@ -110,7 +110,8 @@ exports.actionTypes = [
     "processKnowledgeSourceFile",
     "setupObservationConfig",
     "updateObservationConfig",
-    "resolveAiOpsCenterError"
+    "resolveAiOpsCenterError",
+    "odataRequest",
 ];
 exports.auditEventSchema = {
     title: "auditEventSchema",
@@ -4,7 +4,7 @@ exports.knowledgeSourceSchema = exports.knowledgeSourceDataSchema = exports.know
 /* Interfaces & Types */
 const IEntityMeta_1 = require("../IEntityMeta");
 exports.knowledgeSourceStatus = ["ready", "ingesting", "disabled", "failure"];
-exports.knowledgeSourceType = ["pdf", "txt", "docx", "pptx", "ctxt", "url", "manual", "jpeg", "jpg", "png", "bmp", "heif", "tiff"];
+exports.knowledgeSourceType = ["pdf", "txt", "docx", "pptx", "ctxt", "url", "manual", "jpeg", "jpg", "png", "bmp", "heif", "tiff", "extension"];
 exports.knowledgeSourceDataSchema = {
     title: "knowledgeSourceDataSchema",
     type: "object",
@@ -4,12 +4,12 @@ import { stringifyQuery } from "../shared/helper/rest";
 export function InsightsAPIGroup_2_0(instance) {
     const self = instance;
     return {
-        indexSteps: (args, options) => GenericAPIFn(`/
-        generateReport: (args, options) => GenericAPIFn("/
-        loadReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/
+        indexSteps: (args, options) => GenericAPIFn(`/v1.0/insights/steps?${stringifyQuery(args)}`, "GET", self)(undefined, options),
+        generateReport: (args, options) => GenericAPIFn("/v1.0/insights/reports/generate", "POST", self)(args, options),
+        loadReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/v1.0/insights/reports/${queryHash}`, "GET", self)(undefined, options),
         generateMessagesReport: (_a, options) => {
             var { search, skip, limit, next, previous, sort, messageType } = _a, args = __rest(_a, ["search", "skip", "limit", "next", "previous", "sort", "messageType"]);
-            return GenericAPIFn(`/
+            return GenericAPIFn(`/v1.0/insights/messages/report/generate?${stringifyQuery({
                 search,
                 skip,
                 limit,

@@ -19,12 +19,12 @@ export function InsightsAPIGroup_2_0(instance) {
                 messageType
             })}`, "POST", self)(args, options);
         },
-        loadMessagesReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/
-        generateStepReport: (args, options) => GenericAPIFn("/
-        loadStepReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/
+        loadMessagesReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/v1.0/insights/messages/report/${queryHash}`, "GET", self)(undefined, options),
+        generateStepReport: (args, options) => GenericAPIFn("/v1.0/insights/steps/report/generate", "POST", self)(args, options),
+        loadStepReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/v1.0/insights/steps/report/${queryHash}`, "GET", self)(undefined, options),
         generateTranscriptsReport: (_a, options) => {
             var { search, skip, limit, next, previous, sort } = _a, args = __rest(_a, ["search", "skip", "limit", "next", "previous", "sort"]);
-            return GenericAPIFn(`/
+            return GenericAPIFn(`/v1.0/insights/transcripts/report/generate?${stringifyQuery({
                 search,
                 skip,
                 limit,

@@ -33,12 +33,12 @@ export function InsightsAPIGroup_2_0(instance) {
                 sort
             })}`, "POST", self)(args, options);
         },
-        loadTranscriptsReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/
+        loadTranscriptsReportByQueryHash: ({ queryHash }, options) => GenericAPIFn(`/v1.0/insights/transcripts/report/${queryHash}`, "GET", self)(undefined, options),
         deleteAnalyticsRecords: (_a, options) => {
             var { projectId } = _a, restArgs = __rest(_a, ["projectId"]);
             return GenericAPIFn(`/v2.0/analytics/${projectId}`, "DELETE", self)(restArgs, options);
         },
-        insightsJWT: (options) => GenericAPIFn("/
+        insightsJWT: (options) => GenericAPIFn("/insights/auth", "POST", self)(undefined, options)
     };
 }
 //# sourceMappingURL=InsightsAPIGroup_2_0.js.map
@@ -12,6 +12,10 @@ export function MetricsAPIGroup_2_0(instance) {
             var { projectId } = _a, args = __rest(_a, ["projectId"]);
             return GenericAPIFn(`/new/v2.0/projects/${projectId}/logs?${stringifyQuery(args)}`, "GET", self)(undefined, options);
         },
+        tailLogEntries: (_a, options) => {
+            var { projectId } = _a, args = __rest(_a, ["projectId"]);
+            return GenericAPIFn(`/new/v2.0/projects/${projectId}/logs/tail?${stringifyQuery(args)}`, "GET", self)(undefined, options);
+        },
         readLogEntry: ({ projectId, logEntryId }, options) => GenericAPIFn(`/new/v2.0/projects/${projectId}/logs/${logEntryId}}`, "GET", self)(undefined, options),
         indexProfiles: (args, options) => GenericAPIFn(`/new/v2.0/profiles?${stringifyQuery(args)}`, "GET", self)(undefined, options),
         createProfile: (args, options) => GenericAPIFn("/new/v2.0/profiles", "POST", self)(args, options),
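
The Metrics API group gains a `tailLogEntries` call that issues a GET against `/new/v2.0/projects/{projectId}/logs/tail`, forwarding the remaining arguments as a query string. A hedged usage sketch (the client wiring and the `limit` query parameter are assumptions; only the method name, argument shape and route come from the diff):

```ts
// Hypothetical usage sketch; stand-in declarations replace the real package wiring.
declare function MetricsAPIGroup_2_0(instance: unknown): {
    tailLogEntries(args: { projectId: string } & Record<string, unknown>, options?: unknown): Promise<unknown>;
};
declare const restClientInstance: unknown;

const metrics = MetricsAPIGroup_2_0(restClientInstance);
// projectId is split off and used in the URL (/new/v2.0/projects/{projectId}/logs/tail),
// the remaining arguments are serialised into the query string.
const tail = await metrics.tailLogEntries({ projectId: "my-project-id", limit: 100 }, {});
```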
@@ -103,6 +103,8 @@ export const OAuth2Authentication = function (credentials, self) {
             break;
         case "refreshToken":
             {
+                // The refresh token from the parameters is to support older token granted via Password Grant flow
+                // TODO - remove this in future once PKCE fully takes over
                 const { refreshToken } = parameters;
                 const credentials = self.credentials;
                 const result = yield refreshTokenGrant({
@@ -129,7 +129,7 @@ export class AxiosAdapter {
         }
     }
     if (isAxiosResponse(axiosResponse)) {
-
+        let errorClass = ErrorCollection[(_b = axiosResponse.data) === null || _b === void 0 ? void 0 : _b.code] ||
             ErrorCollection[axiosResponse.status] ||
             ErrorCollection[ErrorCode.INTERNAL_SERVER_ERROR];
         if ((axiosResponse.status === HttpStatusCode.UNAUTHORIZED ||

@@ -138,6 +138,9 @@ export class AxiosAdapter {
             typeof this.config.onUnauthorized === "function") {
             this.config.onUnauthorized();
         }
+        if (axiosResponse.status === HttpStatusCode.CONFLICT) {
+            errorClass = ErrorCollection[ErrorCode.CONFLICT_ERROR];
+        }
         if (OAuth2Errors.includes((_d = axiosResponse.data) === null || _d === void 0 ? void 0 : _d.error)) {
             throw new errorClass(axiosResponse.data.detail, { traceId: axiosResponse.data.traceId }, undefined, axiosResponse.data);
         }
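
The adapter change above introduces an explicit mapping for HTTP 409: after the error class is chosen from the response body code or status, a CONFLICT status overrides it with `ErrorCollection[ErrorCode.CONFLICT_ERROR]`. A minimal sketch of that selection order, using stand-in declarations for the package-internal identifiers (a paraphrase, not the package source):

```ts
// Stand-ins so the sketch type-checks; the real identifiers live inside the package.
declare const ErrorCollection: Record<string | number, new (...args: unknown[]) => Error>;
declare const ErrorCode: { INTERNAL_SERVER_ERROR: number; CONFLICT_ERROR: number };
declare const HttpStatusCode: { CONFLICT: number };
declare const response: { status: number; data?: { code?: number } };

let errorClass =
    ErrorCollection[response.data?.code ?? ""] ||
    ErrorCollection[response.status] ||
    ErrorCollection[ErrorCode.INTERNAL_SERVER_ERROR];

if (response.status === HttpStatusCode.CONFLICT) {
    // New in this release: 409 responses always map to the CONFLICT_ERROR class.
    errorClass = ErrorCollection[ErrorCode.CONFLICT_ERROR];
}
```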
@@ -31,12 +31,12 @@ export const FUZZY_SEARCH = createNodeDescriptor({
         params: {
             required: true
         },
-        defaultValue: `
-
-
-
-
-
+        defaultValue: `{
+            "$cs":{
+                "script":"context.names",
+                "type":"array"
+            }
+        }`
     },
     {
         key: "isCaseSensitive",