@cognigy/rest-api-client 4.93.0 → 4.94.0
This diff shows the changes between publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
- package/CHANGELOG.md +5 -0
- package/build/authentication/AuthenticationAPI.js +1 -1
- package/build/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/errorHandler.helper.js +1 -1
- package/build/shared/charts/descriptors/agentAssist/knowledgeAssist.js +1 -1
- package/build/shared/charts/descriptors/agentAssist/nextActionAssist.js +1 -1
- package/build/shared/charts/descriptors/analytics/overwriteAnalytics.js +1 -1
- package/build/shared/charts/descriptors/analytics/requestRating.js +1 -1
- package/build/shared/charts/descriptors/knowledgeSearch/searchExtractOutput.js +6 -6
- package/build/shared/charts/descriptors/liveAgent/assistInfo.js +1 -1
- package/build/shared/charts/descriptors/logic/thinkV2.js +1 -1
- package/build/shared/charts/descriptors/message/checkChannelChange/checkChannelChange.js +2 -2
- package/build/shared/charts/descriptors/message/say.js +2 -2
- package/build/shared/charts/descriptors/message/sendText.js +1 -1
- package/build/shared/charts/descriptors/microsoft/getToken.js +1 -1
- package/build/shared/charts/descriptors/microsoft/invalidateToken.js +1 -1
- package/build/shared/charts/descriptors/service/GPTConversation.js +14 -14
- package/build/shared/charts/descriptors/service/GPTPrompt.js +4 -4
- package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +4 -4
- package/build/shared/charts/descriptors/service/handover.js +1 -1
- package/build/shared/charts/descriptors/service/jwtSecret.js +5 -3
- package/build/shared/charts/descriptors/voice/nodes/bargeIn.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/continuousAsr.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/dtmf.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/hangup.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/muteSpeechInput.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/noUserInput.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/play.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/sendMetadata.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/sessionSpeechParameters.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/transfer.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/agentAssist.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/callRecording.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/handover.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/hangup.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/playURL.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/sendMessage.js +3 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/sendMetaData.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/setSessionParams.js +2 -2
- package/build/shared/charts/descriptors/voicegateway2/nodes/dtmf.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/hangup.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/muteSpeechInput.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/play.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/record.js +3 -3
- package/build/shared/charts/descriptors/voicegateway2/nodes/refer.js +2 -2
- package/build/shared/charts/descriptors/voicegateway2/nodes/sendMetadata.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js +2 -2
- package/build/shared/interfaces/handover.js +2 -1
- package/build/shared/interfaces/messageAPI/endpoints.js +1 -0
- package/build/shared/interfaces/resources/INodeDescriptorSet.js +2 -1
- package/build/shared/interfaces/resources/TRestChannelType.js +7 -2
- package/dist/esm/authentication/AuthenticationAPI.js +1 -1
- package/dist/esm/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/errorHandler.helper.js +1 -1
- package/dist/esm/shared/charts/descriptors/agentAssist/knowledgeAssist.js +1 -1
- package/dist/esm/shared/charts/descriptors/agentAssist/nextActionAssist.js +1 -1
- package/dist/esm/shared/charts/descriptors/analytics/overwriteAnalytics.js +1 -1
- package/dist/esm/shared/charts/descriptors/analytics/requestRating.js +1 -1
- package/dist/esm/shared/charts/descriptors/knowledgeSearch/searchExtractOutput.js +6 -6
- package/dist/esm/shared/charts/descriptors/liveAgent/assistInfo.js +1 -1
- package/dist/esm/shared/charts/descriptors/logic/thinkV2.js +1 -1
- package/dist/esm/shared/charts/descriptors/message/checkChannelChange/checkChannelChange.js +2 -2
- package/dist/esm/shared/charts/descriptors/message/say.js +2 -2
- package/dist/esm/shared/charts/descriptors/message/sendText.js +1 -1
- package/dist/esm/shared/charts/descriptors/microsoft/getToken.js +1 -1
- package/dist/esm/shared/charts/descriptors/microsoft/invalidateToken.js +1 -1
- package/dist/esm/shared/charts/descriptors/service/GPTConversation.js +15 -15
- package/dist/esm/shared/charts/descriptors/service/GPTPrompt.js +4 -4
- package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +4 -4
- package/dist/esm/shared/charts/descriptors/service/handover.js +1 -1
- package/dist/esm/shared/charts/descriptors/service/jwtSecret.js +5 -3
- package/dist/esm/shared/charts/descriptors/voice/nodes/bargeIn.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/continuousAsr.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/dtmf.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/hangup.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/muteSpeechInput.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/noUserInput.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/play.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/sendMetadata.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/sessionSpeechParameters.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/transfer.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/agentAssist.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/callRecording.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/handover.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/hangup.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/playURL.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/sendMessage.js +3 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/sendMetaData.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/setSessionParams.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/dtmf.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/hangup.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/muteSpeechInput.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/play.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/record.js +3 -3
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/refer.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/sendMetadata.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/transfer.js +2 -2
- package/dist/esm/shared/interfaces/handover.js +2 -1
- package/dist/esm/shared/interfaces/messageAPI/endpoints.js +1 -0
- package/dist/esm/shared/interfaces/resources/INodeDescriptorSet.js +2 -1
- package/dist/esm/shared/interfaces/resources/TRestChannelType.js +7 -2
- package/package.json +3 -2
- package/types/index.d.ts +17 -5
- package/build/shared/interfaces/license.js.map +0 -1
- package/build/test.js +0 -1
- package/dist/esm/shared/interfaces/license.js.map +0 -1
- package/dist/esm/test.js +0 -1
@@ -53,14 +53,14 @@ exports.setSessionParamsNode = (0, createNodeDescriptor_1.createNodeDescriptor)(
     const { sessionParams } = config;
     const { isFeatureAccmEnabled } = api.getEndpointSettings();
     if (isFeatureAccmEnabled) {
-        return api.say(null, {
+        return await api.say(null, {
             _cognigy: (0, utils_1.buildPayloadAccm)("setSessionParams", config, nodeId)
         });
     }
     let compiledParams = sessionParams || {};
     (0, paramUtils_1.compileParams)(config, compiledParams);
     // output the activity to the voice gateway
-    api.output(null, {
+    await api.output(null, {
         "_cognigy": {
             "_voiceGateway": {
                 "json": {
@@ -34,7 +34,7 @@ exports.hangupNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     const { hangupReason } = config;
     try {
         const payload = hangup_mapper_1.hangUp.handleInput("voiceGateway2", hangupReason);
-        api.say(null, {
+        await api.say(null, {
            _cognigy: payload
         });
         api.logDebugMessage(`UI__DEBUG_MODE__HANGUP__MESSAGE ${hangupReason}`);
@@ -50,7 +50,7 @@ exports.muteSpeechInputNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     const { api } = cognigy;
     try {
         const payload = mapper.handleInput(config, api);
-        api.say(null, {
+        await api.say(null, {
            _cognigy: payload
         });
         (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -54,7 +54,7 @@ exports.recordNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     label: "UI__NODE_EDITOR__VOICEGATEWAY2__FIELDS__SIPREC_SERVER_URL__LABEL",
     type: "cognigyText",
     description: "UI__NODE_EDITOR__VOICEGATEWAY2__FIELDS__SIPREC_SERVER_URL__DESCRIPTION",
-    defaultValue:
+    defaultValue: "",
     condition: {
         key: "action",
         value: "startCallRecording"
@@ -65,7 +65,7 @@ exports.recordNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     label: "UI__NODE_EDITOR__VOICEGATEWAY2__FIELDS__RECORDING_ID__LABEL",
     type: "cognigyText",
     description: "UI__NODE_EDITOR__VOICEGATEWAY2__FIELDS__RECORDING_ID__DESCRIPTION",
-    defaultValue:
+    defaultValue: "",
     condition: {
         key: "action",
         value: "startCallRecording"
@@ -97,7 +97,7 @@ exports.recordNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     };
     try {
         const payload = mapper.handleInput(recordInput, api, false);
-        api.say(null, {
+        await api.say(null, {
            _cognigy: payload
         });
     }
@@ -88,10 +88,10 @@ exports.referNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     try {
         const payload = transfer_mapper_1.transfer.handleInput("voiceGateway2", transferParams, false);
         if (input.channel === "adminconsole") {
-            api.output('You are using the deprecated Refer Node. Please replace it with new Transfer Node in the Node Selection Menu.', null);
+            await api.output('You are using the deprecated Refer Node. Please replace it with new Transfer Node in the Node Selection Menu.', null);
         }
         api.log('error', 'You are using the deprecated Refer Node. Please replace it with new Transfer Node in the Node Selection Menu.');
-        api.say(null, {
+        await api.say(null, {
            _cognigy: payload
         });
     }
@@ -37,7 +37,7 @@ exports.sendMetadataNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     if (!config.metadata)
         return;
     try {
-        api.say(null, {
+        await api.say(null, {
            _cognigy: sendMetadata_mapper_1.sendMetadata.handleInput("voiceGateway2", config.metadata)
         });
     }
@@ -1076,7 +1076,7 @@ exports.setSessionConfigNode = (0, createNodeDescriptor_1.createNodeDescriptor)(
     try {
         const voiceSettings = (0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config, api);
         const payload = setSessionConfig_mapper_1.setSessionConfig.handleVGInput(voiceSettings, sessionParams, api);
-        api.say(null, {
+        await api.say(null, {
            _cognigy: payload,
         });
         (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, voiceSettings);
@@ -508,7 +508,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     if (agentAssistEnabled && !dialTranscriptionWebhook) {
         textWarningAdminChannel = `${textWarningAdminChannel + "\n"} Copilot is enabled but no transcription webhook is configured. Please configure a transcription webhook in the Transfer node.`;
     }
-    api.say(textWarningAdminChannel, null);
+    await api.say(textWarningAdminChannel, null);
     return;
 }
 if (agentAssistEnabled && dialTranscriptionWebhook) {
@@ -547,7 +547,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     api.log("error", "Invalid JSON in Transfer SIP Headers");
 }
 const payload = transfer_mapper_1.transfer.handleInput("voiceGateway2", transferParams, false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia);
-api.say(null, {
+await api.say(null, {
     _cognigy: payload,
 });
 api.logDebugMessage(`${transferType} UI__DEBUG_MODE__TRANSFER__MESSAGE ${transferTarget}`);
@@ -101,7 +101,8 @@ exports.handoverSettingsSchema = {
             enableTranscriptTile: { type: "boolean" },
             enableTranscriptTileChatInput: { type: "boolean" },
             redactTranscriptTileMessages: { type: "boolean" },
-
+            enableAgentCopilotAuthentication: { type: "boolean" },
+            agentCopilotAuthentication: { type: "string" },
         }
     }
 }
@@ -98,6 +98,7 @@ exports.nodeFieldTypes = [
     "checkbox",
     "chipInput",
     "code",
+    "cognigyLLMText",
     "cognigyText",
     "cognigyTextArray",
     "condition",
@@ -140,7 +141,7 @@ exports.nodeFieldTypes = [
     "toggle",
     "ttsSelect",
     "typescript",
-    "xml"
+    "xml"
 ];
 exports.searchableNodeFieldTypes = [
     "text",
@@ -16,7 +16,8 @@ exports.restChannelTypes = [
     "nonConversational",
     "amazonLex",
     "genesysBotConnector",
-    "niceCXOne"
+    "niceCXOne",
+    "niceCXOneAAH",
 ];
 exports.restChannelDisplayNames = [
     {
@@ -78,6 +79,10 @@ exports.restChannelDisplayNames = [
     {
         channel: "niceCXOne",
         displayName: "NICE CXone"
-    }
+    },
+    {
+        channel: "niceCXOneAAH",
+        displayName: "NICE CXone AAH"
+    },
 ];
 //# sourceMappingURL=TRestChannelType.js.map
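The package/build hunks above express the awaited output calls as a plain `await`, while the package/dist/esm hunks below are downleveled through TypeScript's `__awaiter` helper, so the same awaited call surfaces as `yield` inside a generator. A minimal sketch of that equivalence, assuming the `tslib` helper package and a stand-in `api` stub (not this package's actual API object):

```js
// Illustrative sketch only; "api" here is a hypothetical stub, not the real Cognigy API object.
const { __awaiter } = require("tslib");

const api = {
    say: (text, data) => Promise.resolve(console.log("say:", text, JSON.stringify(data))),
};

// Source-level / build-output form: the output call is simply awaited.
async function runNodeBuildStyle(api) {
    await api.say(null, { _cognigy: {} });
}

// dist/esm form: __awaiter drives a generator, so the same await appears as yield.
function runNodeEsmStyle(api) {
    return __awaiter(this, void 0, void 0, function* () {
        yield api.say(null, { _cognigy: {} });
    });
}

runNodeBuildStyle(api).then(() => runNodeEsmStyle(api));
```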
@@ -49,7 +49,7 @@ export function AuthenticationAPI(instance) {
         }),
         exchangeOneTimeTokenForRefreshToken: (_a, options) => {
             var { loginToken } = _a, args = __rest(_a, ["loginToken"]);
-            return GenericAPIFn(`/auth/
+            return GenericAPIFn(`/auth/exchangetoken?${stringifyQuery({ loginToken })}`, "GET", self)(args, Object.assign({ withAuthentication: false }, options));
         }
     };
 }
@@ -29,7 +29,7 @@ export const handleServiceError = ({ api, cognigy, nodeId, traceId, searchStoreL
     if (errorHandling === "continue") {
         // output the timeout message
         if (errorMessage) {
-            (_a = api.output) === null || _a === void 0 ? void 0 : _a.call(api, errorMessage, null);
+            yield ((_a = api.output) === null || _a === void 0 ? void 0 : _a.call(api, errorMessage, null));
         }
     }
     else if (errorHandling === "goto") {
@@ -138,7 +138,7 @@ export const KNOWLEDGE_ASSIST = createNodeDescriptor({
     {
         key: "prompt",
         label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
-        type: "
+        type: "cognigyLLMText",
         description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
         params: {
             required: true,
@@ -172,7 +172,7 @@ export const NEXT_ACTION_ASSIST = createNodeDescriptor({
     {
         key: "prompt",
         label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
-        type: "
+        type: "cognigyLLMText",
         description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
         params: {
             required: true,
@@ -89,7 +89,7 @@ export const OVERWRITE_ANALYTICS = createNodeDescriptor({
     {
         key: "intentScore",
         type: "number",
-        label: "UI__NODE_EDITOR__OVERWRITE_ANALYTICS__INTENT_SCORE__LABEL"
+        label: "UI__NODE_EDITOR__OVERWRITE_ANALYTICS__INTENT_SCORE__LABEL"
     },
     {
         key: "inputText",
@@ -91,7 +91,7 @@ export const REQUEST_RATING = createNodeDescriptor({
     const ratingSubmitButtonText = config.ratingSubmitButtonText;
     const ratingEventBannerText = config.ratingEventBannerText;
     const ratingChatStatusMessage = config.ratingChatStatusMessage;
-    api.say("", {
+    yield api.say("", {
         _plugin: {
             type: "request-rating",
             data: {
@@ -216,7 +216,7 @@ export const SEARCH_EXTRACT_OUTPUT = createNodeDescriptor({
     {
         key: "prompt",
         label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
-        type: "
+        type: "cognigyLLMText",
         description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
         params: {
             required: true,
@@ -657,7 +657,7 @@ New: `;
     if (errorHandling === "continue") {
         // output the timeout message
         if (errorMessage) {
-            api.output(errorMessage, null);
+            yield api.output(errorMessage, null);
         }
     }
     else if (errorHandling === "goto") {
@@ -978,7 +978,7 @@ New: `;
     if (mode === "seo") {
         switch (outputMode) {
             case "adaptiveCard":
-                api.output(null, {
+                yield api.output(null, {
                     "_cognigy": {
                         "_default": {
                             "_adaptiveCard": {
@@ -991,12 +991,12 @@ New: `;
                 });
                 break;
             case "text":
-                api.output(promptResponse, null);
+                yield api.output(promptResponse, null);
                 break;
             case "stream":
                 // fallback in case the stream for some reason didn't stream anything
                 if (!streamedOutput) {
-                    api.output(promptResponse, null);
+                    yield api.output(promptResponse, null);
                 }
         }
     }
@@ -1010,7 +1010,7 @@ New: `;
     }
     else {
         if (mode === "seo") {
-            api.output(outputFallback, null);
+            yield api.output(outputFallback, null);
         }
     }
 })
@@ -187,7 +187,7 @@ export const ASSIST_INFO = createNodeDescriptor({
     function: ({ cognigy, config }) => __awaiter(void 0, void 0, void 0, function* () {
         const { api } = cognigy;
         const dataResponse = buildCognigyWhisperAssistResponse(config);
-        api.say("", dataResponse);
+        yield api.say("", dataResponse);
     }),
 });
 //# sourceMappingURL=assistInfo.js.map
@@ -172,7 +172,7 @@ export const THINK_V2 = createNodeDescriptor({
     if (errorHandling === "continue") {
         // output the provided error message
         if (errorMessage) {
-            api.output(errorMessage, null);
+            yield api.output(errorMessage, null);
         }
     }
     else if (errorHandling === "goto") {
@@ -68,7 +68,7 @@ export const CHECK_CHANNEL_CHANGE = createNodeDescriptor({
         }
         return;
     }
-    api.say(questionToAsk);
+    yield api.say(questionToAsk);
     api.setNextNode(thisNodeId);
     api.stopExecution();
     return;
@@ -83,7 +83,7 @@ export const CHECK_CHANNEL_CHANGE = createNodeDescriptor({
         api.resetExecutionAmount(thisNodeId);
         return;
     }
-    api.say(validationMessage.replace("\{lastChannel\}", lastChannel) || questionToAsk);
+    yield api.say(validationMessage.replace("\{lastChannel\}", lastChannel) || questionToAsk);
     api.setNextNode(thisNodeId);
     api.stopExecution();
 })
@@ -122,7 +122,7 @@ export const SAY = createNodeDescriptor({
         delete _data._cognigy._default;
     }
     outputText = yield rephraseSentenceWithAI(outputText, config, api, organisationId);
-    api.say(outputText, _data, settings);
+    yield api.say(outputText, _data, settings);
 }
 else {
     const _cognigyDefault = (_b = _data === null || _data === void 0 ? void 0 : _data._cognigy) === null || _b === void 0 ? void 0 : _b._default;
@@ -149,7 +149,7 @@ export const SAY = createNodeDescriptor({
      * This is a say node which is set to a type other than 'text' -
      * so we have one of the new rich-media types such as 'gallery'
      */
-    api.say(null, config.say, settings);
+    yield api.say(null, config.say, settings);
     }
 }),
 });
@@ -23,7 +23,7 @@ export const SEND_TEXT = createNodeDescriptor({
     function: ({ cognigy, config }) => __awaiter(void 0, void 0, void 0, function* () {
         const { text, data } = config;
         const { api } = cognigy;
-        api.say(text, data);
+        yield api.say(text, data);
     })
 });
 //# sourceMappingURL=sendText.js.map
@@ -20,7 +20,7 @@ export const microsoftGetTokenNode = createNodeDescriptor({
     function: ({ cognigy }) => __awaiter(void 0, void 0, void 0, function* () {
         const { api } = cognigy;
         api.addToContext("microsoftSsoPermissionRequest", "pending", "simple");
-        api.say("", {
+        yield api.say("", {
            _cognigy: {
                _microsoftBotFramework: {
                    json: { requestMicrosoftSsoPermissions: true },
@@ -21,7 +21,7 @@ export const microsoftInvalidateTokenNode = createNodeDescriptor({
     function: ({ cognigy }) => __awaiter(void 0, void 0, void 0, function* () {
         const { api } = cognigy;
         api.addToContext("microsoftSsoPermissionRequest", "declined", "simple");
-        api.say("", { _cognigy: { _microsoftBotFramework: { json: { revokeMicrosoftSsoPermissions: true } } } });
+        yield api.say("", { _cognigy: { _microsoftBotFramework: { json: { revokeMicrosoftSsoPermissions: true } } } });
     })
 });
 //# sourceMappingURL=invalidateToken.js.map
@@ -388,11 +388,11 @@ export const GPT_CONVERSATION = createNodeDescriptor({
     try {
         prompt = yield buildPrompt(context, input, api, config);
         if (debug && input.channel === "adminconsole")
-            api.output(prompt, null);
+            yield api.output(prompt, null);
     }
     catch (error) {
         if (debug && input.channel === "adminconsole")
-            api.output("Error in building prompt: " + error.message, null);
+            yield api.output("Error in building prompt: " + error.message, null);
         else
             api.log("error", "Error in building prompt: " + error.message);
     }
@@ -404,11 +404,11 @@ export const GPT_CONVERSATION = createNodeDescriptor({
     }
     catch (error) {
         if (debug && input.channel === "adminconsole")
-            api.output("Error in executing prompt: " + error.message, null);
+            yield api.output("Error in executing prompt: " + error.message, null);
         else
             api.log("error", "Error in executing prompt: " + error.message);
         if (error.code && error.code === 'ECONNABORTED' && error.isAxiosError) {
-            api.output(timeoutMessage, null);
+            yield api.output(timeoutMessage, null);
         }
     }
     if (!response) {
@@ -418,11 +418,11 @@ export const GPT_CONVERSATION = createNodeDescriptor({
     }
     catch (error) {
         if (debug && input.channel === "adminconsole")
-            api.output("Error in executing prompt: " + error.message, null);
+            yield api.output("Error in executing prompt: " + error.message, null);
         else
             api.log("error", "Error in executing prompt: " + error.message);
         if (error.code && error.code === 'ECONNABORTED' && error.isAxiosError) {
-            api.output(timeoutMessage, null);
+            yield api.output(timeoutMessage, null);
         }
     }
 }
@@ -435,11 +435,11 @@ export const GPT_CONVERSATION = createNodeDescriptor({
         commands = yield parseResponse(api, input, debug, response);
     }
     if (debug && input.channel === "adminconsole")
-        api.output(JSON.stringify(commands, null, 4), null);
+        yield api.output(JSON.stringify(commands, null, 4), null);
 }
 catch (error) {
     if (debug && input.channel === "adminconsole")
-        api.output("Error in parsing response: " + error.message, null);
+        yield api.output("Error in parsing response: " + error.message, null);
     else
         api.log("error", "Error in parsing response: " + error.message);
 }
@@ -448,7 +448,7 @@ export const GPT_CONVERSATION = createNodeDescriptor({
 }
 catch (error) {
     if (debug && input.channel === "adminconsole")
-        api.output("Error in handling commands: " + error.message, null);
+        yield api.output("Error in handling commands: " + error.message, null);
     else
         api.log("error", "Error in handling commands: " + error.message);
 }
@@ -620,7 +620,7 @@ function parseResponse(api, input, debug, response) {
     const individualCommands = apiResponse.substr(1, apiResponse.length - 1).split("##BREAK##");
     individualCommands[individualCommands.length - 1] = individualCommands[individualCommands.length - 1].substring(0, individualCommands[individualCommands.length - 1].length - 1);
     const newCommands = [];
-    individualCommands.forEach((command) => {
+    individualCommands.forEach((command) => __awaiter(this, void 0, void 0, function* () {
        try {
            // again try to just fix the command and parse
            const { data, changed } = jsonFix(command);
@@ -666,12 +666,12 @@ function parseResponse(api, input, debug, response) {
        catch (err) {
            // if all fails, output an error
            if (debug && input.channel === "adminconsole")
-                api.output("Error in parsing command " + command + " : " + err.message, null);
+                yield api.output("Error in parsing command " + command + " : " + err.message, null);
            else
                api.log("debug", "Error in parsing command " + command + " : " + err.message);
        }
     }
-    });
+    }));
     return newCommands;
 }
 }
@@ -697,7 +697,7 @@ function handleCommands(context, input, api, debug, commands, config) {
     if (command.message) {
         context.lastOutput = command.message;
         if (handleOutputs === "output") {
-            api.output(command.message, null);
+            yield api.output(command.message, null);
         }
         else {
             outputBuffer.push(command.message);
@@ -721,13 +721,13 @@ function handleCommands(context, input, api, debug, commands, config) {
     }
     catch (err) {
         if (debug && input.channel === "adminconsole")
-            api.output("Couldn't parse command " + command, null);
+            yield api.output("Couldn't parse command " + command, null);
         else
             api.log("error", "Couldn't parse command " + command);
     }
 }
 if (!outputs) {
-    api.output("Sorry, I was absentminded for a second, could you say that again please?", null);
+    yield api.output("Sorry, I was absentminded for a second, could you say that again please?", null);
     api.log("error", JSON.stringify(commands));
 }
 if (outputBuffer && outputBuffer.length > 0) {
@@ -21,7 +21,7 @@ export const GPT_PROMPT = createNodeDescriptor({
     {
         key: "prompt",
         label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT_SYSTEM__LABEL",
-        type: "
+        type: "cognigyLLMText",
         description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT_SYSTEM__DESCRIPTION",
         params: {
             multiline: true,
@@ -595,7 +595,7 @@ export const GPT_PROMPT = createNodeDescriptor({
     if (errorHandling === "continue") {
         // output the timeout message
         if (errorMessage) {
-            api.output(errorMessage, null);
+            yield api.output(errorMessage, null);
         }
     }
     else if (errorHandling === "goto") {
@@ -680,7 +680,7 @@ export const GPT_PROMPT = createNodeDescriptor({
     // output result immediately if toggle is set
     if (immediateOutput) {
         const resultToOutput = typeof ((response === null || response === void 0 ? void 0 : response.result) || response) === "object" ? JSON.stringify((response === null || response === void 0 ? void 0 : response.result) || response, undefined, 2) : (response === null || response === void 0 ? void 0 : response.result) || response;
-        api.output(resultToOutput, null);
+        yield api.output(resultToOutput, null);
     }
 }
 else if (storeLocation === "input" || (storeLocation === "stream" && streamStoreCopyInInput)) {
@@ -690,7 +690,7 @@ export const GPT_PROMPT = createNodeDescriptor({
     // this means we don't output the result again if we streamed
     if (storeLocation === "input" && immediateOutput) {
         const resultToOutput = typeof ((response === null || response === void 0 ? void 0 : response.result) || response) === "object" ? JSON.stringify((response === null || response === void 0 ? void 0 : response.result) || response, undefined, 2) : (response === null || response === void 0 ? void 0 : response.result) || response;
-        api.output(resultToOutput, null);
+        yield api.output(resultToOutput, null);
     }
 }
 }
@@ -766,11 +766,11 @@ export const AI_AGENT_JOB = createNodeDescriptor({
     const generated_buffer_phrase = (_h = (_g = lastToolCall === null || lastToolCall === void 0 ? void 0 : lastToolCall.function) === null || _g === void 0 ? void 0 : _g.arguments) === null || _h === void 0 ? void 0 : _h.generated_buffer_phrase;
     if (generated_buffer_phrase) {
         // output the generated buffer phrase. Don't add it to the transcript, else the LLM will repeat it next time.
-        (_j = api.output) === null || _j === void 0 ? void 0 : _j.call(api, generated_buffer_phrase, {
+        yield ((_j = api.output) === null || _j === void 0 ? void 0 : _j.call(api, generated_buffer_phrase, {
             _cognigy: {
                 _preventTranscript: true
             }
-        });
+        }));
     }
 }
 if (knowledgeSearchTags && knowledgeSearchTags.length > 0) {
@@ -1076,7 +1076,7 @@ export const AI_AGENT_JOB = createNodeDescriptor({
     }
     // Optionally output the result immediately
     if (llmResult.result && outputImmediately && !llmPromptOptions.stream) {
-        (_2 = api.output) === null || _2 === void 0 ? void 0 : _2.call(api, llmResult.result, {});
+        yield ((_2 = api.output) === null || _2 === void 0 ? void 0 : _2.call(api, llmResult.result, {}));
     }
     // If we are streaming and we got a result, also store it into the transcript, since streamed chunks are not stored there
     if (llmResult.result && llmPromptOptions.stream) {
@@ -1122,7 +1122,7 @@ export const AI_AGENT_JOB = createNodeDescriptor({
     if (errorHandling === "continue") {
         // output the timeout message
         if (errorMessage) {
-            (_8 = api.output) === null || _8 === void 0 ? void 0 : _8.call(api, errorMessage, null);
+            yield ((_8 = api.output) === null || _8 === void 0 ? void 0 : _8.call(api, errorMessage, null));
         }
         // Set default node as next node
         const defaultChild = childConfigs.find(child => child.type === "aiAgentJobDefault");
@@ -99,7 +99,7 @@ export const HANDOVER = createNodeDescriptor({
     function: ({ cognigy, config, childConfigs, nodeId }) => __awaiter(void 0, void 0, void 0, function* () {
         const { api, input } = cognigy;
         if (input.channel === "adminconsole") {
-            api.output("You are using the deprecated Handover Node. Please find the new Handover to Agent Node in the Node Selection Menu.", null);
+            yield api.output("You are using the deprecated Handover Node. Please find the new Handover to Agent Node in the Node Selection Menu.", null);
         }
         yield api.handover({ cognigy, config, nodeId, childConfigs });
     })
@@ -1,7 +1,9 @@
 /* JWT Secret Connection */
 export const JWT_SECRET_CONNECTION = {
-    type: "
-    label: "
-    fields: [
+    type: "AgentCopilotAuthentication",
+    label: "UI__NODE_EDITOR__AGENT_COPILOT_AUTHENTICATION_SECRET__LABEL",
+    fields: [
+        { fieldName: "jwtSecret", label: "UI__CONNECTION_EDITOR__FIELD_JWT SECRET" },
+    ],
 };
 //# sourceMappingURL=jwtSecret.js.map
@@ -94,7 +94,7 @@ export const BARGE_IN = createNodeDescriptor({
     const payload = setSessionConfig.handleInput(endpointType, voiceConfigParamsToVoiceSettings(config, api));
     /* we need to store this for the DTMF Collect - Config node */
     api.setSystemContext("vgSettings", { bargeInOnDtmf: config.bargeInOnDtmf });
-    api.say(null, {
+    yield api.say(null, {
         _cognigy: payload,
     });
     logFullConfigToDebugMode(cognigy, config);
@@ -58,7 +58,7 @@ export const CONTINUOUS_ASR = createNodeDescriptor({
     const { endpointType } = input;
     try {
         const payload = setSessionConfig.handleInput(endpointType, voiceConfigParamsToVoiceSettings(config, api));
-        api.say(null, {
+        yield api.say(null, {
            _cognigy: payload,
         });
         logFullConfigToDebugMode(cognigy, config);