@cognigy/rest-api-client 4.93.0 → 4.94.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +5 -0
- package/build/authentication/AuthenticationAPI.js +1 -1
- package/build/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/errorHandler.helper.js +1 -1
- package/build/shared/charts/descriptors/agentAssist/knowledgeAssist.js +1 -1
- package/build/shared/charts/descriptors/agentAssist/nextActionAssist.js +1 -1
- package/build/shared/charts/descriptors/analytics/overwriteAnalytics.js +1 -1
- package/build/shared/charts/descriptors/analytics/requestRating.js +1 -1
- package/build/shared/charts/descriptors/knowledgeSearch/searchExtractOutput.js +6 -6
- package/build/shared/charts/descriptors/liveAgent/assistInfo.js +1 -1
- package/build/shared/charts/descriptors/logic/thinkV2.js +1 -1
- package/build/shared/charts/descriptors/message/checkChannelChange/checkChannelChange.js +2 -2
- package/build/shared/charts/descriptors/message/say.js +2 -2
- package/build/shared/charts/descriptors/message/sendText.js +1 -1
- package/build/shared/charts/descriptors/microsoft/getToken.js +1 -1
- package/build/shared/charts/descriptors/microsoft/invalidateToken.js +1 -1
- package/build/shared/charts/descriptors/service/GPTConversation.js +14 -14
- package/build/shared/charts/descriptors/service/GPTPrompt.js +4 -4
- package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +4 -4
- package/build/shared/charts/descriptors/service/handover.js +1 -1
- package/build/shared/charts/descriptors/service/jwtSecret.js +5 -3
- package/build/shared/charts/descriptors/voice/nodes/bargeIn.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/continuousAsr.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/dtmf.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/hangup.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/muteSpeechInput.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/noUserInput.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/play.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/sendMetadata.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/sessionSpeechParameters.js +1 -1
- package/build/shared/charts/descriptors/voice/nodes/transfer.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/agentAssist.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/callRecording.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/handover.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/hangup.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/playURL.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/sendMessage.js +3 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/sendMetaData.js +1 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/setSessionParams.js +2 -2
- package/build/shared/charts/descriptors/voicegateway2/nodes/dtmf.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/hangup.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/muteSpeechInput.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/play.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/record.js +3 -3
- package/build/shared/charts/descriptors/voicegateway2/nodes/refer.js +2 -2
- package/build/shared/charts/descriptors/voicegateway2/nodes/sendMetadata.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js +1 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js +2 -2
- package/build/shared/interfaces/handover.js +2 -1
- package/build/shared/interfaces/messageAPI/endpoints.js +1 -0
- package/build/shared/interfaces/resources/INodeDescriptorSet.js +2 -1
- package/build/shared/interfaces/resources/TRestChannelType.js +7 -2
- package/dist/esm/authentication/AuthenticationAPI.js +1 -1
- package/dist/esm/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/errorHandler.helper.js +1 -1
- package/dist/esm/shared/charts/descriptors/agentAssist/knowledgeAssist.js +1 -1
- package/dist/esm/shared/charts/descriptors/agentAssist/nextActionAssist.js +1 -1
- package/dist/esm/shared/charts/descriptors/analytics/overwriteAnalytics.js +1 -1
- package/dist/esm/shared/charts/descriptors/analytics/requestRating.js +1 -1
- package/dist/esm/shared/charts/descriptors/knowledgeSearch/searchExtractOutput.js +6 -6
- package/dist/esm/shared/charts/descriptors/liveAgent/assistInfo.js +1 -1
- package/dist/esm/shared/charts/descriptors/logic/thinkV2.js +1 -1
- package/dist/esm/shared/charts/descriptors/message/checkChannelChange/checkChannelChange.js +2 -2
- package/dist/esm/shared/charts/descriptors/message/say.js +2 -2
- package/dist/esm/shared/charts/descriptors/message/sendText.js +1 -1
- package/dist/esm/shared/charts/descriptors/microsoft/getToken.js +1 -1
- package/dist/esm/shared/charts/descriptors/microsoft/invalidateToken.js +1 -1
- package/dist/esm/shared/charts/descriptors/service/GPTConversation.js +15 -15
- package/dist/esm/shared/charts/descriptors/service/GPTPrompt.js +4 -4
- package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +4 -4
- package/dist/esm/shared/charts/descriptors/service/handover.js +1 -1
- package/dist/esm/shared/charts/descriptors/service/jwtSecret.js +5 -3
- package/dist/esm/shared/charts/descriptors/voice/nodes/bargeIn.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/continuousAsr.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/dtmf.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/hangup.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/muteSpeechInput.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/noUserInput.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/play.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/sendMetadata.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/sessionSpeechParameters.js +1 -1
- package/dist/esm/shared/charts/descriptors/voice/nodes/transfer.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/agentAssist.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/callRecording.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/handover.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/hangup.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/playURL.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/sendMessage.js +3 -2
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/sendMetaData.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway/nodes/setSessionParams.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/dtmf.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/hangup.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/muteSpeechInput.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/play.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/record.js +3 -3
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/refer.js +2 -2
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/sendMetadata.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js +1 -1
- package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/transfer.js +2 -2
- package/dist/esm/shared/interfaces/handover.js +2 -1
- package/dist/esm/shared/interfaces/messageAPI/endpoints.js +1 -0
- package/dist/esm/shared/interfaces/resources/INodeDescriptorSet.js +2 -1
- package/dist/esm/shared/interfaces/resources/TRestChannelType.js +7 -2
- package/package.json +3 -2
- package/types/index.d.ts +17 -5
- package/build/shared/interfaces/license.js.map +0 -1
- package/build/test.js +0 -1
- package/dist/esm/shared/interfaces/license.js.map +0 -1
- package/dist/esm/test.js +0 -1
package/CHANGELOG.md
CHANGED
package/build/authentication/AuthenticationAPI.js
CHANGED
@@ -62,7 +62,7 @@ function AuthenticationAPI(instance) {
 },
 exchangeOneTimeTokenForRefreshToken: (_a, options) => {
 var { loginToken } = _a, args = __rest(_a, ["loginToken"]);
-return (0, GenericAPIFn_1.GenericAPIFn)(`/auth/
+return (0, GenericAPIFn_1.GenericAPIFn)(`/auth/exchangetoken?${(0, rest_1.stringifyQuery)({ loginToken })}`, "GET", self)(args, Object.assign({ withAuthentication: false }, options));
 }
 };
 }
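Note: the only functional change in AuthenticationAPI.js above is the rebuilt exchange-token URL (the removed line is truncated in this extract). A minimal illustration of what the added line evaluates to at runtime, assuming stringifyQuery yields standard key=value pairs; the token value is made up:

// Illustration only - hypothetical values, not part of the diff.
const loginToken = "abc123";
const query = `loginToken=${encodeURIComponent(loginToken)}`; // what stringifyQuery({ loginToken }) is expected to produce
const url = `/auth/exchangetoken?${query}`; // -> "/auth/exchangetoken?loginToken=abc123", sent as GET with { withAuthentication: false }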
package/build/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/errorHandler.helper.js
CHANGED
@@ -32,7 +32,7 @@ const handleServiceError = ({ api, cognigy, nodeId, traceId, searchStoreLocation
 if (errorHandling === "continue") {
 // output the timeout message
 if (errorMessage) {
-(_a = api.output) === null || _a === void 0 ? void 0 : _a.call(api, errorMessage, null);
+await ((_a = api.output) === null || _a === void 0 ? void 0 : _a.call(api, errorMessage, null));
 }
 }
 else if (errorHandling === "goto") {
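Note: the hunk above is representative of most changes in this release: calls to api.output and api.say inside node descriptor functions are now awaited. A minimal sketch of the pattern, based on the descriptor shape visible elsewhere in this diff; the node type and text are placeholders, not part of the package:

// Sketch only - descriptor fields reduced to the ones relevant here.
const EXAMPLE_NODE = createNodeDescriptor({
    type: "exampleNode", // hypothetical
    function: async ({ cognigy }) => {
        const { api } = cognigy;
        // Previously fire-and-forget; now awaited so the node function does not
        // resolve before the output has been flushed.
        await api.output("Hello from the example node", null);
    },
});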
@@ -140,7 +140,7 @@ exports.KNOWLEDGE_ASSIST = (0, createNodeDescriptor_1.createNodeDescriptor)({
 {
 key: "prompt",
 label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
-type: "
+type: "cognigyLLMText",
 description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
 params: {
 required: true,
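Note: the same field-type change recurs in nextActionAssist, searchExtractOutput, and GPTPrompt below: the prompt field's editor type becomes "cognigyLLMText" (the previous value is truncated in this extract). Read together with the context lines, the field descriptor now reads as follows; only the params visible in the hunks are shown:

{
    key: "prompt",
    label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
    type: "cognigyLLMText",
    description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
    params: {
        required: true,
        // GPTPrompt's variant of this field additionally sets multiline: true
    },
},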
@@ -174,7 +174,7 @@ exports.NEXT_ACTION_ASSIST = (0, createNodeDescriptor_1.createNodeDescriptor)({
 {
 key: "prompt",
 label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
-type: "
+type: "cognigyLLMText",
 description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
 params: {
 required: true,
@@ -91,7 +91,7 @@ exports.OVERWRITE_ANALYTICS = (0, createNodeDescriptor_1.createNodeDescriptor)({
 {
 key: "intentScore",
 type: "number",
-label: "UI__NODE_EDITOR__OVERWRITE_ANALYTICS__INTENT_SCORE__LABEL"
+label: "UI__NODE_EDITOR__OVERWRITE_ANALYTICS__INTENT_SCORE__LABEL"
 },
 {
 key: "inputText",
@@ -93,7 +93,7 @@ exports.REQUEST_RATING = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const ratingSubmitButtonText = config.ratingSubmitButtonText;
 const ratingEventBannerText = config.ratingEventBannerText;
 const ratingChatStatusMessage = config.ratingChatStatusMessage;
-api.say("", {
+await api.say("", {
 _plugin: {
 type: "request-rating",
 data: {
@@ -218,7 +218,7 @@ exports.SEARCH_EXTRACT_OUTPUT = (0, createNodeDescriptor_1.createNodeDescriptor)
 {
 key: "prompt",
 label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__LABEL",
-type: "
+type: "cognigyLLMText",
 description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT__DESCRIPTION",
 params: {
 required: true,
@@ -659,7 +659,7 @@ New: `;
 if (errorHandling === "continue") {
 // output the timeout message
 if (errorMessage) {
-api.output(errorMessage, null);
+await api.output(errorMessage, null);
 }
 }
 else if (errorHandling === "goto") {
@@ -980,7 +980,7 @@ New: `;
 if (mode === "seo") {
 switch (outputMode) {
 case "adaptiveCard":
-api.output(null, {
+await api.output(null, {
 "_cognigy": {
 "_default": {
 "_adaptiveCard": {
@@ -993,12 +993,12 @@ New: `;
 });
 break;
 case "text":
-api.output(promptResponse, null);
+await api.output(promptResponse, null);
 break;
 case "stream":
 // fallback in case the stream for some reason didn't stream anything
 if (!streamedOutput) {
-api.output(promptResponse, null);
+await api.output(promptResponse, null);
 }
 }
 }
@@ -1012,7 +1012,7 @@ New: `;
 }
 else {
 if (mode === "seo") {
-api.output(outputFallback, null);
+await api.output(outputFallback, null);
 }
 }
 }
@@ -189,7 +189,7 @@ exports.ASSIST_INFO = (0, createNodeDescriptor_1.createNodeDescriptor)({
 function: async ({ cognigy, config }) => {
 const { api } = cognigy;
 const dataResponse = (0, utils_1.buildCognigyWhisperAssistResponse)(config);
-api.say("", dataResponse);
+await api.say("", dataResponse);
 },
 });
 //# sourceMappingURL=assistInfo.js.map
@@ -174,7 +174,7 @@ exports.THINK_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
 if (errorHandling === "continue") {
 // output the provided error message
 if (errorMessage) {
-api.output(errorMessage, null);
+await api.output(errorMessage, null);
 }
 }
 else if (errorHandling === "goto") {
@@ -70,7 +70,7 @@ exports.CHECK_CHANNEL_CHANGE = (0, createNodeDescriptor_1.createNodeDescriptor)(
 }
 return;
 }
-api.say(questionToAsk);
+await api.say(questionToAsk);
 api.setNextNode(thisNodeId);
 api.stopExecution();
 return;
@@ -85,7 +85,7 @@ exports.CHECK_CHANNEL_CHANGE = (0, createNodeDescriptor_1.createNodeDescriptor)(
 api.resetExecutionAmount(thisNodeId);
 return;
 }
-api.say(validationMessage.replace("\{lastChannel\}", lastChannel) || questionToAsk);
+await api.say(validationMessage.replace("\{lastChannel\}", lastChannel) || questionToAsk);
 api.setNextNode(thisNodeId);
 api.stopExecution();
 }
@@ -124,7 +124,7 @@ exports.SAY = (0, createNodeDescriptor_1.createNodeDescriptor)({
 delete _data._cognigy._default;
 }
 outputText = await (0, rephraseSentenceWithAi_1.rephraseSentenceWithAI)(outputText, config, api, organisationId);
-api.say(outputText, _data, settings);
+await api.say(outputText, _data, settings);
 }
 else {
 const _cognigyDefault = (_b = _data === null || _data === void 0 ? void 0 : _data._cognigy) === null || _b === void 0 ? void 0 : _b._default;
@@ -151,7 +151,7 @@ exports.SAY = (0, createNodeDescriptor_1.createNodeDescriptor)({
 * This is a say node which is set to a type other than 'text' -
 * so we have one of the new rich-media types such as 'gallery'
 */
-api.say(null, config.say, settings);
+await api.say(null, config.say, settings);
 }
 },
 });
@@ -25,7 +25,7 @@ exports.SEND_TEXT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 function: async ({ cognigy, config }) => {
 const { text, data } = config;
 const { api } = cognigy;
-api.say(text, data);
+await api.say(text, data);
 }
 });
 //# sourceMappingURL=sendText.js.map
@@ -22,7 +22,7 @@ exports.microsoftGetTokenNode = (0, createNodeDescriptor_1.createNodeDescriptor)
 function: async ({ cognigy }) => {
 const { api } = cognigy;
 api.addToContext("microsoftSsoPermissionRequest", "pending", "simple");
-api.say("", {
+await api.say("", {
 _cognigy: {
 _microsoftBotFramework: {
 json: { requestMicrosoftSsoPermissions: true },
@@ -23,7 +23,7 @@ exports.microsoftInvalidateTokenNode = (0, createNodeDescriptor_1.createNodeDesc
 function: async ({ cognigy }) => {
 const { api } = cognigy;
 api.addToContext("microsoftSsoPermissionRequest", "declined", "simple");
-api.say("", { _cognigy: { _microsoftBotFramework: { json: { revokeMicrosoftSsoPermissions: true } } } });
+await api.say("", { _cognigy: { _microsoftBotFramework: { json: { revokeMicrosoftSsoPermissions: true } } } });
 }
 });
 //# sourceMappingURL=invalidateToken.js.map
@@ -390,11 +390,11 @@ exports.GPT_CONVERSATION = (0, createNodeDescriptor_1.createNodeDescriptor)({
 try {
 prompt = await buildPrompt(context, input, api, config);
 if (debug && input.channel === "adminconsole")
-api.output(prompt, null);
+await api.output(prompt, null);
 }
 catch (error) {
 if (debug && input.channel === "adminconsole")
-api.output("Error in building prompt: " + error.message, null);
+await api.output("Error in building prompt: " + error.message, null);
 else
 api.log("error", "Error in building prompt: " + error.message);
 }
@@ -406,11 +406,11 @@ exports.GPT_CONVERSATION = (0, createNodeDescriptor_1.createNodeDescriptor)({
 }
 catch (error) {
 if (debug && input.channel === "adminconsole")
-api.output("Error in executing prompt: " + error.message, null);
+await api.output("Error in executing prompt: " + error.message, null);
 else
 api.log("error", "Error in executing prompt: " + error.message);
 if (error.code && error.code === 'ECONNABORTED' && error.isAxiosError) {
-api.output(timeoutMessage, null);
+await api.output(timeoutMessage, null);
 }
 }
 if (!response) {
@@ -420,11 +420,11 @@ exports.GPT_CONVERSATION = (0, createNodeDescriptor_1.createNodeDescriptor)({
 }
 catch (error) {
 if (debug && input.channel === "adminconsole")
-api.output("Error in executing prompt: " + error.message, null);
+await api.output("Error in executing prompt: " + error.message, null);
 else
 api.log("error", "Error in executing prompt: " + error.message);
 if (error.code && error.code === 'ECONNABORTED' && error.isAxiosError) {
-api.output(timeoutMessage, null);
+await api.output(timeoutMessage, null);
 }
 }
 }
@@ -437,11 +437,11 @@ exports.GPT_CONVERSATION = (0, createNodeDescriptor_1.createNodeDescriptor)({
 commands = await parseResponse(api, input, debug, response);
 }
 if (debug && input.channel === "adminconsole")
-api.output(JSON.stringify(commands, null, 4), null);
+await api.output(JSON.stringify(commands, null, 4), null);
 }
 catch (error) {
 if (debug && input.channel === "adminconsole")
-api.output("Error in parsing response: " + error.message, null);
+await api.output("Error in parsing response: " + error.message, null);
 else
 api.log("error", "Error in parsing response: " + error.message);
 }
@@ -450,7 +450,7 @@ exports.GPT_CONVERSATION = (0, createNodeDescriptor_1.createNodeDescriptor)({
 }
 catch (error) {
 if (debug && input.channel === "adminconsole")
-api.output("Error in handling commands: " + error.message, null);
+await api.output("Error in handling commands: " + error.message, null);
 else
 api.log("error", "Error in handling commands: " + error.message);
 }
@@ -619,7 +619,7 @@ async function parseResponse(api, input, debug, response) {
 const individualCommands = apiResponse.substr(1, apiResponse.length - 1).split("##BREAK##");
 individualCommands[individualCommands.length - 1] = individualCommands[individualCommands.length - 1].substring(0, individualCommands[individualCommands.length - 1].length - 1);
 const newCommands = [];
-individualCommands.forEach((command) => {
+individualCommands.forEach(async (command) => {
 try {
 // again try to just fix the command and parse
 const { data, changed } = jsonFix(command);
@@ -665,7 +665,7 @@ async function parseResponse(api, input, debug, response) {
 catch (err) {
 // if all fails, output an error
 if (debug && input.channel === "adminconsole")
-api.output("Error in parsing command " + command + " : " + err.message, null);
+await api.output("Error in parsing command " + command + " : " + err.message, null);
 else
 api.log("debug", "Error in parsing command " + command + " : " + err.message);
 }
@@ -694,7 +694,7 @@ async function handleCommands(context, input, api, debug, commands, config) {
 if (command.message) {
 context.lastOutput = command.message;
 if (handleOutputs === "output") {
-api.output(command.message, null);
+await api.output(command.message, null);
 }
 else {
 outputBuffer.push(command.message);
@@ -718,13 +718,13 @@ async function handleCommands(context, input, api, debug, commands, config) {
 }
 catch (err) {
 if (debug && input.channel === "adminconsole")
-api.output("Couldn't parse command " + command, null);
+await api.output("Couldn't parse command " + command, null);
 else
 api.log("error", "Couldn't parse command " + command);
 }
 }
 if (!outputs) {
-api.output("Sorry, I was absentminded for a second, could you say that again please?", null);
+await api.output("Sorry, I was absentminded for a second, could you say that again please?", null);
 api.log("error", JSON.stringify(commands));
 }
 if (outputBuffer && outputBuffer.length > 0) {
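Note on the parseResponse hunk above: the forEach callback is now async so that the awaited api.output call compiles inside it, but Array.prototype.forEach itself does not wait for async callbacks to settle. A hedged sketch of an alternative that would await each output sequentially (illustration only, not what the package does):

// Illustration: sequential awaiting with for...of instead of forEach(async ...).
for (const command of individualCommands) {
    try {
        const { data, changed } = jsonFix(command); // jsonFix as used in the hunk above
        // ... same per-command handling as in parseResponse ...
    }
    catch (err) {
        await api.output("Error in parsing command " + command + " : " + err.message, null);
    }
}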
@@ -23,7 +23,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 {
 key: "prompt",
 label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT_SYSTEM__LABEL",
-type: "
+type: "cognigyLLMText",
 description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__PROMPT_SYSTEM__DESCRIPTION",
 params: {
 multiline: true,
@@ -597,7 +597,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 if (errorHandling === "continue") {
 // output the timeout message
 if (errorMessage) {
-api.output(errorMessage, null);
+await api.output(errorMessage, null);
 }
 }
 else if (errorHandling === "goto") {
@@ -682,7 +682,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 // output result immediately if toggle is set
 if (immediateOutput) {
 const resultToOutput = typeof ((response === null || response === void 0 ? void 0 : response.result) || response) === "object" ? JSON.stringify((response === null || response === void 0 ? void 0 : response.result) || response, undefined, 2) : (response === null || response === void 0 ? void 0 : response.result) || response;
-api.output(resultToOutput, null);
+await api.output(resultToOutput, null);
 }
 }
 else if (storeLocation === "input" || (storeLocation === "stream" && streamStoreCopyInInput)) {
@@ -692,7 +692,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 // this means we don't output the result again if we streamed
 if (storeLocation === "input" && immediateOutput) {
 const resultToOutput = typeof ((response === null || response === void 0 ? void 0 : response.result) || response) === "object" ? JSON.stringify((response === null || response === void 0 ? void 0 : response.result) || response, undefined, 2) : (response === null || response === void 0 ? void 0 : response.result) || response;
-api.output(resultToOutput, null);
+await api.output(resultToOutput, null);
 }
 }
 }
@@ -779,11 +779,11 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const generated_buffer_phrase = (_h = (_g = lastToolCall === null || lastToolCall === void 0 ? void 0 : lastToolCall.function) === null || _g === void 0 ? void 0 : _g.arguments) === null || _h === void 0 ? void 0 : _h.generated_buffer_phrase;
 if (generated_buffer_phrase) {
 // output the generated buffer phrase. Don't add it to the transcript, else the LLM will repeat it next time.
-(_j = api.output) === null || _j === void 0 ? void 0 : _j.call(api, generated_buffer_phrase, {
+await ((_j = api.output) === null || _j === void 0 ? void 0 : _j.call(api, generated_buffer_phrase, {
 _cognigy: {
 _preventTranscript: true
 }
-});
+}));
 }
 }
 if (knowledgeSearchTags && knowledgeSearchTags.length > 0) {
@@ -1089,7 +1089,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
 }
 // Optionally output the result immediately
 if (llmResult.result && outputImmediately && !llmPromptOptions.stream) {
-(_2 = api.output) === null || _2 === void 0 ? void 0 : _2.call(api, llmResult.result, {});
+await ((_2 = api.output) === null || _2 === void 0 ? void 0 : _2.call(api, llmResult.result, {}));
 }
 // If we are streaming and we got a result, also store it into the transcript, since streamed chunks are not stored there
 if (llmResult.result && llmPromptOptions.stream) {
@@ -1135,7 +1135,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
 if (errorHandling === "continue") {
 // output the timeout message
 if (errorMessage) {
-(_8 = api.output) === null || _8 === void 0 ? void 0 : _8.call(api, errorMessage, null);
+await ((_8 = api.output) === null || _8 === void 0 ? void 0 : _8.call(api, errorMessage, null));
 }
 // Set default node as next node
 const defaultChild = childConfigs.find(child => child.type === "aiAgentJobDefault");
@@ -101,7 +101,7 @@ exports.HANDOVER = (0, createNodeDescriptor_1.createNodeDescriptor)({
 function: async ({ cognigy, config, childConfigs, nodeId }) => {
 const { api, input } = cognigy;
 if (input.channel === "adminconsole") {
-api.output("You are using the deprecated Handover Node. Please find the new Handover to Agent Node in the Node Selection Menu.", null);
+await api.output("You are using the deprecated Handover Node. Please find the new Handover to Agent Node in the Node Selection Menu.", null);
 }
 await api.handover({ cognigy, config, nodeId, childConfigs });
 }
@@ -3,8 +3,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.JWT_SECRET_CONNECTION = void 0;
 /* JWT Secret Connection */
 exports.JWT_SECRET_CONNECTION = {
-type: "
-label: "
-fields: [
+type: "AgentCopilotAuthentication",
+label: "UI__NODE_EDITOR__AGENT_COPILOT_AUTHENTICATION_SECRET__LABEL",
+fields: [
+{ fieldName: "jwtSecret", label: "UI__CONNECTION_EDITOR__FIELD_JWT SECRET" },
+],
 };
 //# sourceMappingURL=jwtSecret.js.map
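Note: after this hunk, JWT_SECRET_CONNECTION is a fully populated connection descriptor (the previous type and label values are truncated in this extract). Assembled from the added lines, the exported object reads:

exports.JWT_SECRET_CONNECTION = {
    type: "AgentCopilotAuthentication",
    label: "UI__NODE_EDITOR__AGENT_COPILOT_AUTHENTICATION_SECRET__LABEL",
    fields: [
        { fieldName: "jwtSecret", label: "UI__CONNECTION_EDITOR__FIELD_JWT SECRET" },
    ],
};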
@@ -96,7 +96,7 @@ exports.BARGE_IN = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const payload = setSessionConfig_mapper_1.setSessionConfig.handleInput(endpointType, (0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config, api));
 /* we need to store this for the DTMF Collect - Config node */
 api.setSystemContext("vgSettings", { bargeInOnDtmf: config.bargeInOnDtmf });
-api.say(null, {
+await api.say(null, {
 _cognigy: payload,
 });
 (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -60,7 +60,7 @@ exports.CONTINUOUS_ASR = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const { endpointType } = input;
 try {
 const payload = setSessionConfig_mapper_1.setSessionConfig.handleInput(endpointType, (0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config, api));
-api.say(null, {
+await api.say(null, {
 _cognigy: payload,
 });
 (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -69,7 +69,7 @@ exports.DTMF = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const { endpointType } = input;
 try {
 const payload = setSessionConfig_mapper_1.setSessionConfig.handleInput(endpointType, (0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config, api));
-api.say(null, {
+await api.say(null, {
 _cognigy: payload,
 });
 (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -35,7 +35,7 @@ exports.HANG_UP = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const { endpointType } = input;
 try {
 const payload = hangup_mapper_1.hangUp.handleInput(endpointType, hangupReason);
-api.say(null, {
+await api.say(null, {
 _cognigy: payload
 });
 }
@@ -40,7 +40,7 @@ exports.MUTE_SPEECH_INPUT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 try {
 mapper.endpointType = endpointType;
 const payload = mapper.handleInput(config, api);
-api.say(null, {
+await api.say(null, {
 _cognigy: payload
 });
 api.logDebugMessage(`UI__DEBUG_MODE__MUTE_SPEECH__MESSAGE ${config.muteSpeechInput}`);
@@ -95,7 +95,7 @@ exports.USER_INPUT_TIMEOUT = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const { endpointType } = input;
 try {
 const payload = setSessionConfig_mapper_1.setSessionConfig.handleInput(endpointType, (0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config, api));
-api.say(null, {
+await api.say(null, {
 _cognigy: payload,
 });
 (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -38,7 +38,7 @@ exports.SEND_METADATA = (0, createNodeDescriptor_1.createNodeDescriptor)({
 if (!config.metadata)
 return;
 try {
-api.say(null, {
+await api.say(null, {
 _cognigy: sendMetadata_mapper_1.sendMetadata.handleInput(endpointType, config.metadata)
 });
 }
@@ -330,7 +330,7 @@ exports.SESSION_SPEECH_PARAMETERS = (0, createNodeDescriptor_1.createNodeDescrip
 const { endpointType } = input;
 try {
 const payload = setSessionConfig_mapper_1.setSessionConfig.handleInput(endpointType, (0, setSessionConfig_mapper_1.voiceConfigParamsToVoiceSettings)(config, api));
-api.say(null, {
+await api.say(null, {
 _cognigy: payload,
 });
 (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -97,7 +97,7 @@ exports.TRANSFER_VOICE = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const { endpointType } = input;
 try {
 if (input.channel === "adminconsole") {
-api.say("Transferring a call is not supported in the Interaction Panel.", null);
+await api.say("Transferring a call is not supported in the Interaction Panel.", null);
 return;
 }
 const transferParams = {
@@ -115,7 +115,7 @@ exports.TRANSFER_VOICE = (0, createNodeDescriptor_1.createNodeDescriptor)({
 api.log("error", "Invalid JSON in Transfer SIP Headers");
 }
 const payload = transfer_mapper_1.transfer.handleInput(endpointType, transferParams, true);
-api.say(null, {
+await api.say(null, {
 _cognigy: payload,
 });
 (0, logFullConfigToDebugMode_1.logFullConfigToDebugMode)(cognigy, config);
@@ -109,11 +109,11 @@ exports.callRecordingNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
 }
 const { isFeatureAccmEnabled } = api.getEndpointSettings();
 if (isFeatureAccmEnabled) {
-return api.say(null, {
+return await api.say(null, {
 _cognigy: (0, utils_1.buildPayloadAccm)("callRecording", config, nodeId)
 });
 }
-api.output(null, {
+await api.output(null, {
 _cognigy: {
 _voiceGateway: {
 json: {
@@ -131,12 +131,12 @@ exports.handoverNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
 };
 try {
 if (input.channel === "adminconsole") {
-api.say("Transferring a call is not supported in the Interaction Panel, please use the AudioCodes endpoint.", null);
+await api.say("Transferring a call is not supported in the Interaction Panel, please use the AudioCodes endpoint.", null);
 return;
 }
 const { isFeatureAccmEnabled } = api.getEndpointSettings();
 const payload = isFeatureAccmEnabled ? (0, utils_1.buildPayloadAccm)("handover", transferParams, nodeId) : transfer_mapper_1.transfer.handleInput("audioCodes", transferParams);
-api.say(null, {
+await api.say(null, {
 _cognigy: payload
 });
 }
@@ -38,7 +38,7 @@ exports.hangupNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
 try {
 const { isFeatureAccmEnabled } = api.getEndpointSettings();
 const payload = isFeatureAccmEnabled ? (0, utils_1.buildPayloadAccm)("hangup", config, nodeId) : hangup_mapper_1.hangUp.handleInput("audioCodes", hangupReason);
-api.say(null, {
+await api.say(null, {
 _cognigy: payload
 });
 }
@@ -119,7 +119,7 @@ exports.playURLNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
 try {
 const { isFeatureAccmEnabled } = api.getEndpointSettings();
 const payload = isFeatureAccmEnabled ? (0, utils_1.buildPayloadAccm)("playURL", config, nodeId) : play_mapper_1.play.handleInput({ endpointType: 'audioCodes', config });
-api.say(null, {
+await api.say(null, {
 _cognigy: payload
 });
 }
@@ -250,16 +250,17 @@ exports.sendMessageNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
 const { text, activityParams, setActivityParams } = config;
 const { isFeatureAccmEnabled } = api.getEndpointSettings();
 if (isFeatureAccmEnabled && text) {
-
+await api.say(null, {
 _cognigy: (0, utils_1.buildPayloadAccm)("sendMessage", config, nodeId)
 });
+return;
 }
 let compiledParams = activityParams || {};
 if (setActivityParams) {
 (0, paramUtils_1.compileParams)(config, compiledParams);
 }
 if (text) {
-api.output(null, {
+await api.output(null, {
 "_cognigy": {
 "_voiceGateway": {
 "json": {
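Note: besides awaiting the outputs, sendMessage.js gains an early return after the ACCM branch: when the ACCM endpoint feature is enabled and text is set, the node now sends only the buildPayloadAccm payload and no longer falls through to the legacy _voiceGateway output. A condensed view of the resulting control flow, built from the context lines of the hunk with comments added:

if (isFeatureAccmEnabled && text) {
    await api.say(null, {
        _cognigy: (0, utils_1.buildPayloadAccm)("sendMessage", config, nodeId)
    });
    return; // added in 4.94.0: the legacy output path below is no longer reached
}
// legacy path (ACCM disabled): compile activity params, then emit the _voiceGateway payload
if (text) {
    await api.output(null, { /* "_cognigy": { "_voiceGateway": { "json": { ... } } } */ });
}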
@@ -37,7 +37,7 @@ exports.sendMetaDataNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
 try {
 const { isFeatureAccmEnabled } = api.getEndpointSettings();
 const payload = isFeatureAccmEnabled ? (0, utils_1.buildPayloadAccm)("sendMetadata", config, nodeId) : sendMetadata_mapper_1.sendMetadata.handleInput("audioCodes", config.metaData);
-api.output(null, {
+await api.output(null, {
 _cognigy: payload
 });
 }