@cognigy/rest-api-client 0.13.1 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintConfig.json +117 -0
- package/.eslintrc.json +2 -2
- package/CHANGELOG.md +12 -1
- package/build/apigroups/ResourcesAPIGroup_2_0.js +25 -4
- package/build/shared/charts/descriptors/allFields.js +8 -0
- package/build/shared/charts/descriptors/apps/getAppSessionPin.js +9 -9
- package/build/shared/charts/descriptors/apps/initAppSession.js +36 -53
- package/build/shared/charts/descriptors/apps/setAdaptiveCardAppState.js +5 -18
- package/build/shared/charts/descriptors/apps/setAppState.js +6 -6
- package/build/shared/charts/descriptors/apps/setHtmlAppState.js +85 -15
- package/build/shared/charts/descriptors/apps/utils/buildAppUrl.js +12 -0
- package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/azureOpenAIProviderConnection.js +14 -0
- package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/index.js +19 -0
- package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/openAIProviderConnection.js +11 -0
- package/build/shared/charts/descriptors/index.js +9 -6
- package/build/shared/charts/descriptors/logic/switch/switch.js +3 -2
- package/build/shared/charts/descriptors/message/question/optionalQuestion.js +30 -7
- package/build/shared/charts/descriptors/message/question/question.js +200 -28
- package/build/shared/charts/descriptors/message/question/utils/evaluateQuestionAnswer.js +6 -0
- package/build/shared/charts/descriptors/message/say.js +33 -2
- package/build/shared/charts/descriptors/service/completeText.js +316 -0
- package/build/shared/charts/descriptors/service/handoverV2.js +48 -2
- package/build/shared/charts/descriptors/service/index.js +3 -1
- package/build/shared/charts/descriptors/voice/mappers/setSessionConfig.mapper.js +62 -26
- package/build/shared/charts/descriptors/voice/mappers/transfer.mapper.js +93 -32
- package/build/shared/charts/descriptors/voice/nodes/bargeIn.js +53 -0
- package/build/shared/charts/descriptors/voice/nodes/dtmf.js +5 -3
- package/build/shared/charts/descriptors/voice/nodes/play.js +14 -5
- package/build/shared/charts/descriptors/voice/nodes/sessionSpeechParameters.js +90 -8
- package/build/shared/charts/descriptors/voice/nodes/transfer.js +6 -2
- package/build/shared/charts/descriptors/voice/utils/helper.js +11 -1
- package/build/shared/charts/descriptors/voicegateway/nodes/agentAssist.js +2 -2
- package/build/shared/charts/descriptors/voicegateway/nodes/handover.js +5 -1
- package/build/shared/charts/descriptors/voicegateway2/nodes/play.js +20 -5
- package/build/shared/charts/descriptors/voicegateway2/nodes/refer.js +3 -3
- package/build/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js +150 -33
- package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js +95 -7
- package/build/shared/charts/descriptors/voicegateway2/utils/helper.js +18 -11
- package/build/shared/charts/descriptors/voicegateway2/utils/strip-nulls.js +4 -1
- package/build/shared/charts/helpers/generativeAI/getRephraseWithAIFields.js +100 -0
- package/build/shared/charts/helpers/generativeAI/rephraseSentenceWithAi.js +44 -0
- package/build/shared/constants.js +2 -1
- package/build/shared/errors/BadGatewayError.js +2 -1
- package/build/shared/errors/BadRequestError.js +2 -1
- package/build/shared/errors/ForbiddenError.js +2 -1
- package/build/shared/errors/GatewayTimeoutError.js +2 -1
- package/build/shared/errors/MethodNotAllowedError.js +2 -1
- package/build/shared/errors/NetworkError.js +2 -1
- package/build/shared/errors/PayloadTooLargeError.js +2 -1
- package/build/shared/errors/PaymentRequiredError.js +2 -1
- package/build/shared/errors/ServiceUnavailableError.js +2 -1
- package/build/shared/errors/UnauthorizedError.js +2 -1
- package/build/shared/errors/baseError.js +27 -4
- package/build/shared/errors/conflict.js +2 -1
- package/build/shared/errors/databaseConnectError.js +2 -2
- package/build/shared/errors/databaseQueryError.js +2 -2
- package/build/shared/errors/databaseRead.js +2 -2
- package/build/shared/errors/databaseWrite.js +2 -2
- package/build/shared/errors/exportError.js +2 -2
- package/build/shared/errors/fileRead.js +2 -2
- package/build/shared/errors/fileWrite.js +2 -2
- package/build/shared/errors/importError.js +1 -1
- package/build/shared/errors/inputOutputError.js +2 -2
- package/build/shared/errors/internalServerError.js +2 -1
- package/build/shared/errors/invalidArgument.js +2 -2
- package/build/shared/errors/missingArgument.js +2 -2
- package/build/shared/errors/notImplementedError.js +2 -1
- package/build/shared/errors/process.js +2 -2
- package/build/shared/errors/resourceNotFound.js +5 -1
- package/build/shared/errors/smtpConnectError.js +2 -2
- package/build/shared/errors/timeoutError.js +2 -2
- package/build/shared/interfaces/ai.js +16 -0
- package/build/shared/interfaces/analytics/IAnalyticsDataGoals.js +3 -0
- package/build/shared/interfaces/endpointInterface.js +1 -0
- package/build/shared/interfaces/filemanager/IRuntimeFile.js +30 -0
- package/build/shared/interfaces/filemanager/index.js +1 -0
- package/build/shared/interfaces/handover.js +40 -3
- package/build/shared/interfaces/messageAPI/endpoints.js +2 -0
- package/build/shared/interfaces/messageAPI/handover.js +6 -0
- package/build/shared/interfaces/resources/IAgentAssistConfig.js +56 -0
- package/build/shared/interfaces/resources/IConnectionSchema.js +2 -1
- package/build/shared/interfaces/resources/IExtension.js +2 -1
- package/build/shared/interfaces/resources/IFlow.js +2 -1
- package/build/shared/interfaces/resources/ILexicon.js +15 -2
- package/build/shared/interfaces/resources/ILocale.js +25 -3
- package/build/shared/interfaces/resources/INodeDescriptorSet.js +2 -1
- package/build/shared/interfaces/resources/TResourceType.js +3 -0
- package/build/shared/interfaces/resources/intent/IIntent.js +5 -2
- package/build/shared/interfaces/resources/intent/IIntentRelation.js +3 -1
- package/build/shared/interfaces/resources/settings/IAgentSettings.js +9 -4
- package/build/shared/interfaces/resources/settings/IGenerativeAISettings.js +136 -0
- package/build/shared/interfaces/resources/settings/ISharedSettings.js +1 -1
- package/build/shared/interfaces/resources/settings/index.js +7 -1
- package/build/shared/interfaces/resources/yesNoIntent/IYesNoItem.js +5 -5
- package/build/shared/interfaces/restAPI/administration/liveAgent/v2.0/ICognigyLiveAgentMiddleware_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/IAgentAssistConfig_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/ICreateAgentAssistConfigRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/IDeleteAgentAssistConfigRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/IIndexAgentAssistConfigsRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/IReadAgentAssistConfigRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/IUpdateAgentAssistConfigRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/agentAssist/v2.0/index.js +3 -0
- package/build/shared/interfaces/restAPI/resources/flow/v2.0/sentence/IGenerateSentencesRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/locales/v2.0/yesNoIntents/ITrainYesNoIntentsProjectRest_2_0.js +3 -0
- package/build/shared/interfaces/restAPI/resources/locales/v2.0/yesNoIntents/IYesNoIntents_2_0.js +4 -4
- package/build/shared/interfaces/security/IACL.js +1 -1
- package/build/shared/interfaces/security/IPermission.js +2 -0
- package/build/shared/interfaces/security/IRole.js +4 -0
- package/package.json +2 -2
- package/types/index.d.ts +925 -388
package/build/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js

@@ -15,7 +15,7 @@ exports.voiceConfigFields = [
         description: "Defines the minimum number of words that the user must say for the Voice Gateway to consider it a barge in",
         defaultValue: 1,
         params: {
-            min:
+            min: 1,
             max: 5,
             step: 1
         }
@@ -31,7 +31,7 @@ exports.voiceConfigFields = [
         key: "bargeInOnDtmf",
         type: "toggle",
         label: "Barge In On DTMF",
-        description: "Allows the user to interrupt bot output with DTMF.
+        description: "Allows the user to interrupt bot output with DTMF. Enabling Barge in on DTMF will also enable DTMF capture.",
         defaultValue: false
     },
     {
@@ -44,6 +44,12 @@ exports.voiceConfigFields = [
             languageKey: "config.sttLanguage"
         }
     },
+    {
+        key: "sttLanguage",
+        type: "sttSelect",
+        defaultValue: "",
+        label: "_unused_"
+    },
     {
         key: "sttHints",
         type: "textArray",
@@ -51,13 +57,6 @@ exports.voiceConfigFields = [
         description: "Array of words or phrases to assist speech detection",
         defaultValue: [""]
     },
-    {
-        key: "sttVadEnabled",
-        type: "toggle",
-        label: "Enable Voice Activity Detection",
-        description: "Delay connection to cloud recognizer until speech is detected",
-        defaultValue: true
-    },
     {
         key: "sttDisablePunctuation",
         type: "toggle",
@@ -65,6 +64,13 @@ exports.voiceConfigFields = [
         description: "Prevents the STT response from the bot to include punctuation marks",
         defaultValue: false
     },
+    {
+        key: "sttVadEnabled",
+        type: "toggle",
+        label: "Enable Voice Activity Detection",
+        description: "Delay connection to cloud recognizer until speech is detected",
+        defaultValue: false
+    },
     {
         key: "sttVadMode",
         type: "slider",
@@ -116,6 +122,18 @@ exports.voiceConfigFields = [
             voiceKey: "config.ttsVoice"
         }
     },
+    {
+        key: "ttsVoice",
+        type: "ttsSelect",
+        defaultValue: "",
+        label: "_unused_"
+    },
+    {
+        key: "ttsLanguage",
+        type: "ttsSelect",
+        defaultValue: "",
+        label: "_unused_"
+    },
     {
         key: "userNoInputMode",
         type: "select",
@@ -125,13 +143,16 @@ exports.voiceConfigFields = [
         params: {
             options: [
                 {
-                    label: "Speak",
+                    label: "Speak",
+                    value: "speech"
                 },
                 {
-                    label: "Play File",
+                    label: "Play File",
+                    value: "play"
                 },
                 {
-                    label: "Send Event",
+                    label: "Send Event",
+                    value: "event"
                 }
             ]
         }
@@ -143,8 +164,8 @@ exports.voiceConfigFields = [
         description: "Define the timeout for user input in ms",
         defaultValue: 10000,
         params: {
-            min: 1000
-        }
+            min: 1000
+        }
     },
     {
         key: "userNoInputRetries",
@@ -153,8 +174,8 @@ exports.voiceConfigFields = [
         description: "Define how often the bot should retry to get an input from a user before completing the call",
         defaultValue: 1,
         params: {
-            min: 0
-        }
+            min: 0
+        }
     },
     {
         key: "userNoInputSpeech",
@@ -199,12 +220,12 @@ exports.voiceConfigFields = [
         defaultValue: 2000,
         params: {
             min: 1000,
-            max: 60000
+            max: 60000
         },
         condition: {
             key: "dtmfEnable",
             value: true
-        }
+        }
     },
     {
         key: "dtmfMaxDigits",
@@ -213,7 +234,7 @@ exports.voiceConfigFields = [
         description: "Defines the maximum number of digits which can be entered by the user, the digits are being submitted automatically once this limit is reached",
         defaultValue: 1,
         params: {
-            min: 1
+            min: 1
         },
         condition: {
             key: "dtmfEnable",
@@ -227,7 +248,7 @@ exports.voiceConfigFields = [
         description: "Defines the mininum number of digits before they are forwarded to the bot. This can be overridden by a submit digit",
         defaultValue: 1,
         params: {
-            min: 1
+            min: 1
         },
         condition: {
             key: "dtmfEnable",
@@ -270,20 +291,99 @@ exports.voiceConfigFields = [
         description: "Defines the number of milliseconds of silence before the accumulated recognitions are send to the flow",
         defaultValue: 0,
         params: {
-            min: 0
+            min: 0
         },
         condition: {
             key: "asrEnabled",
             value: true
         }
     },
+    {
+        key: "enableAdvancedSTTConfig",
+        type: "toggle",
+        label: "Enable Advanced STT Config",
+        description: "Advanced STT Configuration",
+        defaultValue: false,
+        condition: {
+            key: 'sttVendor',
+            value: 'microsoft'
+        }
+    },
+    {
+        key: "azureSttContextId",
+        type: "cognigyText",
+        label: "Azure STT Context ID",
+        description: "Azure's Custom Speech model deployment ID",
+        defaultValue: "",
+        condition: {
+            and: [
+                {
+                    key: "enableAdvancedSTTConfig",
+                    value: true
+                },
+                {
+                    key: "sttVendor",
+                    value: "microsoft"
+                }
+            ]
+        }
+    },
+    {
+        key: "azureEnableAudioLogging",
+        type: "toggle",
+        label: "Enable Audio Logging",
+        description: "Enables recording and logging of audio from the user on Azure.",
+        defaultValue: false,
+        condition: {
+            and: [
+                {
+                    key: "enableAdvancedSTTConfig",
+                    value: true
+                },
+                {
+                    key: "sttVendor",
+                    value: "microsoft"
+                }
+            ]
+        }
+    },
+    {
+        key: "enableAdvancedTTSConfig",
+        type: "toggle",
+        label: "Enable Advanced TTS Config",
+        description: "Enable Advanced TTS Configuration",
+        defaultValue: false,
+        condition: {
+            key: "ttsVendor",
+            value: "microsoft",
+        }
+    },
+    {
+        key: "azureTtsDeploymentId",
+        type: "cognigyText",
+        label: "Azure Custom Voice Endpoint",
+        description: "Azure's Custom Speech model deployment ID",
+        defaultValue: "",
+        condition: {
+            and: [
+                {
+                    key: "enableAdvancedTTSConfig",
+                    value: true
+                },
+                {
+                    key: "ttsVendor",
+                    value: "microsoft"
+                }
+            ]
+        }
+    }
 ];
 exports.setSessionConfigNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     type: "setSessionConfig",
     defaultLabel: "Set Session Config",
     summary: "Change configuration settings for the call session",
     appearance: {
-        color: design_1.nodeColor
+        color: design_1.nodeColor,
     },
     fields: exports.voiceConfigFields,
     sections: [
@@ -291,19 +391,29 @@ exports.setSessionConfigNode = (0, createNodeDescriptor_1.createNodeDescriptor)(
             key: "params_stt",
             label: "Recognizer (STT)",
             defaultCollapsed: true,
-            fields: [
+            fields: [
+                "sttVendor",
+                "sttHints",
+                "sttDisablePunctuation",
+                "sttVadEnabled",
+                "sttVadMode",
+                "sttVadVoiceMs",
+                "enableAdvancedSTTConfig",
+                "azureSttContextId",
+                "azureEnableAudioLogging"
+            ],
         },
         {
             key: "params_tts",
             label: "Synthesizer (TTS)",
             defaultCollapsed: true,
-            fields: ["ttsVendor"]
+            fields: ["ttsVendor", "enableAdvancedTTSConfig", "azureTtsDeploymentId"],
        },
        {
            key: "params_bargein",
            label: "Barge In",
            defaultCollapsed: true,
-            fields: ["bargeInOnSpeech", "bargeInOnDtmf", "bargeInMinWordCount"]
+            fields: ["bargeInOnSpeech", "bargeInOnDtmf", "bargeInMinWordCount"],
        },
        {
            key: "params_user_timeouts",
@@ -314,21 +424,27 @@ exports.setSessionConfigNode = (0, createNodeDescriptor_1.createNodeDescriptor)(
                "userNoInputSpeech",
                "userNoInputUrl",
                "userNoInputTimeout",
-                "userNoInputRetries"
-            ]
+                "userNoInputRetries",
+            ],
        },
        {
            key: "params_dtmf",
            label: "DTMF",
            defaultCollapsed: true,
-            fields: [
+            fields: [
+                "dtmfEnable",
+                "dtmfInterDigitTimeout",
+                "dtmfMaxDigits",
+                "dtmfMinDigits",
+                "dtmfSubmitDigit",
+            ],
        },
        {
            key: "params_continuous_asr",
            label: "Continuous ASR",
            defaultCollapsed: true,
            fields: ["asrEnabled", "asrDigit", "asrTimeout"],
-        }
+        },
    ],
    form: [
        { type: "section", key: "params_tts" },
@@ -337,18 +453,19 @@ exports.setSessionConfigNode = (0, createNodeDescriptor_1.createNodeDescriptor)(
        { type: "section", key: "params_user_timeouts" },
        { type: "section", key: "params_dtmf" },
        { type: "section", key: "params_continuous_asr" },
+        { type: "section", key: "params_azure_config" },
    ],
    function: async ({ cognigy, config }) => {
+        const { api } = cognigy;
        try {
-            const payload = setSessionConfig_mapper_1.setSessionConfig.handleVGInput((0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config,
-
+            const payload = setSessionConfig_mapper_1.setSessionConfig.handleVGInput((0, setSessionConfig_mapper_2.voiceConfigParamsToVoiceSettings)(config, api));
+            api.say(null, {
                _cognigy: payload,
            });
        }
        catch (error) {
            throw new Error(`[VG2] Error on set session config node. Error message: ${error.message}`);
        }
-
-    }
+    },
 });
 //# sourceMappingURL=setSessionConfig.js.map
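The new Azure-specific fields above are gated by composite `and` conditions. As a rough illustration only (not Cognigy's actual UI logic, which is not part of this package), a condition object of that shape could be evaluated against the node's current config like this:

```js
// Hypothetical evaluator for the field "condition" objects shown above.
function isFieldVisible(condition, config) {
    if (!condition) return true; // no condition: field is always visible
    if (Array.isArray(condition.and)) {
        return condition.and.every((c) => isFieldVisible(c, config));
    }
    if (Array.isArray(condition.or)) {
        return condition.or.some((c) => isFieldVisible(c, config));
    }
    return config[condition.key] === condition.value; // simple key/value check
}

// "azureSttContextId" only shows when both gating values match:
isFieldVisible(
    { and: [{ key: "enableAdvancedSTTConfig", value: true }, { key: "sttVendor", value: "microsoft" }] },
    { enableAdvancedSTTConfig: true, sttVendor: "microsoft" }
); // -> true
```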
package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js

@@ -12,7 +12,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     type: "transfer",
     defaultLabel: "Transfer",
     preview: {
-        key: "
+        key: "transferTarget",
         type: "text",
     },
     appearance: {
@@ -20,7 +20,26 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     },
     fields: [
         {
-            key: "
+            key: "transferType",
+            label: "Transfer Type",
+            type: "select",
+            params: {
+                options: [
+                    {
+                        label: "Refer",
+                        value: "refer"
+                    },
+                    {
+                        label: "Dial",
+                        value: "dial"
+                    }
+                ]
+            },
+            defaultValue: "refer",
+            description: "Refer the call or create a new one",
+        },
+        {
+            key: "transferTarget",
             label: "Target",
             type: "cognigyText",
             params: {
@@ -29,6 +48,16 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
             description: "E.164 syntax or a SIP URI are supported",
             defaultValue: "+49123456789",
         },
+        {
+            key: "transferReason",
+            label: "Reason",
+            type: "cognigyText",
+            params: {
+                required: true,
+            },
+            description: "Reason for the transfer",
+            defaultValue: "Bot initiated a transfer.",
+        },
         {
             key: "useTransferSipHeaders",
             label: "Custom Transfer SIP Headers",
@@ -49,6 +78,37 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
                 value: true,
             },
         },
+        /* Dial specific */
+        {
+            key: "dialCallerId",
+            label: "Caller ID",
+            type: "text",
+            description: "The caller ID, some carriers like Twilio require a registered number for outgoing calls.",
+            condition: {
+                key: "transferType",
+                value: "dial",
+            },
+        },
+        {
+            key: "dialMusic",
+            label: "Dial Music",
+            type: "text",
+            description: "URL to a .wav or .mp3 audio file of custom audio or ringback to play to the caller while the outbound call is ringing",
+            condition: {
+                key: "transferType",
+                value: "dial",
+            },
+        },
+        {
+            key: "dialTranscriptionWebhook",
+            label: "Transcription Webhook",
+            type: "text",
+            description: "Webhook to receive an HTTP POST when an interim or final transcription is received. Uses the default recognizer.",
+            condition: {
+                key: "transferType",
+                value: "dial",
+            },
+        }
     ],
     sections: [
         {
@@ -61,7 +121,27 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     form: [
         {
             type: "field",
-            key: "
+            key: "transferType",
+        },
+        {
+            type: "field",
+            key: "transferReason",
+        },
+        {
+            type: "field",
+            key: "transferTarget",
+        },
+        {
+            type: "field",
+            key: "dialCallerId",
+        },
+        {
+            type: "field",
+            key: "dialMusic",
+        },
+        {
+            type: "field",
+            key: "dialTranscriptionWebhook",
         },
         {
             type: "section",
@@ -70,15 +150,23 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
     ],
     summary: "Transfer the call to another target",
     function: async ({ cognigy, config }) => {
-        const { api } = cognigy;
-        const {
+        const { api, input } = cognigy;
+        const { transferType, transferTarget, useTransferSipHeaders, transferSipHeaders, transferReason, dialMusic, dialTranscriptionWebhook, dialCallerId } = config;
         const transferParams = {
-
-
+            transferType,
+            transferReason,
+            transferTarget,
             useTransferSipHeaders,
             transferSipHeaders,
+            dialMusic,
+            dialCallerId,
+            dialTranscriptionWebhook,
         };
         try {
+            if (input.channel === "adminconsole") {
+                api.say("Transferring a call is not supported in the Interaction Panel, please use the VoiceGateway endpoint.", null);
+                return;
+            }
             if (useTransferSipHeaders &&
                 transferSipHeaders &&
                 !(0, helper_1.isValidJSON)(transferSipHeaders)) {
package/build/shared/charts/descriptors/voicegateway2/utils/helper.js

@@ -1,20 +1,27 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.isValidJSON = exports.cleanTarget = void 0;
-function cleanTarget(
-    if (
-
-
+function cleanTarget(rawTarget, isPrefixRequired) {
+    if (rawTarget.includes("@")) {
+        // handle sip refer
+        const sip = rawTarget.replace(/\s/g, "");
+        if (rawTarget.substring(0, 4) === "sip:")
             return sip;
         return `sip:${sip}`;
     }
-
-    let
-    if (
-
-
-
-
+    // handle telephone refer
+    let processedTarget = rawTarget;
+    if (processedTarget.toLowerCase().startsWith("tel:")) {
+        // remove "tel:" first
+        processedTarget = processedTarget.slice(4);
+    }
+    // clear up invalid chars
+    processedTarget = processedTarget.replace(/\s|\(|\)|\-/g, "");
+    if (processedTarget.substring(0, 2) === "00") {
+        // replace "00" with "+"
+        processedTarget = `+${processedTarget.slice(2)}`;
+    }
+    return isPrefixRequired ? "tel:" + processedTarget : processedTarget;
 }
 exports.cleanTarget = cleanTarget;
 function isValidJSON(json) {
package/build/shared/charts/descriptors/voicegateway2/utils/strip-nulls.js

@@ -3,7 +3,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.stripNulls = void 0;
 function stripNulls(data) {
     Object.entries(data).forEach(([key, value]) => {
-        if (typeof value === "undefined" ||
+        if (typeof value === "undefined" ||
+            typeof value === null ||
+            (typeof value === "string" &&
+                value.length == 0)) {
             delete data[key];
         }
         if (typeof value === "object" && (typeof value !== "number" || typeof value !== "string")) {
package/build/shared/charts/helpers/generativeAI/getRephraseWithAIFields.js

@@ -0,0 +1,100 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getRephraseWithAISection = exports.getRephraseWithAIFields = void 0;
+/**
+ * @returns The fields for rephraseWithAI
+ */
+const getRephraseWithAIFields = () => {
+    const fields = [
+        {
+            key: "generativeAI_rephraseOutputMode",
+            type: "select",
+            label: "Rephrase Output",
+            params: {
+                required: true,
+                options: [
+                    {
+                        label: "None",
+                        value: "none"
+                    },
+                    {
+                        label: "Based on Previous User Inputs",
+                        value: "userInputs"
+                    },
+                    {
+                        label: "Based on Custom Input",
+                        value: "customInputs"
+                    },
+                ],
+            },
+            defaultValue: "none",
+        },
+        {
+            key: "generativeAI_amountOfLastUserInputs",
+            type: "slider",
+            label: "Number of Previous User Inputs",
+            params: {
+                min: 1,
+                max: 10,
+                step: 1,
+            },
+            defaultValue: 5,
+            condition: {
+                key: "generativeAI_rephraseOutputMode",
+                value: "userInputs",
+            },
+        },
+        {
+            key: "generativeAI_customInputs",
+            type: "cognigyTextArray",
+            label: "Custom Inputs",
+            condition: {
+                key: "generativeAI_rephraseOutputMode",
+                value: "customInputs",
+            },
+        },
+        {
+            key: "generativeAI_temperature",
+            type: "slider",
+            label: "Temperature",
+            params: {
+                min: 0,
+                max: 1,
+                step: 0.1,
+            },
+            defaultValue: 0.7,
+            condition: {
+                or: [
+                    {
+                        key: "generativeAI_rephraseOutputMode",
+                        value: "userInputs",
+                    },
+                    {
+                        key: "generativeAI_rephraseOutputMode",
+                        value: "customInputs",
+                    }
+                ],
+            },
+        },
+    ];
+    return fields;
+};
+exports.getRephraseWithAIFields = getRephraseWithAIFields;
+/**
+ * @returns The section for rephraseWithAI
+ */
+const getRephraseWithAISection = () => {
+    return {
+        key: "aiEnhancedOutput",
+        label: "AI-enhanced output",
+        defaultCollapsed: true,
+        fields: [
+            "generativeAI_rephraseOutputMode",
+            "generativeAI_amountOfLastUserInputs",
+            "generativeAI_customInputs",
+            "generativeAI_temperature",
+        ]
+    };
+};
+exports.getRephraseWithAISection = getRephraseWithAISection;
+//# sourceMappingURL=getRephraseWithAIFields.js.map
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.rephraseMultipleSentencesWithAI = exports.rephraseSentenceWithAI = void 0;
|
|
4
|
+
const rephraseSentenceWithAI = async (sentence, config, api, organisationId) => {
|
|
5
|
+
const { generativeAI_rephraseOutputMode } = config;
|
|
6
|
+
if (generativeAI_rephraseOutputMode === "userInputs" || generativeAI_rephraseOutputMode === "customInputs") {
|
|
7
|
+
sentence = await api.rephraseSentenceWithAI(sentence, createRephraseSentenceWithAIOptions(config));
|
|
8
|
+
}
|
|
9
|
+
return sentence;
|
|
10
|
+
};
|
|
11
|
+
exports.rephraseSentenceWithAI = rephraseSentenceWithAI;
|
|
12
|
+
const rephraseMultipleSentencesWithAI = async (sentences, config, api, organisationId) => {
|
|
13
|
+
const { generativeAI_rephraseOutputMode, generativeAI_amountOfLastUserInputs, generativeAI_customInputs, generativeAI_temperature } = config;
|
|
14
|
+
if (generativeAI_rephraseOutputMode === "userInputs" || generativeAI_rephraseOutputMode === "customInputs") {
|
|
15
|
+
sentences = await api.rephraseMultipleSentencesWithAI(sentences, createRephraseSentenceWithAIOptions(config));
|
|
16
|
+
}
|
|
17
|
+
return sentences;
|
|
18
|
+
};
|
|
19
|
+
exports.rephraseMultipleSentencesWithAI = rephraseMultipleSentencesWithAI;
|
|
20
|
+
const createRephraseSentenceWithAIOptions = (params) => {
|
|
21
|
+
const options = {
|
|
22
|
+
promptType: params.promtType,
|
|
23
|
+
questionType: params.questionType,
|
|
24
|
+
temperature: params.generativeAI_temperature,
|
|
25
|
+
question: params.question,
|
|
26
|
+
answer: params.answer,
|
|
27
|
+
};
|
|
28
|
+
switch (params.generativeAI_rephraseOutputMode) {
|
|
29
|
+
case "none":
|
|
30
|
+
break;
|
|
31
|
+
case "userInputs":
|
|
32
|
+
options.useLastUserInputs = true;
|
|
33
|
+
options.amountOfUserInputs = params.generativeAI_amountOfLastUserInputs;
|
|
34
|
+
return options;
|
|
35
|
+
case "customInputs":
|
|
36
|
+
options.useLastUserInputs = false;
|
|
37
|
+
options.customInputs = params.generativeAI_customInputs;
|
|
38
|
+
return options;
|
|
39
|
+
default:
|
|
40
|
+
break;
|
|
41
|
+
}
|
|
42
|
+
return {};
|
|
43
|
+
};
|
|
44
|
+
//# sourceMappingURL=rephraseSentenceWithAi.js.map
|