@tiledesk/tiledesk-voice-twilio-connector 0.1.22 → 0.1.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +98 -28
- package/logs/app.log +2950 -0
- package/package.json +3 -2
- package/tiledesk/TiledeskTwilioTranslator.js +20 -7
- package/tiledesk/VoiceChannel.js +52 -1
- package/tiledesk/constants.js +4 -3
- package/tiledesk/fileUtils.js +3 -2
- package/tiledesk/services/AiService.js +93 -5
- package/tiledesk/services/IntegrationService.js +3 -4
- package/tiledesk/services/UploadService.js +2 -1
package/index.js
CHANGED
@@ -58,6 +58,7 @@ let API_URL = null;
 let BASE_URL = null;
 let BASE_FILE_URL = null;
 let OPENAI_ENDPOINT = null;
+let ELEVENLABS_ENDPOINT = null;
 let REDIS_HOST = null;
 let REDIS_PORT = null;
 let REDIS_PASSWORD = null;
@@ -164,7 +165,7 @@ router.post('/webhook/:id_project', async (req, res) => {
     winston.debug("(voice) conversation returned:"+ conversation_id);


-
+    //GET AND SAVE GPT-KET IF
     let integrations = [], publicKey = false;
     let key = await integrationService.getKeyFromIntegrations(project_id, 'openai', settings.token)
     if (!key) {
@@ -179,6 +180,12 @@ router.post('/webhook/:id_project', async (req, res) => {
     }
     integrations.push({type: 'openai', key: key, publicKey: publicKey})

+    let eleven_labs = await integrationService.getKeyFromIntegrations(project_id, 'elevenlabs', settings.token)
+    if (eleven_labs) {
+        winston.debug("(voice) - Key found in Integrations: "+ eleven_labs);
+        integrations.push({type: 'elevenlabs', key: eleven_labs, publicKey: false})
+    }
+
     //save data to redis
     let session_data = {
         from: from,
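
The webhook now resolves an ElevenLabs key through the same `getKeyFromIntegrations` path used for OpenAI and carries it in the session. The entries pushed above give the `integrations` array the shape that `generateSTT` later searches by `type`; a sketch of that shape, with placeholder key values:

    // Shape of the session integrations array built above (values are placeholders).
    // generateSTT() later looks entries up by provider type, e.g.
    //   sessionInfo.integrations.find(el => el.type === 'elevenlabs')?.key
    let integrations = [
        { type: 'openai',     key: 'sk-...', publicKey: true  }, // publicKey marks the shared Tiledesk key
        { type: 'elevenlabs', key: 'xi-...', publicKey: false }  // pushed only when a key is configured
    ];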
@@ -207,7 +214,7 @@ router.post('/webhook/:id_project', async (req, res) => {
         attributes: {
             subtype: 'info',
             payload: {
-                ... req.
+                ... req.body //send all attributes back to chatbot
             }
         },
         channel: { name: CHANNEL_NAME },
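
The removed line is truncated by the diff viewer, but the replacement spreads the entire Twilio webhook body into the info message payload, so the chatbot receives every attribute Twilio posted. For a standard Twilio voice webhook that means roughly:

    // Illustrative payload after `...req.body`, using standard Twilio voice webhook fields.
    payload: {
        CallSid: 'CAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
        From: '+15550100001',
        To: '+15550100002',
        CallStatus: 'ringing',
        Direction: 'inbound'
        // ...plus any other field in the POST body
    }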
@@ -321,12 +328,12 @@ router.post('/nextblock/:callSid/', async(req, res) => {
     // convert response to vxml
     let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo)
     winston.debug("(voice) VXML to SEND: "+ messageToVXML);
-
+
     let end_call = new Date()
     console.log('Time to responde to /nextblock/:callSid : ', end_call-start_call, '[ms]')

     // Render the response as XML in reply to the webhook request
-    res.set('Content-Type', '
+    res.set('Content-Type', 'application/xml;');
     res.status(200).send(messageToVXML);

 })
@@ -363,6 +370,8 @@ async function getMessage(callSid, from, project_id, conversation_id){
     await tdChannel.removeMessageFromQueue(conversation_id, message._id)
     //reset delayIndex for wait command message time
     await voiceChannel.clearDelayTimeForCallId(callSid)
+    //manage attributes for current callId
+    await voiceChannel.saveSettingsForCallId(message.attributes, callSid)

     resolve(message);
     return;
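
Message attributes are now persisted per `callSid`, so the `/record` handler below can read back the chatbot's voice settings via `getSettingsForCallId`. `VoiceChannel.js` changes in this release (+52 -1) but its body is not in this diff; a minimal sketch of the pair, assuming the package's existing Redis client and a hypothetical key scheme:

    // Hypothetical sketch of the VoiceChannel helpers (node-redis v4 style client assumed).
    async saveSettingsForCallId(attributes, callSid) {
        // persist the message attributes as JSON under a per-call key
        await this.redis.set('voice:settings:' + callSid, JSON.stringify(attributes || {}));
    }

    async getSettingsForCallId(callSid) {
        const raw = await this.redis.get('voice:settings:' + callSid);
        return raw ? JSON.parse(raw) : {};
    }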
@@ -918,23 +927,16 @@ router.post('/record/:callSid/',async (req, res) => {
         return res.status(404).send({error: "VOICE Channel not already connected"})
     }

-    //SPEECH TO TEXT
-    let key = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPEN_AI))?.key
-    let publicKey = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPEN_AI))?.publicKey
-    //check quotes if user is using public GPT_KEY
-    if(publicKey){
-        let keep_going = await aiService.checkQuoteAvailability(project_id, user.token);
-        if(!keep_going){
-            //no toke is available
-        }
-    }

-    let
-
-    })
-    console.log('(voice) Message captured after STT -->', textMessage)
+    let attributes = await voiceChannel.getSettingsForCallId(callSid);
+    console.log('attributessss', attributes)

-
+    //SPEECH TO TEXT
+    console.log('getting text message . . . ', audioFileUrl, attributes.STT_MODEL)
+    let tiledeskMessage = await generateSTT(audioFileUrl, attributes, sessionInfo, settings)
+    console.log('(voice) Message captured after STT -->', tiledeskMessage)
+
+    if(!tiledeskMessage){
         //case NO_INPUT
         const queryString = utils.buildQueryString(req.query);
         winston.debug('case no input.. redirect '+ queryString)
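
The rewritten handler drives speech-to-text from the per-call attributes saved in `getMessage` instead of hardcoding OpenAI. Only `VOICE_PROVIDER`, `STT_MODEL`, and `TTS_LANGUAGE` appear in this diff; an illustrative attributes object (values are examples, not taken from the package):

    let attributes = {
        VOICE_PROVIDER: 'openai',   // or 'elevenlabs'; actual constants live in tiledesk/constants.js
        STT_MODEL: 'whisper-1',     // example model id
        TTS_LANGUAGE: 'en'          // the ElevenLabs branch falls back to 'en'
    };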
@@ -953,13 +955,7 @@ router.post('/record/:callSid/',async (req, res) => {
         })
     }

-
-    let tiledeskMessage= {
-        text:textMessage,
-        senderFullname: from,
-        type: 'text',
-        channel: { name: CHANNEL_NAME }
-    };
+
     let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
     winston.debug("message sent : ", tdMessage);
@@ -979,6 +975,77 @@ router.post('/record/:callSid/',async (req, res) => {

 })

+async function generateSTT(audioFileUrl, attributes, sessionInfo, settings){
+
+    winston.debug("(voice) generateSTT: "+ attributes.VOICE_PROVIDER);
+
+    let tiledeskMessage = {}, text = null;
+    switch(attributes.VOICE_PROVIDER){
+        case VOICE_PROVIDER.OPENAI:
+            let GPT_KEY = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.key
+            let publicKey = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.publicKey
+            if(publicKey){
+                let keep_going = await aiService.checkQuoteAvailability(sessionInfo.project_id, settings.token).catch((err)=>{
+                    winston.error('errr while checkQuoteAvailability for project:', sessionInfo.project_id, err.response?.data)
+                })
+                winston.verbose('(voice) checkQuoteAvailability return: '+ keep_going);
+                if(!keep_going){
+                    //no token is available --> close conversation
+                    return tiledeskMessage= {
+                        //text:'\\close',
+                        text:'/close',
+                        senderFullname: sessionInfo.from,
+                        type: 'text',
+                        channel: { name: CHANNEL_NAME },
+                        attributes: {
+                            subtype: "info",
+                            action: 'close'+JSON.stringify({event: 'quota_exceeded'}),
+                            payload: {
+                                catchEvent: 'quota_exceeded'
+                            },
+                            timestamp: 'xxxxxx'
+                        }
+                    };
+
+                }
+            }
+
+            text = await aiService.speechToText(audioFileUrl, attributes.STT_MODEL, GPT_KEY).catch((err)=>{
+                winston.error('errr while transcript', err.response?.data)
+            })
+            tiledeskMessage= {
+                text: text,
+                senderFullname: sessionInfo.from,
+                type: 'text',
+                channel: { name: CHANNEL_NAME }
+            };
+            break;
+        case VOICE_PROVIDER.ELEVENLABS:
+            let ELEVENLABS_APIKEY = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.ELEVENLABS))?.key
+            // The condition in the method inputs is correct, but it can be written more readably:
+            const ttsLanguage = attributes.TTS_LANGUAGE || 'en';
+            text = await this.aiService.speechToTextElevenLabs(
+                audioFileUrl,
+                attributes.STT_MODEL,
+                ttsLanguage,
+                ELEVENLABS_APIKEY
+            ).catch((err) => {
+                winston.error('errr while creating elevenlabs audio message', err?.response?.data);
+            });
+            tiledeskMessage= {
+                text: text,
+                senderFullname: sessionInfo.from,
+                type: 'text',
+                channel: { name: CHANNEL_NAME }
+            };
+            break;
+
+    }
+
+    return tiledeskMessage
+}
+
+

 router.get('/addon/transcript', async (req, res) => {
     winston.verbose("(vxml) called GET /transcript query-->" , req.query);
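
Two things stand out in the new function. The `ELEVENLABS` branch calls `this.aiService.speechToTextElevenLabs(...)`, but `generateSTT` is a plain module-level function, so `this` does not point at the module's `aiService` instance and that branch will likely throw at runtime (`aiService.speechToTextElevenLabs(...)`, as in the OpenAI branch, appears intended). The comment above `ttsLanguage` (translated from Italian) also reads like leftover review text. `AiService.js` gains the new method in this release (+93 -5) but is not shown here; a hypothetical sketch, assuming axios, form-data, and the ElevenLabs speech-to-text endpoint:

    // Hypothetical sketch of AiService.speechToTextElevenLabs; parameter order mirrors
    // the call site above. Endpoint and field names assume the ElevenLabs STT API.
    const axios = require('axios');
    const FormData = require('form-data');

    async function speechToTextElevenLabs(audioFileUrl, sttModel, language, apiKey) {
        // fetch the Twilio recording and forward it as multipart form data
        const audio = await axios.get(audioFileUrl, { responseType: 'stream' });
        const form = new FormData();
        form.append('file', audio.data, { filename: 'recording.wav' });
        form.append('model_id', sttModel);
        form.append('language_code', language);
        const res = await axios.post('https://api.elevenlabs.io/v1/speech-to-text', form, {
            headers: { ...form.getHeaders(), 'xi-api-key': apiKey }
        });
        return res.data.text; // transcribed text
    }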
@@ -1225,12 +1292,14 @@ async function startApp(settings, callback) {
     if(settings.OPENAI_ENDPOINT){
         OPENAI_ENDPOINT = settings.OPENAI_ENDPOINT
     }
+    if(settings.ELEVENLABS_ENDPOINT){
+        ELEVENLABS_ENDPOINT = settings.ELEVENLABS_ENDPOINT
+    }

     if(settings.MAX_POLLING_TIME){
-        MAX_POLLING_TIME = settings.MAX_POLLING_TIME;
+        MAX_POLLING_TIME = settings.MAX_POLLING_TIME/10000; //convert in seconds
     }

-
     if (settings.REDIS_HOST && settings.REDIS_PORT) {
         REDIS_HOST = settings.REDIS_HOST;
         REDIS_PORT = settings.REDIS_PORT;
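
The comment on the new `MAX_POLLING_TIME` line says "convert in seconds", but dividing milliseconds by 10000 yields tens of seconds: a 30000 ms setting becomes 30000/10000 = 3, whereas seconds would be 30000/1000 = 30. Either the divisor or the comment is off by a factor of ten; worth verifying against how `MAX_POLLING_TIME` is consumed.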
@@ -1249,6 +1318,7 @@ async function startApp(settings, callback) {
     //init Services
     aiService = new AiService({
         OPENAI_ENDPOINT: OPENAI_ENDPOINT,
+        ELEVENLABS_ENDPOINT: ELEVENLABS_ENDPOINT,
         API_URL: API_URL
     })
     integrationService = new IntegrationService({
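
Both provider endpoints now flow into `AiService` at startup. Its constructor is not shown in this diff; a minimal sketch of the config it receives and of the OpenAI `speechToText` path used by `generateSTT`, assuming an OpenAI-compatible transcription endpoint (only the option names come from this diff):

    // Hypothetical AiService shape; endpoint defaults are illustrative.
    const axios = require('axios');
    const FormData = require('form-data');

    class AiService {
        constructor({ OPENAI_ENDPOINT, ELEVENLABS_ENDPOINT, API_URL }) {
            this.OPENAI_ENDPOINT = OPENAI_ENDPOINT;         // e.g. 'https://api.openai.com/v1'
            this.ELEVENLABS_ENDPOINT = ELEVENLABS_ENDPOINT; // e.g. 'https://api.elevenlabs.io/v1'
            this.API_URL = API_URL;
        }

        async speechToText(audioFileUrl, model, apiKey) {
            // fetch the recording and submit it to the transcription endpoint
            const audio = await axios.get(audioFileUrl, { responseType: 'stream' });
            const form = new FormData();
            form.append('file', audio.data, { filename: 'recording.wav' });
            form.append('model', model); // e.g. 'whisper-1'
            const res = await axios.post(this.OPENAI_ENDPOINT + '/audio/transcriptions', form, {
                headers: { ...form.getHeaders(), Authorization: 'Bearer ' + apiKey }
            });
            return res.data.text;
        }
    }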