@tiledesk/tiledesk-voice-twilio-connector 0.1.26-rc9 → 0.1.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -73,7 +73,7 @@ let start1= ''
  let time = null;
 
  /*UTILS*/
- const utilsMess = require('./tiledesk/utils-message.js')
+ const utilsMessage = require('./tiledesk/utils-message.js')
  const TYPE_MESSAGE = require('./tiledesk/constants').TYPE_MESSAGE
 
  let messageTimeout = null;
@@ -100,24 +100,19 @@ router.post("/tiledesk", async (req, res) => {
 
  /*SKIP INFO MESSAGES*/
  /*SKIP CURRENT USER MESSAGES*/
- if(!utilsMess.messageType(TYPE_MESSAGE.INFO, tiledeskMessage) && !(tiledeskMessage.sender.indexOf("vxml") > -1) ){
- winston.verbose("> whook SAVE MESSAGE TO QUEUE " + JSON.stringify(tiledeskMessage) );
- start1 = new Date().getTime();
- console.log("(WH) time: ", new Date().getTime() - time)
- console.log("(WH) received message: ", tiledeskMessage.text, ' --- at time:', new Date(), start1)
+ if(!utilsMessage.messageType(TYPE_MESSAGE.INFO, tiledeskMessage) && !(tiledeskMessage.sender.indexOf("vxml") > -1) ){
+ winston.verbose(`> whook SAVE MESSAGE "${tiledeskMessage.text}" TO QUEUE at time ` + new Date() );
  }
-
+
  await tdChannel.addMessageToQueue(tiledeskMessage)
 
  res.send("(voice) Message received from Voice Twilio Proxy");
-
-
  });
 
  // TWILIO WEBHOOK : message from user to tiledesk
  router.post('/webhook/:id_project', async (req, res) => {
- winston.debug('(voice) called POST /webhook/:id_project '+ new Date(), req.params)
  let start_call = new Date().getTime();
+ winston.debug('(voice) called POST /webhook/:id_project '+ new Date(), req.params)
 
  let project_id = req.params.id_project;
  let callSid = req.body.CallSid;
@@ -137,6 +132,8 @@ router.post('/webhook/:id_project', async (req, res) => {
  }
 
  let vxmlAttributes = {
+ TTS_VOICE_LANGUAGE: VOICE_LANGUAGE,
+ TTS_VOICE_NAME: VOICE_NAME,
  callSid: callSid
  };
 
@@ -148,7 +145,8 @@ router.post('/webhook/:id_project', async (req, res) => {
 
  const tdTranslator = new TiledeskTwilioTranslator({
  BASE_URL: BASE_URL,
- aiService: aiService
+ aiService: aiService,
+ uploadService: uploadService
  });
 
  let start2 = new Date().getTime();
@@ -158,32 +156,33 @@ router.post('/webhook/:id_project', async (req, res) => {
  return;
  }
  let end2 = new Date().getTime();
- console.log('Time after signIn: ', end2-start2, '[ms]')
 
  //let conversation_id = await tdChannel.getConversation(ani, callId, user.token);
  let conversation_id = await tdChannel.generateConversation(from, callSid, user.token);
  winston.debug("(voice) conversation returned:"+ conversation_id);
 
-
- //GET AND SAVE GPT-KET IF
  let integrations = [], publicKey = false;
- let key = await integrationService.getKeyFromIntegrations(project_id, 'openai', settings.token)
- if (!key) {
- winston.debug("(voice) - Key not found in Integrations. Searching in kb settings...");
- key = await integrationService.getKeyFromKbSettings(project_id, settings.token);
- }
- if (!key) {
- winston.debug("(voice) - Retrieve public gptkey")
- key = GPT_KEY;
- publicKey = true;
+ try {
+ //GET AND SAVE GPT-KET IF
+ let key = await integrationService.getKeyFromIntegrations(project_id, 'openai', settings.token)
+ if (!key) {
+ winston.debug("(voice) - Key not found in Integrations. Searching in kb settings...");
+ key = await integrationService.getKeyFromKbSettings(project_id, settings.token);
+ }
+ if (!key) {
+ winston.debug("(voice) - Retrieve public gptkey")
+ key = GPT_KEY;
+ publicKey = true;
+ }
+ integrations.push({type: 'openai', key: key, publicKey: publicKey})
 
- }
- integrations.push({type: 'openai', key: key, publicKey: publicKey})
-
- let eleven_labs = await integrationService.getKeyFromIntegrations(project_id, 'elevenlabs', settings.token)
- if (eleven_labs) {
- winston.debug("(voice) - Key found in Integrations: "+ eleven_labs);
- integrations.push({type: 'elevenlabs', key: eleven_labs, publicKey: false})
+ let eleven_labs = await integrationService.getKeyFromIntegrations(project_id, 'elevenlabs', settings.token)
+ if (eleven_labs) {
+ winston.debug("(voice) - Key found in Integrations: "+ eleven_labs);
+ integrations.push({type: 'elevenlabs', key: eleven_labs, publicKey: false})
+ }
+ } catch (error) {
+ winston.error('(voice) - Error retrieving integrations keys:', error);
  }
 
  //save data to redis
@@ -229,7 +228,7 @@ router.post('/webhook/:id_project', async (req, res) => {
  let start_time_get_message = new Date()
  let message = await getMessage(callSid, from, project_id, conversation_id)
  let end_time_get_message = new Date()
- winston.verbose('Time to getMessage from queue in /:project_id/start : ' + end_time_get_message-start_time_get_message + '[ms] --- at time:' + new Date())
+ winston.verbose(`Time to getMessage from queue in /webhook/:${project_id} : ${(end_time_get_message-start_time_get_message)}[ms] --- at time:` + new Date())
 
  // //generate Tiledesk wait message
  // let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
@@ -239,16 +238,14 @@ router.post('/webhook/:id_project', async (req, res) => {
 
  // send standard wait vxml message
  let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, session_data)
- winston.debug('(voice) messageVXML-->'+ messageToVXML)
+ winston.debug('(voice) /webhook/:id_project messageVXML-->'+ messageToVXML)
 
+ let end_call = new Date().getTime();
+ winston.info(`Time to respond to /webhook/${project_id}: ${(end_call-start_call)}[ms]`)
 
  // Render the response as XML in reply to the webhook request
  res.set('Content-Type', 'text/xml');
  res.status(200).send(messageToVXML);
-
- let end_call = new Date().getTime();
- console.log('Time to responde to /webhook/:id_project : ', end_call-start_call, '[ms]')
-
  });
 
 
@@ -256,7 +253,6 @@ router.post('/nextblock_old/:callSid/', async(req, res) => {
  let start_call = new Date()
  winston.debug("(voice) called POST /nextblock ", req.body);
  winston.debug("(voice) called POST /nextblock query ", req.query);
- console.log('/nextblock at: ', new Date(), 'with text:', req.body.SpeechResult)
 
  let usertext = req.body.SpeechResult;
  let confidence = req.body.Confidence
@@ -343,9 +339,7 @@ router.post('/nextblock_old/:callSid/', async(req, res) => {
 
  router.post('/nextblock/:callSid/', async(req, res) => {
  let start_call = new Date()
- winston.debug("(voice) called POST /nextblock ", req.body);
- winston.debug("(voice) called POST /nextblock query ", req.query);
- console.log("(voice) called POST /nextblock at" + new Date() + "with text: " + req.body.SpeechResult)
+ winston.verbose("(voice) called POST /nextblock at " + new Date() + "with text: "+ req.body.SpeechResult);
 
  let usertext = req.body.SpeechResult;
  let confidence = req.body.Confidence
@@ -389,13 +383,13 @@ router.post('/nextblock/:callSid/', async(req, res) => {
 
  let start_promise_message= new Date();
  let message = await new Promise(async (resolve, reject) => {
- winston.debug('******* user text -->'+ usertext)
+ winston.debug('(voice) ******* user text -->'+ usertext)
  if(usertext === '' || !usertext){
 
  let start_time_get_message = new Date()
  let message = await getMessage(callSid, from, project_id, conversation_id)
  let end_time_get_message = new Date()
- winston.verbose(`(if) Time to getMessage from queue in /nextblock/${callSid} : ` + end_time_get_message-start_time_get_message + '[ms]')
+ winston.verbose(`(if) Time to getMessage from queue in /nextblock/${callSid} : ${(end_time_get_message-start_time_get_message)}[ms]`)
  resolve(message)
  }else{
  //CASE:usertext is not empty and queue is empty --> send message to tiledesk and manage delayTime
@@ -409,13 +403,12 @@ router.post('/nextblock/:callSid/', async(req, res) => {
  let start_time_send_message = new Date()
  let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
  let end_time_send_message = new Date()
- winston.debug("message sent : ", tdMessage);
- winston.verbose(`(else) Time to send message to tiledesk in /nextblock/${callSid} : ` + end_time_send_message-start_time_send_message + '[ms] with text ' + tdMessage.text + ' --- at time:' + new Date())
+ winston.verbose(`(else) Time to send message to tiledesk in /nextblock/${callSid} : ${(end_time_send_message-start_time_send_message)}[ms] with text ` + tdMessage.text + ' --- at time:' + new Date())
 
  let start_time_get_message = new Date()
  let message = await getMessage(callSid, from, project_id, conversation_id)
  let end_time_get_message = new Date()
- winston.verbose(`(else) Time to getMessage from queue in /nextblock/${callSid} : ` + end_time_get_message-start_time_get_message + '[ms] --- at time:' + new Date())
+ winston.verbose(`(else) Time to getMessage from queue in /nextblock/${callSid} : ${(end_time_get_message-start_time_get_message)}[ms]` + ' --- at time:' + new Date())
  resolve(message)
 
  //generate Tiledesk wait message
@@ -427,14 +420,14 @@ router.post('/nextblock/:callSid/', async(req, res) => {
  }
  })
  let end_promise_message = new Date()
- winston.verbose(`Time to manage message in Promise /nextblock/${callSid}: ` + end_promise_message-start_promise_message + '[ms] with text' + message.text + ' --- at time --' + new Date())
+ winston.verbose(`Time to manage message in Promise /nextblock/${callSid}: ${(end_promise_message-start_promise_message)}[ms]` + ' with text:' + message.text + ' --- at time --' + new Date())
 
  // convert response to vxml
  let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo)
  winston.debug("(voice) VXML to SEND: "+ messageToVXML);
 
  let end_call = new Date()
- console.log(`Time to responde to /nextblock/${callSid} : `, end_call-start_call, '[ms]')
+ winston.info(`Time to respond to /nextblock/${callSid} : ${(end_call-start_call)} [ms]`)
 
  // Render the response as XML in reply to the webhook request
  res.set('Content-Type', 'application/xml');
@@ -467,7 +460,7 @@ async function getMessage(callSid, ani, project_id, conversation_id){
 
  // 1. First attempt: read from queue
  queue = await tdChannel.getMessagesFromQueue(conversation_id)
- console.log('[getMessage] /NEXT check queue length--> '+ queue.length)
+ winston.debug('[getMessage] /NEXT check queue length--> '+ queue.length)
 
  if (queue && queue.length > 0) {
  //CASE: queue has at least one message to reproduce --> get message from tiledesk queue and reset delayTime
@@ -528,11 +521,8 @@ async function getMessage(callSid, ani, project_id, conversation_id){
  }
 
  router.post('/speechresult/:callSid', async (req, res) => {
-
  let start_call = new Date();
- winston.verbose("(voice) called POST /speechresult ", req.body);
- winston.verbose("(voice) called POST /speechresult query ", req.query);
- console.log('/speechresult at: ', new Date(), 'with text:', req.body.SpeechResult)
+ winston.verbose("(voice) called POST /speechresult at" + new Date() + "with text: "+ req.body.SpeechResult);
 
  let usertext = req.body.SpeechResult;
  let confidence = req.body.Confidence
@@ -572,54 +562,200 @@ router.post('/speechresult/:callSid', async (req, res) => {
  uploadService: uploadService
  });
 
- winston.verbose("(voice) usertext "+usertext);
- let message = {};
- if(usertext){
- let tiledeskMessage= {
- text:usertext,
- senderFullname: from,
- type: 'text',
- channel: { name: CHANNEL_NAME }
- };
- let startSend= new Date().getTime()
- let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
- winston.debug("message sent : ", tdMessage);
- let endSend = new Date().getTime();
- console.log('Time to send messagge ( ', usertext, '): ', endSend - startSend, '[ms] at time', new Date())
- //generate Tiledesk wait message
- let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
- message = await tdChannel.generateWaitTdMessage(from, delayTime)
- //update delayIndex for wait command message time
- await voiceChannel.saveDelayIndexForCallId(callSid)
-
- }else {
-
- let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
- message = await tdChannel.generateWaitTdMessage(from, delayTime)
- //update delayIndex for wait command message time
- await voiceChannel.saveDelayIndexForCallId(from)
- }
 
+ let start_promise_message= new Date();
+ let message = await new Promise(async (resolve, reject) => {
+ winston.debug('(voice) ******* user text -->'+ usertext)
+ if(usertext === '' || !usertext){
+
+ let start_time_get_message = new Date()
+ let message = await getMessage(callSid, from, project_id, conversation_id)
+ let end_time_get_message = new Date()
+ winston.verbose(`(if) Time to getMessage from queue in /speechresult/${callSid} : ${(end_time_get_message-start_time_get_message)}[ms]`)
+ resolve(message)
+ }else{
+ //CASE:usertext is not empty and queue is empty --> send message to tiledesk and manage delayTime
+ let tiledeskMessage= {
+ text:usertext,
+ senderFullname: from,
+ type: 'text',
+ channel: { name: CHANNEL_NAME }
+ };
+
+ let start_time_send_message = new Date()
+ let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
+ let end_time_send_message = new Date()
+ winston.debug("message sent : ", tdMessage);
+ winston.verbose(`(else) Time to send message to tiledesk in /speechresult/${callSid} : ${(end_time_send_message-start_time_send_message)}[ms] with text ` + tdMessage.text + ' --- at time:' + new Date())
+
+ let start_time_get_message = new Date()
+ let message = await getMessage(callSid, from, project_id, conversation_id)
+ let end_time_get_message = new Date()
+ winston.verbose(`(else) Time to getMessage from queue in /speechresult/${callSid} : ${(end_time_get_message-start_time_get_message)}[ms]` + ' --- at time:' + new Date())
+ resolve(message)
+
+ //generate Tiledesk wait message
+ // let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
+ // let message = await tdChannel.generateWaitTdMessage(from, delayTime)
+ // //update delayIndex for wait command message time
+ // await voiceChannel.saveDelayIndexForCallId(callSid)
+ // resolve(message)
+ }
+ });
+ let end_promise_message = new Date()
+ winston.verbose(`Time to manage message in Promise /speechresult/${callSid}: ${(end_promise_message-start_promise_message)}[ms]` + ' with text' + message.text + ' --- at time --' + new Date())
+
  // convert response to vxml
  let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo)
  winston.debug("(voice) VXML to SEND: "+ messageToVXML);
 
+ let end_call = new Date()
+ winston.info(`Time to respond to /speechresult/${callSid} : ${(end_call-start_call)} [ms]`)
 
  // Render the response as XML in reply to the webhook request
  res.set('Content-Type', 'application/xml');
  res.status(200).send(messageToVXML);
+ })
+
+ /* ----> called with Record tag in action property <----- */
+ router.post('/record/action/:callSid/',async (req, res) => {
+ winston.verbose('+++++++++++(voice) called POST record/action/:callSid at time '+ new Date() + "at timestamp " + new Date().getTime());
+ let start_call = new Date();
+
+ let callSid = req.body.CallSid;
+ let sessionInfo;
+ let project_id, conversation_id, user;
+ let from, to;
+
+ let redis_data = await redis_client.get('tiledesk:voice:'+callSid+':session');
+ if (!redis_data) {
+ return res.status(500).send({ success: "false", message: "Can't retrive data for callSid ->" + callSid });
+ }
+ sessionInfo = JSON.parse(redis_data)
+ project_id = sessionInfo.project_id;
+ from = sessionInfo.from;
+ to = sessionInfo.to;
+ conversation_id = sessionInfo.conversation_id;
+ user = sessionInfo.user;
+
+ let vxmlAttributes = {
+ TTS_VOICE_LANGUAGE: VOICE_LANGUAGE,
+ TTS_VOICE_NAME: VOICE_NAME,
+ callSid: callSid,
+ };
+
+ const tdChannel = new TiledeskChannel({
+ API_URL: API_URL,
+ redis_client: redis_client
+ })
+ tdChannel.setProjectId(project_id)
+
+ const tdTranslator = new TiledeskTwilioTranslator({
+ BASE_URL: BASE_URL,
+ aiService: aiService,
+ uploadService: uploadService
+ });
+
+
+ let start_time_get_message = new Date()
+ let message = await getMessage(callSid, from, project_id, conversation_id)
+ winston.debug('message from getMessage in /record/action/: ', message)
+ let end_time_get_message = new Date()
+ winston.verbose(`Time to getMessage from queue in /record/action/${callSid} : ${(end_time_get_message-start_time_get_message)}[ms]` + ' --- at time:' + new Date())
+
+
+ //generate Tiledesk wait message
+ // let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
+ // let message = await tdChannel.generateWaitTdMessage(from, delayTime)
+ // //update delayIndex for wait command message time
+ // await voiceChannel.saveDelayIndexForCallId(callSid)
+
+ // convert response to vxml
+ let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo)
+ winston.debug("(voice) /record/action VXML to SEND: "+ messageToVXML);
+
+ let end_call = new Date();
+ winston.info(`Time to respond to /record/action/${callSid} : ${(end_call-start_call)}[ms]`)
+ res.set('Content-Type', 'application/xml');
+ res.status(200).send(messageToVXML);
+
+ });
+
+ /* ----> called with Record tag in recordingStatusCallback property <----- */
+ router.post('/record/callback/:callSid/',async (req, res) => {
+ winston.verbose('+++++++++++(voice) called POST record/callback/:callSid at time'+ new Date() + "at timestamp " + new Date().getTime());
+ let start_call = new Date();
+
+ let callSid = req.params.callSid || req.body.CallSid;
+ let audioFileUrl = req.body.RecordingUrl;
+ let audioFileDuration = req.body.RecordingDuration;
+ let button_action = req.query.button_action ? '#' + req.query.button_action : '';
+ let previousIntentName = req.query.intentName || '';
+
 
- let end_call = new Date()
- console.log('Time to responde to /speechresult/:callSid : ', end_call-start_call, '[ms]')
+ let sessionInfo;
+ let project_id, conversation_id, user;
+ let from, to;
+
+ let redis_data = await redis_client.get('tiledesk:voice:'+callSid+':session');
+ if (!redis_data) {
+ return res.status(500).send({ success: "false", message: "Can't retrive data for callSid ->" + callSid });
+ }
+ sessionInfo = JSON.parse(redis_data)
+ project_id = sessionInfo.project_id;
+ from = sessionInfo.from;
+ to = sessionInfo.to;
+ conversation_id = sessionInfo.conversation_id;
+ user = sessionInfo.user;
+
+ const tdChannel = new TiledeskChannel({
+ API_URL: API_URL,
+ redis_client: redis_client
+ })
+ tdChannel.setProjectId(project_id)
+
+ const CONTENT_KEY = CHANNEL_NAME + "-" + project_id;
+ let settings = await db.get(CONTENT_KEY);
+ if(!settings){
+ return res.status(404).send({error: "VOICE Channel not already connected"})
+ }
+
+ let tiledeskMessage = null;
+ // tiledeskMessage = buildNoInputMessage('no_input', { from, button_action, payload: { event: 'no_input', lastBlock: previousIntentName, lastTimestamp: Date.now()} });
+
+ //SPEECH TO TEXT
+ const attributes = await voiceChannel.getSettingsForCallId(callSid);
+ winston.debug(`[VOICE] getting text message from STT: ${audioFileUrl}, model: ${attributes.STT_MODEL}`);
+ // generateSTT always returns a consistent object (possibly empty or /close)
+ tiledeskMessage = await generateSTT(audioFileUrl, attributes, sessionInfo, settings)
+ winston.debug('[VOICE] tiledeskMessage from STT: ', tiledeskMessage)
+ if (!tiledeskMessage || Object.keys(tiledeskMessage).length === 0) {
+ winston.debug(`[VOICE] STT result empty, fallback to no_input branch for callSid ${callSid}`);
+ tiledeskMessage = buildNoInputMessage('no_input', { from, button_action, payload: { event: 'no_input', lastBlock: previousIntentName, lastTimestamp: Date.now()} });
+ }else {
+ const normalizedText = utils.normalizeSTT(tiledeskMessage.text);
+ winston.verbose(`[VOICE] normalized STT text: ${normalizedText} for callSid ${callSid}`);
+ if(!normalizedText){
+ tiledeskMessage = buildNoInputMessage('no_input', { from, button_action, payload: { event: 'no_input', lastBlock: previousIntentName, lastTimestamp: Date.now()} });
+ }else{
+ tiledeskMessage.text = normalizedText;
+ }
+ }
+
+ //send message to tiledesk
+ let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
+ let end_call = new Date();
+ winston.info(`Time to respond to /record/callback/${callSid} : ${(end_call-start_call)} [ms] with text ` + tiledeskMessage.text);
 
+ res.status(200).send({ success: true , message: "Message sent to Tiledesk for callSid " + callSid});
  })
 
 
  router.post('/menublock/:callSid', async (req, res) => {
- winston.verbose("(voice) called POST /menu", req.body);
- winston.verbose("(voice) called POST /menu query" , req.query);
  let start_call = new Date().getTime();
- console.log('/menublock at: ', new Date(), 'with text:', req.body.Digits)
+ winston.debug("(voice) called POST /menu", req.body);
+ winston.debug("(voice) called POST /menu query" , req.query);
+ winston.verbose('/menublock at: ' + new Date() + 'with text:'+ req.body.Digits)
 
  let message_text = '';
  let attributes = {};
@@ -697,7 +833,6 @@ router.post('/menublock/:callSid', async (req, res) => {
  uploadService: uploadService
  });
 
- let startSend = new Date().getTime();
  //send message to tiledesk
  let tiledeskMessage= {
  text:message_text,
@@ -706,30 +841,35 @@ router.post('/menublock/:callSid', async (req, res) => {
  channel: { name: CHANNEL_NAME },
  attributes: attributes
  };
- let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
- let endSend = new Date().getTime();
- console.log('(menu) Time to send messagge ( ', message_text, '): ', endSend - startSend, '[ms] at time', new Date())
+ let response = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
+ if(!response){
+ return res.status(503).send({ message: "Bad response: Quota exceeded" })
+ }
 
- //generate Tiledesk wait message
- let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
- let message = await tdChannel.generateWaitTdMessage(from, delayTime)
- //update delayIndex for wait command message time
- await voiceChannel.saveDelayIndexForCallId(callSid)
+ let start_time_get_message = new Date()
+ let message = await getMessage(callSid, from, project_id, conversation_id)
+ let end_time_get_message = new Date()
+ winston.verbose(`Time to getMessage from queue in /menublock/${callSid} : ${(end_time_get_message-start_time_get_message)}[ms]` + ' --- at time:' + new Date())
+
+ // //generate Tiledesk wait message
+ // let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
+ // let message = await tdChannel.generateWaitTdMessage(from, delayTime)
+ // //update delayIndex for wait command message time
+ // await voiceChannel.saveDelayIndexForCallId(callSid)
 
  // convert response to vxml
  let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo)
  winston.debug("(voice) VXML to SEND: "+ messageToVXML);
 
- res.set('Content-Type', 'application/xml');
- res.status(200).send(messageToVXML);
-
  let end_call = new Date().getTime();
- console.log('Time to responde to /menublock/<call_id>/ : ', end_call-start_call, '[ms]')
+ winston.info(`Time to respond to /menublock/${callSid} : ${(end_call-start_call)} [ms]`)
 
+ res.set('Content-Type', 'application/xml');
+ res.status(200).send(messageToVXML);
  });
 
  router.post('/handle/:callSid/:event', async (req, res) => {
- winston.verbose("(voice) called POST /handle", req.body);
+ winston.debug("(voice) called POST /handle", req.body);
  winston.debug("(voice) called POST /handle query -->", req.query);
  winston.debug("(voice) called POST /handle params-->", req.params);
 
@@ -808,7 +948,7 @@ router.post('/handle/:callSid/:event', async (req, res) => {
 
  /* ----> catch Event block <----- */
  router.post('/event/:callSid/:event', async(req, res)=> {
- winston.verbose("(voice) called POST /event" , req.params);
+ winston.debug("(voice) called POST /event" , req.params);
  winston.debug("(voice) called POST /event query" , req.query);
  winston.debug("(voice) called POST /event body" , req.body);
 
@@ -912,7 +1052,7 @@ router.post('/event/:callSid/:event', async(req, res)=> {
 
  /* ----> catch Twilio Events <----- */
  router.post('/twilio/status',async (req, res) => {
- winston.verbose('+++++++++++(voice) called POST twilio/status ', req.body);
+ winston.debug('+++++++++++(voice) called POST twilio/status ', req.body);
 
  let event = req.body.CallStatus;
  let callSid = req.body.CallSid;
@@ -978,8 +1118,7 @@ router.post('/twilio/status',async (req, res) => {
  let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
 
  //remove session data for current callId and relative queue data
- await redis_client.del('tiledesk:voice:'+callSid+':session');
- await redis_client.del('tiledesk:voice:'+callSid+':delayIndex');
+ await voiceChannel.deleteCallKeys(callSid);
  await tdChannel.clearQueue(conversation_id);
  break;
  }
@@ -992,7 +1131,7 @@ router.post('/twilio/status',async (req, res) => {
 
 
  router.post('/twilio/fail',async (req, res) => {
- winston.verbose('+++++++++++(voice) called POST twilio/fail ', req.params)
+ winston.debug('+++++++++++(voice) called POST twilio/fail ', req.params)
  winston.debug('+++++++++++(voice) called POST twilio/fail ', req.body)
 
  res.set('Content-Type', 'application/xml');
@@ -1000,175 +1139,100 @@ router.post('/twilio/fail',async (req, res) => {
  })
 
 
- /* ----> catch Twilio Events <----- */
- router.post('/record/:callSid/',async (req, res) => {
- winston.verbose('+++++++++++(voice) called POST record/:callSid ', req.body);
-
- let callSid = req.body.CallSid;
- let audioFileUrl = req.body.RecordingUrl;
-
- let sessionInfo;
- let project_id, conversation_id, user;
- let from, to;
-
- let redis_data = await redis_client.get('tiledesk:voice:'+callSid+':session');
- if (!redis_data) {
- return res.status(500).send({ success: "false", message: "Can't retrive data for callSid ->" + callSid });
- }
- sessionInfo = JSON.parse(redis_data)
- project_id = sessionInfo.project_id;
- from = sessionInfo.from;
- to = sessionInfo.to;
- conversation_id = sessionInfo.conversation_id;
- user = sessionInfo.user;
-
- let vxmlAttributes = {
- TTS_VOICE_LANGUAGE: VOICE_LANGUAGE,
- TTS_VOICE_NAME: VOICE_NAME,
- callSid: callSid,
- };
-
-
- const tdChannel = new TiledeskChannel({
- API_URL: API_URL,
- redis_client: redis_client
- })
- tdChannel.setProjectId(project_id)
-
- const tdTranslator = new TiledeskTwilioTranslator({
- BASE_URL: BASE_URL,
- aiService: aiService,
- uploadService: uploadService
- });
-
- const CONTENT_KEY = CHANNEL_NAME + "-" + project_id;
- let settings = await db.get(CONTENT_KEY);
- if(!settings){
- return res.status(404).send({error: "VOICE Channel not already connected"})
- }
-
-
- let attributes = await voiceChannel.getSettingsForCallId(callSid);
- console.log('attributessss', attributes)
-
- //SPEECH TO TEXT
- console.log('getting text message . . . ', audioFileUrl, attributes.STT_MODEL)
- let tiledeskMessage = await generateSTT(audioFileUrl, attributes, sessionInfo, settings)
- console.log('(voice) Message captured after STT -->', tiledeskMessage)
-
- if(!tiledeskMessage){
- //case NO_INPUT
- const queryString = utils.buildQueryString(req.query);
- winston.debug('case no input.. redirect '+ queryString)
-
- return await axios({
- url: "http://localhost:3000/handle/" + callSid + '/no_input'+ queryString,
- headers: req.headers,
- data: req.body,
- method: 'POST'
- }).then((response) => {
- winston.debug("[TiledeskChannel] speechToText response : ", response.data);
- return res.status(response.status).send(response.data);
- }).catch((err) => {
- winston.error("[TiledeskChannel] speechToText error: ", err);
- return res.status(500).send({ success: false, message: "Errore while redirect to /handle for callSid " + callSid});;
- })
- }
-
-
- let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
- winston.debug("message sent : ", tdMessage);
-
-
- //generate Tiledesk wait message
- let delayTime = await voiceChannel.getNextDelayTimeForCallId(callSid)
- let message = await tdChannel.generateWaitTdMessage(from, delayTime)
- //update delayIndex for wait command message time
- await voiceChannel.saveDelayIndexForCallId(callSid)
-
- // convert response to vxml
- let messageToVXML = await tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo)
- winston.debug("(voice) VXML to SEND: "+ messageToVXML);
-
- res.set('Content-Type', 'application/xml');
- res.status(200).send(messageToVXML);
-
- })
 
  async function generateSTT(audioFileUrl, attributes, sessionInfo, settings){
 
  winston.debug("(voice) generateSTT: "+ attributes.VOICE_PROVIDER);
 
- let tiledeskMessage = {}, text = null;
- switch(attributes.VOICE_PROVIDER){
- case VOICE_PROVIDER.OPENAI:
- let GPT_KEY = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.key
- let publicKey = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.publicKey
- if(publicKey){
- let keep_going = await aiService.checkQuoteAvailability(sessionInfo.project_id, settings.token).catch((err)=>{
- winston.error('errr while checkQuoteAvailability for project:', sessionInfo.project_id, err.response?.data)
- })
- winston.verbose('(voice) checkQuoteAvailability return: '+ keep_going);
- if(!keep_going){
- //no token is available --> close conversation
- return tiledeskMessage= {
- //text:'\\close',
- text:'/close',
- senderFullname: sessionInfo.from,
- type: 'text',
- channel: { name: CHANNEL_NAME },
- attributes: {
- subtype: "info",
- action: 'close'+JSON.stringify({event: 'quota_exceeded'}),
- payload: {
- catchEvent: 'quota_exceeded'
- },
- timestamp: 'xxxxxx'
- }
- };
+ let tiledeskMessage = {};
+ let text = null;
+
+ try {
+ switch(attributes.VOICE_PROVIDER){
+ case VOICE_PROVIDER.OPENAI: {
+ let GPT_KEY = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.key
+ let publicKey = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.publicKey
+ if(publicKey){
+ let keep_going = await aiService.checkQuoteAvailability(sessionInfo.project_id, settings.token)
+ winston.debug('(voice) checkQuoteAvailability return: '+ keep_going);
+ if(!keep_going){
+ //no token is available --> close conversation
+ return tiledeskMessage= {
+ //text:'\\close',
+ text:'/close',
+ senderFullname: sessionInfo.from,
+ type: 'text',
+ channel: { name: CHANNEL_NAME },
+ attributes: {
+ subtype: "info",
+ action: 'close'+JSON.stringify({event: 'quota_exceeded'}),
+ payload: {
+ catchEvent: 'quota_exceeded'
+ },
+ timestamp: 'xxxxxx'
+ }
+ };
 
+ }
  }
+
+ text = await aiService.speechToText(audioFileUrl, attributes.STT_MODEL, GPT_KEY)
+ break;
  }
-
- text = await aiService.speechToText(audioFileUrl, attributes.STT_MODEL, GPT_KEY).catch((err)=>{
- winston.error('errr while transcript', err.response?.data)
- })
- tiledeskMessage= {
- text: text,
- senderFullname: sessionInfo.from,
- type: 'text',
- channel: { name: CHANNEL_NAME }
- };
- break;
- case VOICE_PROVIDER.ELEVENLABS:
- let ELEVENLABS_APIKEY = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.ELEVENLABS))?.key
- // The condition in the method's inputs is correct, but it can be written more readably:
- const ttsLanguage = attributes.TTS_LANGUAGE || 'en';
- text = await this.aiService.speechToTextElevenLabs(
- audioFileUrl,
- attributes.STT_MODEL,
- ttsLanguage,
- ELEVENLABS_APIKEY
- ).catch((err) => {
- winston.error('errr while creating elevenlabs audio message', err?.response?.data);
- });
- tiledeskMessage= {
+ case VOICE_PROVIDER.ELEVENLABS: {
+ let ELEVENLABS_APIKEY = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.ELEVENLABS))?.key
+ const ttsLanguage = attributes.TTS_LANGUAGE || 'en';
+ text = await aiService.speechToTextElevenLabs( audioFileUrl, attributes.STT_MODEL, ttsLanguage, ELEVENLABS_APIKEY )
+ break;
+ }
+ default:
+ throw new Error('Unsupported VOICE_PROVIDER: ' + attributes.VOICE_PROVIDER);
+ }
+
+ if(text){
+ winston.debug('[STT] text empty → fallback no_input');
+ tiledeskMessage = {
  text: text,
  senderFullname: sessionInfo.from,
  type: 'text',
  channel: { name: CHANNEL_NAME }
  };
- break;
+ }
+ } catch (error) {
+ winston.error('[STT] generateSTT error:', error);
+ switch (error.code) {
+ case 'AISERVICE_FAILED':
+ winston.error('[STT] AISERVICE_FAILED → ', error.message);
+ break;
+ }
 
+ // fallback: empty tiledeskMessage
+ tiledeskMessage = {};
+
  }
 
  return tiledeskMessage
  }
 
 
+ async function buildNoInputMessage(event, { from, button_action, payload }) {
+ return {
+ text: `/${event}`,
+ senderFullname: from,
+ type: 'text',
+ channel: { name: CHANNEL_NAME },
+ attributes: {
+ type: 'info',
+ action: button_action,
+ payload: payload
+ }
+ };
+ }
+
+
 
  router.get('/addon/transcript', async (req, res) => {
- winston.verbose("(voice) called GET /transcript query-->" , req.query);
+ winston.debug("(voice) called GET /transcript query-->" , req.query);
  winston.debug("(voice) called GET /transcript body -->" , req.body);
 
  res.status(200).send('ok');
@@ -1177,7 +1241,7 @@ router.get('/addon/transcript', async (req, res) => {
 
  /** --> only for test purpose <-- **/
  router.get('/test', async (req, res) => {
- winston.verbose("(voice) called GET /test" , req.query);
+ winston.debug("(voice) called GET /test" , req.query);
 
  let project_id = req.query.id_project;
  let callSid = req.body.CallSid;
@@ -1354,7 +1418,7 @@ async function connectRedis() {
 
 
  redis_client.on('error', err => {
- winston.debug('(voice) Connect Redis Error ' + err);
+ winston.error('(voice) Connect Redis Error ' + err);
  })
  /*
  redis_client.on('connect', () => {