@tiledesk/tiledesk-voice-twilio-connector 0.2.0-rc3 → 0.2.0-rc6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +494 -23
- package/package.json +3 -2
- package/src/app.js +11 -3
- package/src/controllers/VoiceController.js +33 -28
- package/src/middlewares/httpLogger.js +25 -13
- package/src/models/KeyValueStore.js +5 -5
- package/src/routes/manageApp.js +7 -7
- package/src/services/AiService.js +8 -8
- package/src/services/AiService.sdk.js +13 -13
- package/src/services/IntegrationService.js +2 -2
- package/src/services/MessageService.js +12 -6
- package/src/services/SessionService.js +5 -5
- package/src/services/SpeechService.js +3 -3
- package/src/services/TwilioService.js +9 -2
- package/src/services/UploadService.js +1 -1
- package/src/services/channels/TiledeskChannel.js +54 -55
- package/src/services/channels/VoiceChannel.js +4 -4
- package/src/services/clients/TiledeskSubscriptionClient.js +2 -2
- package/src/services/translators/TiledeskTwilioTranslator.js +20 -15
- package/src/controllers/VoiceController.original.js +0 -811
- package/src/services/translators/TiledeskTwilioTranslator.original.js +0 -614
package/src/controllers/VoiceController.js
CHANGED
@@ -72,11 +72,11 @@ class VoiceController {
     const tiledeskMessage = req.body.payload;
     const projectId = tiledeskMessage.id_project;
 
-    logger.debug(`(voice) Message received from Tiledesk in projectID: ${projectId} ---- text: ${tiledeskMessage.text}`);
+    // logger.debug(`(voice) Message received from Tiledesk in projectID: ${projectId} ---- text: ${tiledeskMessage.text}`);
 
-    if (!utilsMessage.messageType(TYPE_MESSAGE.INFO, tiledeskMessage) && !(tiledeskMessage.sender.indexOf(CHANNEL_NAME) > -1)) {
-
-    }
+    // if (!utilsMessage.messageType(TYPE_MESSAGE.INFO, tiledeskMessage) && !(tiledeskMessage.sender.indexOf(CHANNEL_NAME) > -1)) {
+    //   logger.debug(`> whook SAVE MESSAGE "${tiledeskMessage.text}" TO QUEUE at time ${new Date()}`);
+    // }
 
     if (await this.tdChannel.addMessageToQueue(tiledeskMessage)) {
       await this._handlePlayRedirect(tiledeskMessage, projectId);
@@ -102,7 +102,7 @@ class VoiceController {
   async webhook(req, res) {
     try {
       const startCall = Date.now();
-      logger.debug('(voice) called POST /webhook/:id_project ' + new Date(), req.params);
+      // logger.debug('(voice) called POST /webhook/:id_project ' + new Date(), req.params);
 
       const projectId = req.params.id_project;
       const callSid = req.body.CallSid;
@@ -135,7 +135,7 @@ class VoiceController {
        this.sessionService.getIntegrationKeys(projectId, settings.token)
      ]);
 
-      logger.debug(`(voice) conversation returned: ${conversationId}`);
+      // logger.debug(`(voice) conversation returned: ${conversationId}`);
 
      // Create session
      const sessionData = await this.sessionService.createSession({
@@ -158,9 +158,10 @@ class VoiceController {
      // Get response from queue
      const message = await this.messageService.getNextMessage(callSid, conversationId, from);
      const { vxmlAttributes } = await this.sessionService.getSessionContext(callSid);
-      const messageToVXML = await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionData);
+      const messageToVXML = message != null ? await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionData) : this.twilioService.generateRedirectTwiML(callSid, '');
+      // const messageToVXML = await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionData);
 
-      logger.info(`Time to respond to /webhook/${projectId}: ${Date.now() - startCall}[ms]`);
+      // logger.info(`Time to respond to /webhook/${projectId}: ${Date.now() - startCall}[ms]`);
 
      res.set('Content-Type', 'text/xml');
      res.status(200).send(messageToVXML);
@@ -177,15 +178,15 @@ class VoiceController {
      const userText = req.body.SpeechResult || req.body.Digits;
      const bargein = req.query.bargein === 'true' || req.query.bargein === true;
 
-      logger.verbose(`(voice) called POST /nextblock at ${new Date()} with text: ${userText}, bargein: ${bargein}`);
+      // logger.verbose(`(voice) called POST /nextblock at ${new Date()} with text: ${userText}, bargein: ${bargein}`);
 
      const { vxmlAttributes, sessionInfo } = await this.sessionService.getSessionContext(callSid);
      const { from, conversation_id, project_id, user } = sessionInfo;
 
      const message = await this._handleUserInput(userText, callSid, from, conversation_id, project_id, user, bargein);
-      const messageToVXML = await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo);
+      const messageToVXML = message != null ? await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo) : this.twilioService.generateRedirectTwiML(callSid, '');
 
-      logger.info(`Time to respond to /nextblock/${callSid}: ${Date.now() - startCall}[ms]`);
+      // logger.info(`Time to respond to /nextblock/${callSid}: ${Date.now() - startCall}[ms]`);
 
      res.set('Content-Type', 'application/xml');
      res.status(200).send(messageToVXML);
@@ -201,15 +202,15 @@ class VoiceController {
      const callSid = req.params.callSid;
      const userText = req.body.SpeechResult;
 
-      logger.verbose(`(voice) called POST /speechresult at ${new Date()} with text: ${userText}`);
+      // logger.verbose(`(voice) called POST /speechresult at ${new Date()} with text: ${userText}`);
 
      const { vxmlAttributes, sessionInfo } = await this.sessionService.getSessionContext(callSid);
      const { from, conversation_id, project_id, user } = sessionInfo;
 
      const message = await this._handleUserInput(userText, callSid, from, conversation_id, project_id, user);
-      const messageToVXML = await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo);
+      const messageToVXML = message != null? await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo) : this.twilioService.generateRedirectTwiML(callSid, '');
 
-      logger.info(`Time to respond to /speechresult/${callSid}: ${Date.now() - startCall}[ms]`);
+      // logger.info(`Time to respond to /speechresult/${callSid}: ${Date.now() - startCall}[ms]`);
 
      res.set('Content-Type', 'application/xml');
      res.status(200).send(messageToVXML);
@@ -220,14 +221,15 @@ class VoiceController {
   }
 
   async _handleUserInput(userText, callSid, from, conversationId, projectId, user, bargein = false) {
+    let message;
     if (!userText) {
       return this.messageService.getNextMessage(callSid, conversationId, from);
     }
 
     // If barge-in is enabled and user has provided input, clear the queue
     // This ensures we get the fresh response after user interruption
-    if (bargein) {
-      logger.debug(`[VoiceController] Barge-in detected, clearing queue for conversation: ${conversationId}`);
+    if (bargein && !userText) {
+      // logger.debug(`[VoiceController] Barge-in detected, clearing queue for conversation: ${conversationId}`);
       await this.tdChannel.clearQueue(conversationId);
     }
 
@@ -242,15 +244,14 @@ class VoiceController {
      const startCall = Date.now();
      const callSid = req.body.CallSid;
 
-      logger.verbose(`(voice) called POST record/action/:callSid at time ${new Date()}`);
+      // logger.verbose(`(voice) called POST record/action/:callSid at time ${new Date()}`);
 
      const { vxmlAttributes, sessionInfo } = await this.sessionService.getSessionContext(callSid);
      const { from, conversation_id, project_id } = sessionInfo;
 
      const message = await this.messageService.getNextMessage(callSid, conversation_id, from);
-      const messageToVXML = await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo);
-
-      logger.info(`Time to respond to /record/action/${callSid}: ${Date.now() - startCall}[ms]`);
+      const messageToVXML = message != null? await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo) : this.twilioService.generateRedirectTwiML(callSid, '');
+      // logger.info(`Time to respond to /record/action/${callSid}: ${Date.now() - startCall}[ms]`);
 
      res.set('Content-Type', 'application/xml');
      res.status(200).send(messageToVXML);
@@ -268,7 +269,7 @@ class VoiceController {
      const buttonAction = req.query.button_action ? `#${req.query.button_action}` : '';
      const previousIntentName = req.query.intentName || '';
 
-      logger.verbose(`(voice) called POST record/callback/:callSid at time ${new Date()}`);
+      // logger.verbose(`(voice) called POST record/callback/:callSid at time ${new Date()}`);
 
      const { sessionInfo, project_id } = await this.sessionService.getSessionContext(callSid);
      const { from, conversation_id, user } = sessionInfo;
@@ -305,7 +306,7 @@ class VoiceController {
 
      await this.tdChannel.send(tiledeskMessage, user.token, conversation_id, project_id);
 
-      logger.info(`Time to respond to /record/callback/${callSid}: ${Date.now() - startCall}[ms] with text ${tiledeskMessage.text}`);
+      // logger.info(`Time to respond to /record/callback/${callSid}: ${Date.now() - startCall}[ms] with text ${tiledeskMessage.text}`);
 
      res.status(200).send({ success: true, message: `Message sent to Tiledesk for callSid ${callSid}` });
    } catch (error) {
@@ -322,7 +323,7 @@ class VoiceController {
      const buttonNoMatch = req.query.button_action;
      const menuChoice = req.body.Digits || '';
 
-      logger.verbose(`/menublock at: ${new Date()} with text: ${menuChoice}`);
+      // logger.verbose(`/menublock at: ${new Date()} with text: ${menuChoice}`);
 
      // Parse menu selection
      const { messageText, attributes } = this._parseMenuSelection(menuChoice, buttonsMenu, buttonNoMatch);
@@ -340,7 +341,7 @@ class VoiceController {
      const message = await this.messageService.getNextMessage(callSid, conversation_id, from);
      const messageToVXML = await this.tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo);
 
-      logger.info(`Time to respond to /menublock/${callSid}: ${Date.now() - startCall}[ms]`);
+      // logger.info(`Time to respond to /menublock/${callSid}: ${Date.now() - startCall}[ms]`);
 
      res.set('Content-Type', 'application/xml');
      res.status(200).send(messageToVXML);
@@ -381,7 +382,7 @@ class VoiceController {
      const buttonAction = `#${req.query.button_action}`;
      const previousIntentName = req.query.intentName;
 
-      logger.debug(`(voice) called POST /handle event: ${event}`);
+      // logger.debug(`(voice) called POST /handle event: ${event}`);
 
      const { vxmlAttributes, sessionInfo } = await this.sessionService.getSessionContext(callSid);
      const { from, conversation_id, project_id, user } = sessionInfo;
@@ -410,7 +411,7 @@ class VoiceController {
      const currentIntentName = req.query.intentName;
      const currentIntentTimestamp = req.query.previousIntentTimestamp;
 
-      logger.debug(`(voice) called POST /event: ${event}`);
+      // logger.debug(`(voice) called POST /event: ${event}`);
 
      const { vxmlAttributes, sessionInfo } = await this.sessionService.getSessionContext(callSid);
      const { from, conversation_id, project_id, user } = sessionInfo;
@@ -459,7 +460,7 @@ class VoiceController {
      const event = req.body.CallStatus;
      const callSid = req.body.CallSid;
 
-      logger.debug(`(voice) called POST twilio/status: ${event}`);
+      // logger.debug(`(voice) called POST twilio/status: ${event}`);
 
      if (event === CALL_STATUS.COMPLETED) {
        const { sessionInfo, conversation_id } = await this.sessionService.getSessionContext(callSid);
@@ -469,17 +470,21 @@ class VoiceController {
        await this.tdChannel.send(tiledeskMessage, user.token, conversation_id, project_id);
 
        await this.sessionService.cleanupSession(callSid, conversation_id);
+        await this.messageService.cleanupMessages(conversation_id, null, callSid);
      }
 
      res.status(200).send();
    } catch (error) {
      logger.error("(voice) Error in twilioStatus handler:", error);
      res.status(500).send({ error: "Internal Server Error" });
+
+      await this.sessionService.cleanupSession(callSid, conversation_id);
+      await this.messageService.cleanupMessages(conversation_id, null, callSid);
    }
  }
 
  async twilioFail(req, res) {
-    logger.debug('(voice) called POST twilio/fail', req.body);
+    // logger.debug('(voice) called POST twilio/fail', req.body);
    res.set('Content-Type', 'application/xml');
    res.status(200).send('<Response></Response>');
  }
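
The recurring change in the VoiceController hunks above is a null guard around the translator: when getNextMessage returns nothing, the handler now answers Twilio with a redirect TwiML instead of translating a missing message. A minimal sketch of that pattern, written as a free-standing helper for illustration (the helper itself is not part of the package; tdTranslator.toVXML and twilioService.generateRedirectTwiML are the methods the diff relies on):

async function renderOrRedirect(tdTranslator, twilioService, message, callSid, vxmlAttributes, sessionInfo) {
  if (message != null) {
    // Normal path: a message was dequeued, translate it to the TwiML/VXML response.
    return tdTranslator.toVXML(message, callSid, vxmlAttributes, sessionInfo);
  }
  // Empty queue: answer with a redirect so the call loops back to the webhook
  // instead of failing on a null message.
  return twilioService.generateRedirectTwiML(callSid, '');
}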
package/src/middlewares/httpLogger.js
CHANGED
@@ -1,4 +1,5 @@
 const logger = require('../utils/logger');
+const uuid = require('uuid');
 
 let lastRequestTime = Date.now();
 
@@ -7,22 +8,33 @@ const httpLogger = (req, res, next) => {
   const timeSinceLastRequest = startTime - lastRequestTime;
   lastRequestTime = startTime;
 
+  const requestId = uuid.v4();
+  const bodySize = req.headers['content-length'] || (req.body ? JSON.stringify(req.body).length : 0);
+
+  const logData = {
+    method: req.method,
+    path: req.path,
+    query: req.query,
+    params: req.params,
+    bodySize: bodySize,
+    statusCode: res.statusCode
+  };
+
+  logger.info(`Request[${requestId}]: ${JSON.stringify(logData)}`);
+  logger.debug(`Request[${requestId}]: ${JSON.stringify(req.body)}`);
+
+  const originalSend = res.send;
+  let resBody;
+  res.send = function (body) {
+    resBody = body;
+    return originalSend.call(this, body);
+  }
+
   // Hook into response finish to calculate processing time
   res.on('finish', () => {
     const duration = Date.now() - startTime;
-
-
-    const logData = {
-      method: req.method,
-      path: req.path,
-      query: req.query,
-      bodySize: bodySize,
-      statusCode: res.statusCode,
-      durationMs: duration,
-      timeSinceLastRequestMs: timeSinceLastRequest
-    };
-
-    logger.info(`HTTP: ${JSON.stringify(logData)}`);
+    logger.info(`Response[${requestId}]: ${JSON.stringify({ statusCode: res.statusCode, duration: duration + 'ms'})}`);
+    logger.debug(`Response[${requestId}]: ${JSON.stringify(resBody)}`);
   });
 
   next();
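
The rewritten middleware tags each request with a UUID, logs the request metadata up front, wraps res.send to capture the response body, and emits a matching Response[<id>] entry when the response finishes. A minimal wiring sketch, assuming the module exports the middleware function directly and is mounted from an Express entry point (the actual app.js changes are not shown in this excerpt):

const express = require('express');
const httpLogger = require('./middlewares/httpLogger');

const app = express();
app.use(express.json());   // parse req.body first so the logged bodySize reflects the payload
app.use(httpLogger);       // emits Request[<uuid>] on entry and Response[<uuid>] on finish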
package/src/models/KeyValueStore.js
CHANGED
@@ -17,7 +17,7 @@ class KVBaseMongo {
       throw new Error('KVBASE_COLLECTION (the name of the Mongodb collection used as key-value store) is mandatory.');
     }
     this.KV_COLLECTION = KVBASE_COLLECTION;
-
+    // logger.debug("KV_COLLECTION: " + this.KV_COLLECTION)
   }
 
   async connect(MONGODB_URI) {
@@ -27,7 +27,7 @@ class KVBaseMongo {
       await this.db.collection(this.KV_COLLECTION).createIndex(
         { "key": 1 }, { unique: true }
       );
-
+      //// logger.debug("[mongodb] db: ", this.db);
     } catch (err) {
       winston.error(err);
       process.exit(1);
@@ -50,14 +50,14 @@ class KVBaseMongo {
   }
 
   async get(k) {
-
+    // logger.debug("Searching on Collection " + this.KV_COLLECTION + ' for key: '+ k);
     try {
       const doc = await this.db.collection(this.KV_COLLECTION).findOne({ key: k });
       if (doc) {
-
+        // logger.debug("Doc found with key: " + doc.key);
         return doc.value;
       } else {
-
+        // logger.debug("No Doc found!");
         return null;
       }
     } catch (err) {
package/src/routes/manageApp.js
CHANGED
@@ -47,7 +47,7 @@ module.exports = (services) => {
   });
 
   router.get('/configure', async (req, res) => {
-
+    // logger.debug("(voice) /configure :params", req.query);
 
     let project_id = req.query.project_id;
     let token = req.query.token;
@@ -64,7 +64,7 @@ module.exports = (services) => {
     try {
       let CONTENT_KEY = CHANNEL_NAME + "-" + project_id;
       let settings = await db.get(CONTENT_KEY);
-
+      // logger.debug("(voice) settings: ", settings);
 
       const tdChannel = new TiledeskChannel({
         API_URL: API_URL,
@@ -126,7 +126,7 @@ module.exports = (services) => {
   })
 
   router.post('/update', async (req, res) => {
-
+    // logger.debug("(voice) /update", req.body);
 
     let project_id = req.body.project_id;
     let token = req.body.token;
@@ -208,7 +208,7 @@ module.exports = (services) => {
   })
 
   router.post('/disconnect', async (req, res) => {
-
+    // logger.debug("(voice) /disconnect")
 
     let project_id = req.body.project_id;
    let token = req.body.token;
@@ -220,7 +220,7 @@ module.exports = (services) => {
 
    let CONTENT_KEY = CHANNEL_NAME + "-" + project_id;
    await db.remove(CONTENT_KEY);
-
+    // logger.debug("(voice) Content deleted.");
 
    let proxy_url = BASE_URL + "/webhook/" + project_id;
    let status_url = BASE_URL + "/twilio/status";
@@ -270,7 +270,7 @@ async function subscribe(token, project_id, API_URL, BASE_URL, db){
 
   try {
     const subscription = await tdClient.subscribe(subscription_info);
-
+    // logger.debug("(voice) Subscription: ", subscription)
 
     let settings = {
       project_id: project_id,
@@ -290,7 +290,7 @@ async function unsubscribe(token, project_id, subscriptionId, API_URL){
   const tdClient = new TiledeskSubscriptionClient({ API_URL: API_URL, project_id: project_id, token: token })
   try {
     const data = await tdClient.unsubscribe(subscriptionId);
-
+    // logger.debug("(voice) Subscription: ", data)
     return data;
   } catch (error) {
     throw error;
package/src/services/AiService.js
CHANGED
@@ -34,7 +34,7 @@ class AiService {
 
   async speechToText(fileUrl, model, GPT_KEY) {
     let start_time = new Date();
-
+    // logger.debug("[AiService] speechToText url: "+ fileUrl);
 
     try {
       let file;
@@ -46,7 +46,7 @@ class AiService {
       }
 
       if (!file) {
-
+        // logger.debug('[AiService] OPENAI speechToText file NOT EXIST: . . . return');
         throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
       }
 
@@ -77,7 +77,7 @@ class AiService {
 
   async textToSpeech(text, name, model, GPT_KEY){
     let start_time = new Date();
-
+    // logger.debug('[AiService] textToSpeech text:'+ text)
 
     const data = {
       model: model,
@@ -85,7 +85,7 @@ class AiService {
       voice: name,
     };
 
-
+    // logger.debug('[AiService] textToSpeech config:', data)
 
     try {
       const response = await axios({
@@ -112,7 +112,7 @@ class AiService {
 
   async speechToTextElevenLabs(fileUrl, model, language, API_KEY) {
     let start_time = new Date();
-
+    // logger.debug("[AiService] ELEVEN Labs speechToText url: "+ fileUrl);
 
     try {
       let file;
@@ -124,7 +124,7 @@ class AiService {
       }
 
       if (!file) {
-
+        // logger.debug('[AiService] ELEVEN Labs speechToText file NOT EXIST: . . . return');
         throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
       }
 
@@ -161,7 +161,7 @@ class AiService {
       text: text,
       language_code: language_code
     };
-
+    // logger.debug('[AiService] ELEVEN Labs textToSpeech config:', data);
 
     try {
       const response = await axios({
@@ -188,7 +188,7 @@ class AiService {
 
   async checkQuoteAvailability(projectId, token) {
 
-
+    // logger.debug("[AiService] checkQuoteAvailability for project: "+ projectId);
 
     try {
       const resbody = await axios({
package/src/services/AiService.sdk.js
CHANGED
@@ -76,7 +76,7 @@ class AiService {
 
   async speechToText(fileUrl, model, GPT_KEY) {
     let start_time = new Date();
-
+    // logger.debug("[AiService] speechToText url: "+ fileUrl);
 
     try {
       let file;
@@ -88,7 +88,7 @@ class AiService {
       }
 
       if (!file) {
-
+        // logger.debug('[AiService] OPENAI speechToText file NOT EXIST: . . . return');
         throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
       }
 
@@ -119,7 +119,7 @@ class AiService {
 
   async textToSpeech(text, name, model, GPT_KEY){
     let start_time = new Date();
-
+    // logger.debug('[AiService] textToSpeech text:'+ text)
 
     const data = {
       model: model,
@@ -127,7 +127,7 @@ class AiService {
       voice: name,
     };
 
-
+    // logger.debug('[AiService] textToSpeech config:', data)
 
     try {
       const response = await axios({
@@ -156,11 +156,11 @@ class AiService {
   */
   async speechToTextElevenLabs(fileUrl, model, language, API_KEY) {
     let start_time = new Date();
-
+    // logger.debug("[AiService.sdk] ELEVEN Labs speechToText url: "+ fileUrl);
 
     // If SDK is not available or disabled, fall back to HTTP
     if (!this.useSDK || !this.sdkAvailable || !ElevenLabsClient) {
-
+      // logger.debug("[AiService.sdk] SDK not available or disabled, using HTTP");
       return this._speechToTextElevenLabsHTTP(fileUrl, model, language, API_KEY);
     }
 
@@ -174,7 +174,7 @@ class AiService {
       }
 
       if (!file) {
-
+        // logger.debug('[AiService.sdk] ELEVEN Labs speechToText file NOT EXIST: . . . return');
         throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
       }
 
@@ -209,7 +209,7 @@ class AiService {
   */
   async _speechToTextElevenLabsHTTP(fileUrl, model, language, API_KEY) {
     let start_time = new Date();
-
+    // logger.debug("[AiService.sdk] ELEVEN Labs speechToText (HTTP fallback) url: "+ fileUrl);
 
     try {
       let file;
@@ -221,7 +221,7 @@ class AiService {
       }
 
       if (!file) {
-
+        // logger.debug('[AiService.sdk] ELEVEN Labs speechToText file NOT EXIST: . . . return');
         throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
       }
 
@@ -257,11 +257,11 @@ class AiService {
   */
   async textToSpeechElevenLabs(text, voice_id, model, language_code, API_KEY){
     let start_time = new Date();
-
+    // logger.debug('[AiService.sdk] ELEVEN Labs textToSpeech config:', { text, voice_id, model, language_code });
 
     // If SDK is not available or disabled, fall back to HTTP
     if (!this.useSDK || !this.sdkAvailable || !ElevenLabsClient) {
-
+      // logger.debug("[AiService.sdk] SDK not available or disabled, using HTTP");
      return this._textToSpeechElevenLabsHTTP(text, voice_id, model, language_code, API_KEY);
     }
 
@@ -309,7 +309,7 @@ class AiService {
   */
   async _textToSpeechElevenLabsHTTP(text, voice_id, model, language_code, API_KEY){
     let start_time = new Date();
-
+    // logger.debug('[AiService.sdk] ELEVEN Labs textToSpeech (HTTP fallback) config:', { text, voice_id, model, language_code });
 
     const data = {
       model_id: model || "eleven_multilingual_v2",
@@ -339,7 +339,7 @@ class AiService {
   }
 
   async checkQuoteAvailability(projectId, token) {
-
+    // logger.debug("[AiService] checkQuoteAvailability for project: "+ projectId);
 
     try {
       const resbody = await axios({
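
The context lines in this file show the guard used around every ElevenLabs call: this.useSDK, this.sdkAvailable, and the ElevenLabsClient reference are checked before choosing between the SDK path and the axios-based _...HTTP fallback. A sketch of how such a guard can be satisfied at load time; the module name and flag wiring below are assumptions, since this excerpt does not show how the SDK is imported:

let ElevenLabsClient = null;
let sdkAvailable = false;
try {
  // Assumed import; the connector may load the SDK from a different module.
  ({ ElevenLabsClient } = require('@elevenlabs/elevenlabs-js'));
  sdkAvailable = true;
} catch (err) {
  // SDK not installed or failed to load: calls fall through to the HTTP implementation.
}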
package/src/services/IntegrationService.js
CHANGED
@@ -18,7 +18,7 @@ class IntegrationService {
 
 
   async getKeyFromIntegrations(id_project, integration_name, token) {
-
+    // logger.debug('[IntegrationService] getKeyFromIntegrations id_project:' + id_project + ' ' + integration_name)
 
     try {
       const response = await axios({
@@ -43,7 +43,7 @@ class IntegrationService {
   }
 
   async getKeyFromKbSettings(id_project, token) {
-
+    // logger.debug('[IntegrationService] getKeyFromKbSettings id_project:', id_project)
 
     try {
       const response = await axios({
package/src/services/MessageService.js
CHANGED
@@ -34,11 +34,14 @@ class MessageService {
 
       if (queue && queue.length > 0) {
         return await this._processQueueMessage(callSid, conversationId, queue[0]);
+      }else{
+        return null;
       }
 
       // 2. Queue empty: subscribe with timeout
-      logger.debug('[MessageService] Queue empty, starting subscription...');
-
+      // logger.debug('[MessageService] Queue empty, starting subscription...');
+
+      // return await this._subscribeWithTimeout(callSid, conversationId, ani);
 
     } catch (error) {
       logger.error('[MessageService] getNextMessage error:', error);
@@ -52,8 +55,7 @@ class MessageService {
   async _processQueueMessage(callSid, conversationId, message) {
     logger.verbose(`[MessageService] Processing queue message: ${message.text}`);
 
-    await this.
-    await this.voiceChannel.clearDelayTimeForCallId(callSid);
+    await this.cleanupMessages(conversationId, message, callSid);
 
     return message;
   }
@@ -90,13 +92,17 @@ class MessageService {
     const message = queue[0];
     logger.verbose(`[MessageService] Message received from subscription: ${message.text}`);
 
-    await this.
-    await this.voiceChannel.clearDelayTimeForCallId(callSid);
+    await this.cleanupMessages(conversationId, message, callSid);
 
     onSuccess();
     return message;
   }
 
+  async cleanupMessages(conversationId, message, callSid){
+    await this.tdChannel.removeMessageFromQueue(conversationId, message?._id ?? null);
+    await this.voiceChannel.clearDelayTimeForCallId(callSid);
+  }
+
   /**
    * Create promise that resolves with wait message after timeout.
   */
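
Two things change here: the old two-step cleanup is folded into a single cleanupMessages helper, and getNextMessage now returns null on an empty queue (the subscribe-with-timeout path is commented out). A condensed sketch of the resulting flow, with the queue and the cleanup function passed in as stand-ins rather than the package's own channel objects:

async function getNextMessageSketch(queue, cleanupMessages, callSid, conversationId) {
  if (queue && queue.length > 0) {
    const message = queue[0];
    // Consume the head of the queue and clear the per-call delay timer in one call.
    await cleanupMessages(conversationId, message, callSid);
    return message;
  }
  // Empty queue: no subscription in this release, so the caller receives null and
  // is expected to answer with a redirect TwiML (see the VoiceController hunks above).
  return null;
}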
package/src/services/SessionService.js
CHANGED
@@ -50,12 +50,12 @@ class SessionService {
     let key = await this.integrationService.getKeyFromIntegrations(projectId, 'openai', token);
 
     if (!key) {
-      logger.debug('[SessionService] OpenAI key not in Integrations, checking KB settings...');
+      // logger.debug('[SessionService] OpenAI key not in Integrations, checking KB settings...');
       key = await this.integrationService.getKeyFromKbSettings(projectId, token);
     }
 
     if (!key) {
-      logger.debug('[SessionService] Using public GPT key');
+      // logger.debug('[SessionService] Using public GPT key');
       return { type: 'openai', key: this.config.GPT_KEY, publicKey: true };
     }
 
@@ -73,7 +73,7 @@ class SessionService {
     try {
       const key = await this.integrationService.getKeyFromIntegrations(projectId, 'elevenlabs', token);
       if (key) {
-        logger.debug('[SessionService] ElevenLabs key found');
+        // logger.debug('[SessionService] ElevenLabs key found');
         return { type: 'elevenlabs', key, publicKey: false };
       }
       return null;
@@ -100,7 +100,7 @@ class SessionService {
     };
 
     await this.voiceChannel.setSessionForCallId(callSid, sessionData);
-    logger.debug(`[SessionService] Session created for callSid: ${callSid}`);
+    // logger.debug(`[SessionService] Session created for callSid: ${callSid}`);
 
     return sessionData;
   }
@@ -136,7 +136,7 @@ class SessionService {
   async cleanupSession(callSid, conversationId) {
     await this.voiceChannel.deleteCallKeys(callSid);
     await this.tdChannel.clearQueue(conversationId);
-    logger.debug(`[SessionService] Session cleaned up for callSid: ${callSid}`);
+    // logger.debug(`[SessionService] Session cleaned up for callSid: ${callSid}`);
   }
 }
 
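
The surrounding context shows the OpenAI key resolution order: project integrations first, then knowledge-base settings, then the connector's shared GPT key flagged as publicKey: true so quota can be checked later. A sketch of that chain with integrationService and config as stand-ins; the final return shape for a project-owned key is an assumption that mirrors the elevenlabs branch shown above:

async function resolveOpenAiKey(integrationService, config, projectId, token) {
  let key = await integrationService.getKeyFromIntegrations(projectId, 'openai', token);
  if (!key) {
    // Not configured as an integration: try the knowledge-base settings.
    key = await integrationService.getKeyFromKbSettings(projectId, token);
  }
  if (!key) {
    // Fall back to the shared key; publicKey: true triggers the quota check downstream.
    return { type: 'openai', key: config.GPT_KEY, publicKey: true };
  }
  // Assumed shape for a project-provided key, mirroring the elevenlabs branch.
  return { type: 'openai', key, publicKey: false };
}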
|
@@ -25,13 +25,13 @@ class SpeechService {
|
|
|
25
25
|
*/
|
|
26
26
|
async transcribeAudio(audioFileUrl, callSid, sessionInfo, settings) {
|
|
27
27
|
const attributes = await this.voiceChannel.getSettingsForCallId(callSid);
|
|
28
|
-
logger.debug(`[SpeechService] Transcribing audio: ${audioFileUrl}, provider: ${attributes.VOICE_PROVIDER}`);
|
|
28
|
+
// logger.debug(`[SpeechService] Transcribing audio: ${audioFileUrl}, provider: ${attributes.VOICE_PROVIDER}`);
|
|
29
29
|
|
|
30
30
|
try {
|
|
31
31
|
const text = await this._performSTT(audioFileUrl, attributes, sessionInfo, settings);
|
|
32
32
|
|
|
33
33
|
if (!text) {
|
|
34
|
-
logger.debug('[SpeechService] STT returned empty text');
|
|
34
|
+
// logger.debug('[SpeechService] STT returned empty text');
|
|
35
35
|
return null;
|
|
36
36
|
}
|
|
37
37
|
|
|
@@ -77,7 +77,7 @@ class SpeechService {
|
|
|
77
77
|
// Check quota for public keys
|
|
78
78
|
if (isPublicKey) {
|
|
79
79
|
const hasQuota = await this.aiService.checkQuoteAvailability(sessionInfo.project_id, settings.token);
|
|
80
|
-
logger.debug(`[SpeechService] Quota check: ${hasQuota}`);
|
|
80
|
+
// logger.debug(`[SpeechService] Quota check: ${hasQuota}`);
|
|
81
81
|
|
|
82
82
|
if (!hasQuota) {
|
|
83
83
|
const error = new Error('No quota available');
|