@tiledesk/tiledesk-voice-twilio-connector 0.1.28 → 0.2.0-rc3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +179 -0
- package/README.md +44 -0
- package/index.js +7 -1562
- package/package.json +23 -22
- package/src/app.js +146 -0
- package/src/config/index.js +32 -0
- package/src/controllers/VoiceController.js +488 -0
- package/src/controllers/VoiceController.original.js +811 -0
- package/src/middlewares/httpLogger.js +31 -0
- package/src/models/KeyValueStore.js +78 -0
- package/src/routes/manageApp.js +298 -0
- package/src/routes/voice.js +22 -0
- package/src/services/AiService.js +219 -0
- package/src/services/AiService.sdk.js +367 -0
- package/src/services/IntegrationService.js +74 -0
- package/src/services/MessageService.js +133 -0
- package/src/services/README_SDK.md +107 -0
- package/src/services/SessionService.js +143 -0
- package/src/services/SpeechService.js +134 -0
- package/src/services/TiledeskMessageBuilder.js +135 -0
- package/src/services/TwilioService.js +122 -0
- package/src/services/UploadService.js +78 -0
- package/src/services/channels/TiledeskChannel.js +269 -0
- package/{tiledesk → src/services/channels}/VoiceChannel.js +17 -56
- package/src/services/clients/TiledeskSubscriptionClient.js +78 -0
- package/src/services/index.js +45 -0
- package/src/services/translators/TiledeskTwilioTranslator.js +509 -0
- package/{tiledesk/TiledeskTwilioTranslator.js → src/services/translators/TiledeskTwilioTranslator.original.js} +119 -212
- package/src/utils/fileUtils.js +24 -0
- package/src/utils/logger.js +32 -0
- package/{tiledesk → src/utils}/utils-message.js +6 -21
- package/logs/app.log +0 -3082
- package/routes/manageApp.js +0 -419
- package/tiledesk/KVBaseMongo.js +0 -101
- package/tiledesk/TiledeskChannel.js +0 -363
- package/tiledesk/TiledeskSubscriptionClient.js +0 -135
- package/tiledesk/fileUtils.js +0 -55
- package/tiledesk/services/AiService.js +0 -230
- package/tiledesk/services/IntegrationService.js +0 -81
- package/tiledesk/services/UploadService.js +0 -88
- /package/{winston.js → src/config/logger.js} +0 -0
- /package/{tiledesk → src}/services/voiceEventEmitter.js +0 -0
- /package/{template → src/template}/configure.html +0 -0
- /package/{template → src/template}/css/configure.css +0 -0
- /package/{template → src/template}/css/error.css +0 -0
- /package/{template → src/template}/css/style.css +0 -0
- /package/{template → src/template}/error.html +0 -0
- /package/{tiledesk → src/utils}/constants.js +0 -0
- /package/{tiledesk → src/utils}/errors.js +0 -0
- /package/{tiledesk → src/utils}/utils.js +0 -0

package/src/services/AiService.sdk.js
@@ -0,0 +1,367 @@
+/**
+ * AiService.sdk.js - Alternative implementation using ElevenLabs SDK
+ *
+ * This is a copy of AiService.js that uses the official ElevenLabs SDK
+ * instead of direct HTTP calls via axios.
+ *
+ * To use this version:
+ * 1. Install the ElevenLabs SDK: npm install elevenlabs
+ * 2. Replace the import in your code to use this file instead
+ *
+ * Note: This file maintains the same interface as AiService.js
+ * for backward compatibility.
+ */
+
+var winston = require('../utils/logger');
+const axios = require("axios").default;
+const FormData = require('form-data');
+
+/*ERROR HANDLER*/
+const { ServiceError } = require('../utils/errors');
+
+/*UTILS*/
+const fileUtils = require('../utils/fileUtils.js');
+
+// Try to import ElevenLabs SDK (will fail gracefully if not installed)
+let ElevenLabsClient;
+let ElevenLabs;
+try {
+  const elevenlabsModule = require('elevenlabs');
+  // The SDK exports ElevenLabsClient as a named export
+  ElevenLabsClient = elevenlabsModule.ElevenLabsClient || elevenlabsModule.default?.ElevenLabsClient;
+  ElevenLabs = elevenlabsModule;
+} catch (err) {
+  winston.warn('[AiService.sdk] ElevenLabs SDK not found. Install with: npm install elevenlabs');
+  ElevenLabsClient = null;
+  ElevenLabs = null;
+}
+
+class AiService {
+
+  constructor(config) {
+
+    if (!config) {
+      throw new Error("[AiService] config is mandatory");
+    }
+    if (!config.OPENAI_ENDPOINT) {
+      throw new Error("[AiService] config.OPENAI_ENDPOINT is mandatory");
+    }
+    if (!config.ELEVENLABS_ENDPOINT) {
+      throw new Error("[AiService] config.ELEVENLABS_ENDPOINT is mandatory");
+    }
+    if (!config.API_URL) {
+      throw new Error("[AiService] config.API_URL is mandatory");
+    }
+
+    this.OPENAI_ENDPOINT = config.OPENAI_ENDPOINT;
+    this.ELEVENLABS_ENDPOINT = config.ELEVENLABS_ENDPOINT;
+    this.API_URL = config.API_URL;
+    this.useSDK = config.useSDK !== false; // Default to true for SDK version
+
+    // Check if SDK is available (we'll create client instances per-request with API keys)
+    this.sdkAvailable = false;
+
+    if (this.useSDK && ElevenLabsClient) {
+      this.sdkAvailable = true;
+      winston.info('[AiService.sdk] ElevenLabs SDK available - will use SDK for ElevenLabs operations');
+    } else if (this.useSDK && !ElevenLabsClient) {
+      winston.warn('[AiService.sdk] ElevenLabs SDK not installed. Install with: npm install elevenlabs');
+      winston.warn('[AiService.sdk] Will use HTTP fallback for ElevenLabs operations');
+      this.sdkAvailable = false;
+    } else {
+      winston.info('[AiService.sdk] SDK usage disabled, will use HTTP for ElevenLabs operations');
+      this.sdkAvailable = false;
+    }
+  }
+
+  async speechToText(fileUrl, model, GPT_KEY) {
+    let start_time = new Date();
+    winston.debug("[AiService] speechToText url: " + fileUrl);
+
+    try {
+      let file;
+      try {
+        file = await fileUtils.downloadFromUrl(fileUrl);
+      } catch (err) {
+        winston.error("[AiService] err while downloadFromUrl: ", err);
+        throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file:', fileUrl);
+      }
+
+      if (!file) {
+        winston.debug('[AiService] OPENAI speechToText file NOT EXIST: . . . return');
+        throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
+      }
+
+      const formData = new FormData();
+      formData.append('file', file, { filename: 'audiofile.wav', contentType: 'audio/wav' });
+      formData.append('model', model);
+
+      const resbody = await axios({
+        url: `${this.OPENAI_ENDPOINT}/audio/transcriptions`,
+        headers: {
+          ...formData.getHeaders(),
+          "Authorization": "Bearer " + GPT_KEY
+        },
+        data: formData,
+        method: 'POST'
+      });
+
+      let end_time = new Date();
+      winston.verbose(`-----> [AiService] OpenAI speechToText time elapsed: ${end_time - start_time} ms`);
+      return resbody.data.text;
+
+    } catch (err) {
+      if (err instanceof ServiceError) throw err;
+      winston.error("[AiService] OpenAI STT error", err.message);
+      throw new ServiceError('AISERVICE_FAILED', 'OpenAI STT service failed with err:', err);
+    }
+  }
+
+  async textToSpeech(text, name, model, GPT_KEY) {
+    let start_time = new Date();
+    winston.debug('[AiService] textToSpeech text: ' + text);
+
+    const data = {
+      model: model,
+      input: text,
+      voice: name,
+    };
+
+    winston.debug('[AiService] textToSpeech config:', data);
+
+    try {
+      const response = await axios({
+        url: `${this.OPENAI_ENDPOINT}/audio/speech`,
+        headers: {
+          "Content-Type": "application/json",
+          "Authorization": "Bearer " + GPT_KEY
+        },
+        responseType: 'arraybuffer',
+        data: data,
+        method: "POST",
+      });
+
+      let end_time = new Date();
+      winston.verbose(`-----> [AiService] textToSpeech time elapsed: ${end_time - start_time} ms`);
+      return response?.data;
+    } catch (err) {
+      winston.error("[AiService] textToSpeech error: ", err.response?.data);
+      throw new ServiceError('AISERVICE_FAILED', 'OpenAI textToSpeech API failed with err:', err);
+    }
+  }
+
+  /**
+   * Speech-to-Text using ElevenLabs SDK
+   * Falls back to HTTP if SDK is not available or disabled
+   */
+  async speechToTextElevenLabs(fileUrl, model, language, API_KEY) {
+    let start_time = new Date();
+    winston.debug("[AiService.sdk] ELEVEN Labs speechToText url: " + fileUrl);
+
+    // If SDK is not available or disabled, fall back to HTTP
+    if (!this.useSDK || !this.sdkAvailable || !ElevenLabsClient) {
+      winston.debug("[AiService.sdk] SDK not available or disabled, using HTTP");
+      return this._speechToTextElevenLabsHTTP(fileUrl, model, language, API_KEY);
+    }
+
+    try {
+      let file;
+      try {
+        file = await fileUtils.downloadFromUrl(fileUrl);
+      } catch (err) {
+        winston.error("[AiService.sdk] err: ", err);
+        throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file:', fileUrl);
+      }
+
+      if (!file) {
+        winston.debug('[AiService.sdk] ELEVEN Labs speechToText file NOT EXIST: . . . return');
+        throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
+      }
+
+      // Use ElevenLabs SDK for STT
+      // Create a temporary client instance with the API key for this request
+      const client = new ElevenLabsClient({
+        apiKey: API_KEY
+      });
+
+      // Use the convert method from the client
+      const transcription = await client.convert({
+        audio: file,
+        model_id: model || "scribe_v1",
+        language_code: language || "en"
+      });
+
+      let end_time = new Date();
+      winston.verbose(`-----> [AiService.sdk] ELEVEN Labs speechToText (SDK) time elapsed: ${end_time - start_time} ms`);
+
+      // SDK typically returns text directly or in a response object
+      return transcription.text || transcription;
+
+    } catch (err) {
+      winston.error("[AiService.sdk] ElevenLabs STT SDK error, falling back to HTTP:", err.message);
+      // Fallback to HTTP on SDK error
+      return this._speechToTextElevenLabsHTTP(fileUrl, model, language, API_KEY);
+    }
+  }
+
+  /**
+   * HTTP fallback for STT (original implementation)
+   */
+  async _speechToTextElevenLabsHTTP(fileUrl, model, language, API_KEY) {
+    let start_time = new Date();
+    winston.debug("[AiService.sdk] ELEVEN Labs speechToText (HTTP fallback) url: " + fileUrl);
+
+    try {
+      let file;
+      try {
+        file = await fileUtils.downloadFromUrl(fileUrl);
+      } catch (err) {
+        winston.error("[AiService.sdk] err: ", err);
+        throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file:', fileUrl);
+      }
+
+      if (!file) {
+        winston.debug('[AiService.sdk] ELEVEN Labs speechToText file NOT EXIST: . . . return');
+        throw new ServiceError('AISERVICE_FAILED', 'Cannot download audio file: file is null');
+      }
+
+      const formData = new FormData();
+      formData.append('file', file, { filename: 'audiofile.wav', contentType: 'audio/wav' });
+      formData.append('model_id', model || "scribe_v1");
+      formData.append('language_code', language || "en");
+
+      const resbody = await axios({
+        url: `${this.ELEVENLABS_ENDPOINT}/v1/speech-to-text`,
+        headers: {
+          ...formData.getHeaders(),
+          "xi-api-key": API_KEY
+        },
+        data: formData,
+        method: 'POST'
+      });
+
+      let end_time = new Date();
+      winston.verbose(`-----> [AiService.sdk] ELEVEN Labs speechToText (HTTP) time elapsed: ${end_time - start_time} ms`);
+      return resbody.data.text;
+
+    } catch (err) {
+      if (err instanceof ServiceError) throw err;
+      winston.error("[AiService.sdk] ElevenLabs STT HTTP error", err.message);
+      throw new ServiceError('AISERVICE_FAILED', 'ElevenLabs STT service failed with err:', err);
+    }
+  }
+
+  /**
+   * Text-to-Speech using ElevenLabs SDK
+   * Falls back to HTTP if SDK is not available or disabled
+   */
+  async textToSpeechElevenLabs(text, voice_id, model, language_code, API_KEY) {
+    let start_time = new Date();
+    winston.debug('[AiService.sdk] ELEVEN Labs textToSpeech config:', { text, voice_id, model, language_code });
+
+    // If SDK is not available or disabled, fall back to HTTP
+    if (!this.useSDK || !this.sdkAvailable || !ElevenLabsClient) {
+      winston.debug("[AiService.sdk] SDK not available or disabled, using HTTP");
+      return this._textToSpeechElevenLabsHTTP(text, voice_id, model, language_code, API_KEY);
+    }
+
+    try {
+      // Use ElevenLabs SDK for TTS
+      // Create a temporary client instance with the API key for this request
+      const client = new ElevenLabsClient({
+        apiKey: API_KEY
+      });
+
+      // Use the textToSpeech method from the client
+      const audio = await client.textToSpeech.convert(voice_id, {
+        text: text,
+        model_id: model || "eleven_multilingual_v2",
+        language_code: language_code || "en",
+        output_format: "mp3_44100_128" // Match original format
+      });
+
+      let end_time = new Date();
+      winston.verbose(`-----> [AiService.sdk] ELEVEN Labs textToSpeech (SDK) time elapsed: ${end_time - start_time} ms`);
+
+      // SDK typically returns audio buffer directly
+      // Convert to Buffer if needed
+      if (Buffer.isBuffer(audio)) {
+        return audio;
+      } else if (audio instanceof ArrayBuffer) {
+        return Buffer.from(audio);
+      } else if (typeof audio === 'string') {
+        // If base64, decode it
+        return Buffer.from(audio, 'base64');
+      } else {
+        // Try to get audio data from response object
+        return audio.data || audio.audio || audio;
+      }
+
+    } catch (err) {
+      winston.error("[AiService.sdk] ElevenLabs TTS SDK error, falling back to HTTP:", err.message);
+      // Fallback to HTTP on SDK error
+      return this._textToSpeechElevenLabsHTTP(text, voice_id, model, language_code, API_KEY);
+    }
+  }
+
+  /**
+   * HTTP fallback for TTS (original implementation)
+   */
+  async _textToSpeechElevenLabsHTTP(text, voice_id, model, language_code, API_KEY) {
+    let start_time = new Date();
+    winston.debug('[AiService.sdk] ELEVEN Labs textToSpeech (HTTP fallback) config:', { text, voice_id, model, language_code });
+
+    const data = {
+      model_id: model || "eleven_multilingual_v2",
+      text: text,
+      language_code: language_code || "en"
+    };
+
+    try {
+      const response = await axios({
+        url: `${this.ELEVENLABS_ENDPOINT}/v1/text-to-speech/${voice_id}?output_format=mp3_44100_128`,
+        headers: {
+          "Content-Type": "application/json",
+          "xi-api-key": API_KEY
+        },
+        responseType: 'arraybuffer',
+        data: data,
+        method: "POST",
+      });
+
+      let end_time = new Date();
+      winston.verbose(`-----> [AiService.sdk] ELEVEN Labs textToSpeech (HTTP) time elapsed: ${end_time - start_time} ms`);
+      return response?.data;
+    } catch (err) {
+      winston.error("[AiService.sdk] ELEVEN Labs textToSpeech HTTP error: ", err);
+      throw new ServiceError('AISERVICE_FAILED', 'ElevenLabs textToSpeech API failed with err:', err);
+    }
+  }
+
+  async checkQuoteAvailability(projectId, token) {
+    winston.debug("[AiService] checkQuoteAvailability for project: " + projectId);
+
+    try {
+      const resbody = await axios({
+        url: `${this.API_URL}/${projectId}/quotes/tokens`,
+        headers: {
+          'Content-Type': 'application/json',
+          'Authorization': token
+        },
+        method: 'GET'
+      });
+
+      if (resbody && resbody.data?.isAvailable === true) {
+        return true;
+      } else {
+        return false;
+      }
+    } catch (err) {
+      winston.error("[AiService] checkQuoteAvailability error: ", err.response?.data);
+      throw new ServiceError('AISERVICE_FAILED', 'checkQuoteAvailability API failed with err:', err);
+    }
+  }
+}
+
+module.exports = { AiService };
+
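
For orientation, here is a minimal usage sketch of the `AiService` class added above. It only exercises the methods and config keys defined in the hunk; the endpoint URLs, voice id, and key arguments are placeholders, not values taken from the package.

```js
// Hypothetical usage sketch; endpoint values, voice id and keys are placeholders.
const { AiService } = require('./src/services/AiService.sdk');

const aiService = new AiService({
  OPENAI_ENDPOINT: 'https://api.openai.com/v1',     // placeholder
  ELEVENLABS_ENDPOINT: 'https://api.elevenlabs.io', // placeholder
  API_URL: 'https://tiledesk.example.com/api',      // placeholder Tiledesk API base URL
  useSDK: true                                      // set to false to force the HTTP path
});

async function transcribeAndSpeak(recordingUrl, elevenLabsKey) {
  // STT: uses the ElevenLabs SDK when installed, otherwise the HTTP fallback.
  const text = await aiService.speechToTextElevenLabs(recordingUrl, 'scribe_v1', 'en', elevenLabsKey);

  // TTS: returns audio data (mp3_44100_128) as a Buffer/arraybuffer, per the code above.
  const audio = await aiService.textToSpeechElevenLabs(text, 'someVoiceId', 'eleven_multilingual_v2', 'en', elevenLabsKey);
  return audio;
}
```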

package/src/services/IntegrationService.js
@@ -0,0 +1,74 @@
+const winston = require('../utils/logger');
+const axios = require("axios").default;
+
+class IntegrationService {
+
+  constructor(config) {
+
+    if (!config) {
+      throw new Error("[IntegrationService] config is mandatory");
+    }
+    if (!config.API_URL) {
+      throw new Error("[IntegrationService] config.API_URL is mandatory");
+    }
+
+    this.API_URL = config.API_URL;
+
+  }
+
+
+  async getKeyFromIntegrations(id_project, integration_name, token) {
+    winston.debug('[IntegrationService] getKeyFromIntegrations id_project: ' + id_project + ' ' + integration_name);
+
+    try {
+      const response = await axios({
+        url: this.API_URL + "/" + id_project + "/integration/name/" + integration_name,
+        headers: {
+          'Content-Type': 'application/json',
+          'Authorization': token
+        },
+        data: {},
+        method: "GET",
+      });
+
+      if (!response.data || !response.data.value) {
+        return null;
+      }
+
+      return response.data.value.apikey;
+    } catch (err) {
+      winston.error("[IntegrationService] getKeyFromIntegrations error: ", err.response?.data || err.message);
+      return null;
+    }
+  }
+
+  async getKeyFromKbSettings(id_project, token) {
+    winston.debug('[IntegrationService] getKeyFromKbSettings id_project:', id_project);
+
+    try {
+      const response = await axios({
+        url: this.API_URL + "/" + id_project + "/kbsettings",
+        headers: {
+          'Content-Type': 'application/json',
+          'Authorization': token
+        },
+        data: {},
+        method: "GET",
+      });
+
+      if (!response.data || !response.data.gptkey) {
+        return null;
+      }
+
+      return response.data.gptkey;
+    } catch (err) {
+      winston.error("[IntegrationService] getKeyFromKbSettings error: ", err.response?.data || err.message);
+      return null;
+    }
+  }
+
+
+
+}
+
+module.exports = { IntegrationService };
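
A brief, hedged sketch of how `IntegrationService` might be called to resolve API keys; the base URL, project id, token, and the `'elevenlabs'` integration name are illustrative assumptions, not values confirmed by the diff.

```js
// Hypothetical usage sketch; URL, ids, token and integration name are placeholders.
const { IntegrationService } = require('./src/services/IntegrationService');

const integrationService = new IntegrationService({ API_URL: 'https://tiledesk.example.com/api' });

async function resolveKeys(projectId, token) {
  // Returns the integration's apikey, or null when missing or on HTTP error.
  const elevenLabsKey = await integrationService.getKeyFromIntegrations(projectId, 'elevenlabs', token);

  // Returns the project's gptkey from KB settings, or null when absent.
  const gptKey = await integrationService.getKeyFromKbSettings(projectId, token);

  return { elevenLabsKey, gptKey };
}
```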

package/src/services/MessageService.js
@@ -0,0 +1,133 @@
+const logger = require('../utils/logger');
+
+/**
+ * Service responsible for message queue operations and polling logic.
+ * Handles retrieving messages from queue with subscription fallback.
+ */
+class MessageService {
+  constructor({ voiceChannel, tdChannel, config }) {
+    if (!voiceChannel) throw new Error('[MessageService] voiceChannel is required');
+    if (!tdChannel) throw new Error('[MessageService] tdChannel is required');
+    if (!config) throw new Error('[MessageService] config is required');
+
+    this.voiceChannel = voiceChannel;
+    this.tdChannel = tdChannel;
+    this.maxPollingTime = config.MAX_POLLING_TIME || 5;
+  }
+
+  /**
+   * Get next message from queue with subscription fallback.
+   * If queue is empty, subscribes to topic and waits for message or timeout.
+   *
+   * @param {string} callSid - The call session ID
+   * @param {string} conversationId - The conversation ID
+   * @param {string} ani - Caller phone number (for wait message)
+   * @returns {Promise<Object>} The message object
+   */
+  async getNextMessage(callSid, conversationId, ani) {
+    const startTime = Date.now();
+
+    try {
+      // 1. First attempt: read from queue
+      const queue = await this.tdChannel.getMessagesFromQueue(conversationId);
+      logger.debug(`[MessageService] Queue length: ${queue.length}`);
+
+      if (queue && queue.length > 0) {
+        return await this._processQueueMessage(callSid, conversationId, queue[0]);
+      }
+
+      // 2. Queue empty: subscribe with timeout
+      logger.debug('[MessageService] Queue empty, starting subscription...');
+      return await this._subscribeWithTimeout(callSid, conversationId, ani);
+
+    } catch (error) {
+      logger.error('[MessageService] getNextMessage error:', error);
+      throw error;
+    }
+  }
+
+  /**
+   * Process a message from the queue.
+   */
+  async _processQueueMessage(callSid, conversationId, message) {
+    logger.verbose(`[MessageService] Processing queue message: ${message.text}`);
+
+    await this.tdChannel.removeMessageFromQueue(conversationId, message._id);
+    await this.voiceChannel.clearDelayTimeForCallId(callSid);
+
+    return message;
+  }
+
+  /**
+   * Subscribe to topic with timeout fallback.
+   * Returns message if received, or wait message if timeout.
+   */
+  async _subscribeWithTimeout(callSid, conversationId, ani) {
+    let timeoutId;
+
+    const subscriptionPromise = this._createSubscriptionPromise(callSid, conversationId, () => {
+      if (timeoutId) clearTimeout(timeoutId);
+    });
+
+    const timeoutPromise = this._createTimeoutPromise(callSid, ani, (id) => {
+      timeoutId = id;
+    });
+
+    return Promise.race([subscriptionPromise, timeoutPromise]);
+  }
+
+  /**
+   * Create promise that resolves when message received via subscription.
+   */
+  async _createSubscriptionPromise(callSid, conversationId, onSuccess) {
+    await this.tdChannel.subscribeToTopic(conversationId);
+    const queue = await this.tdChannel.getMessagesFromQueue(conversationId);
+
+    if (!queue || queue.length === 0) {
+      throw new Error('[MessageService] No message received after subscription');
+    }
+
+    const message = queue[0];
+    logger.verbose(`[MessageService] Message received from subscription: ${message.text}`);
+
+    await this.tdChannel.removeMessageFromQueue(conversationId, message._id);
+    await this.voiceChannel.clearDelayTimeForCallId(callSid);
+
+    onSuccess();
+    return message;
+  }
+
+  /**
+   * Create promise that resolves with wait message after timeout.
+   */
+  _createTimeoutPromise(callSid, ani, setTimeoutId) {
+    return new Promise((resolve) => {
+      const timeoutId = setTimeout(async () => {
+        logger.debug('[MessageService] Subscription timeout, generating wait message...');
+
+        const delayTime = await this.voiceChannel.getNextDelayTimeForCallId(callSid);
+        const waitMessage = await this.tdChannel.generateWaitTdMessage(ani, delayTime);
+        await this.voiceChannel.saveDelayIndexForCallId(callSid);
+
+        resolve(waitMessage);
+      }, this.maxPollingTime * 1000);
+
+      setTimeoutId(timeoutId);
+    });
+  }
+
+  /**
+   * Generate a wait message with appropriate delay.
+   * @param {string} callSid - The call session ID
+   * @param {string} ani - Caller identifier
+   * @returns {Promise<Object>} Wait message object
+   */
+  async generateWaitMessage(callSid, ani) {
+    const delayTime = await this.voiceChannel.getNextDelayTimeForCallId(callSid);
+    const waitMessage = await this.tdChannel.generateWaitTdMessage(ani, delayTime);
+    await this.voiceChannel.saveDelayIndexForCallId(callSid);
+    return waitMessage;
+  }
+}
+
+module.exports = { MessageService };
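
`MessageService` races a queue subscription against a timeout and falls back to a generated wait message. Below is a hedged sketch of how a voice controller might drive it; the channel objects stand in for the package's real `VoiceChannel`/`TiledeskChannel` instances and are not defined here.

```js
// Hypothetical usage sketch; voiceChannel and tdChannel are the app's real channel instances.
const { MessageService } = require('./src/services/MessageService');

async function nextBotReply(voiceChannel, tdChannel, callSid, conversationId, ani) {
  const messageService = new MessageService({
    voiceChannel,
    tdChannel,
    config: { MAX_POLLING_TIME: 5 } // seconds to wait before generating a wait message
  });

  // Resolves with a queued/subscribed Tiledesk message, or with a generated
  // wait message if nothing arrives within MAX_POLLING_TIME seconds.
  return messageService.getNextMessage(callSid, conversationId, ani);
}
```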

package/src/services/README_SDK.md
@@ -0,0 +1,107 @@
+# ElevenLabs SDK Implementation
+
+This document describes how to use the dynamic implementation that can rely on either the official ElevenLabs SDK or direct HTTP calls, configured through an environment variable.
+
+## Installation
+
+To use the SDK, install the official ElevenLabs package:
+
+```bash
+npm install elevenlabs
+```
+
+**Note**: if the SDK is not installed, the system automatically falls back to HTTP calls.
+
+## Configuration
+
+The system now supports switching to the SDK dynamically via an environment variable. No code changes are required.
+
+### Enabling the SDK
+
+Set the `USE_ELEVENLABS_SDK` environment variable:
+
+```bash
+# In the .env file
+USE_ELEVENLABS_SDK=true
+
+# Or when starting the application
+USE_ELEVENLABS_SDK=true npm start
+```
+
+### Disabling the SDK (use HTTP)
+
+```bash
+# In the .env file
+USE_ELEVENLABS_SDK=false
+
+# Or omit the variable (default: HTTP)
+```
+
+## How It Works
+
+The system dynamically loads the correct implementation:
+
+1. **With `USE_ELEVENLABS_SDK=true`**:
+   - Uses `AiService.sdk.js`, which attempts to use the SDK
+   - If the SDK is not installed or fails, it automatically falls back to HTTP
+
+2. **With `USE_ELEVENLABS_SDK=false` or unset**:
+   - Uses `AiService.js` with direct HTTP calls (original behavior)
+
+No code files need to be modified.
+
+## Advantages of the SDK
+
+1. **Improved error handling**: the SDK handles retries and error handling automatically
+2. **Type safety**: if you use TypeScript, the SDK provides types
+3. **Automatic updates**: the SDK is kept up to date with new features
+4. **Performance**: possible internal optimizations in the SDK
+
+## Automatic Fallback
+
+The `AiService.sdk.js` implementation includes an automatic fallback:
+- If the SDK is not installed, HTTP calls are used (original behavior)
+- If the SDK fails, it automatically falls back to HTTP calls
+- No changes to existing code are required
+
+## Notes on the SDK API
+
+The ElevenLabs SDK for Node.js may have a slightly different API from the one documented here.
+Check the official documentation: https://elevenlabs.io/docs/api-reference
+
+If the SDK API differs, adjust the methods in `AiService.sdk.js`:
+- `speechToTextElevenLabs()` - for STT
+- `textToSpeechElevenLabs()` - for TTS
+
+## Testing
+
+To test the SDK implementation:
+
+1. Install the SDK: `npm install elevenlabs`
+2. Set `USE_ELEVENLABS_SDK=true` in the `.env` file or as an environment variable
+3. Run the application: `npm start`
+4. Check the logs:
+   - Look for `"Using ElevenLabs SDK implementation"` to confirm the SDK is in use
+   - Look for `"[AiService.sdk] ELEVEN Labs ... (SDK)"` to see the SDK calls
+   - Look for `"[AiService.sdk] ... (HTTP)"` to see the HTTP fallback
+
+### Testing with HTTP (original behavior)
+
+1. Set `USE_ELEVENLABS_SDK=false` or omit the variable
+2. Run the application
+3. Check the logs: `"Using ElevenLabs HTTP implementation"`
+
+## Troubleshooting
+
+### SDK not found
+If you see the `ElevenLabs SDK not found` warning, install the package:
+```bash
+npm install elevenlabs
+```
+
+### SDK API errors
+If the SDK exposes a different API, adjust the methods in `AiService.sdk.js` to match the official SDK documentation.
+
+### Fallback to HTTP
+If the SDK fails, the system automatically uses HTTP calls. Check the logs to see which method is in use.
+
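
The README above describes an environment-driven switch between `AiService.js` and `AiService.sdk.js`; the selection code itself is not part of this excerpt (it presumably lives in `package/src/services/index.js`). A minimal, purely illustrative sketch of such a switch, assuming both modules export the same interface:

```js
// Illustrative sketch only; the package's real loader is not shown in this diff.
// Picks the SDK-backed AiService when USE_ELEVENLABS_SDK=true, otherwise the HTTP implementation.
const useSdk = process.env.USE_ELEVENLABS_SDK === 'true';

const { AiService } = useSdk
  ? require('./AiService.sdk')
  : require('./AiService');

module.exports = { AiService };
```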