@tiledesk/tiledesk-voice-twilio-connector 0.1.22 → 0.1.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -58,6 +58,7 @@ let API_URL = null;
58
58
  let BASE_URL = null;
59
59
  let BASE_FILE_URL = null;
60
60
  let OPENAI_ENDPOINT = null;
61
+ let ELEVENLABS_ENDPOINT = null;
61
62
  let REDIS_HOST = null;
62
63
  let REDIS_PORT = null;
63
64
  let REDIS_PASSWORD = null;
@@ -164,7 +165,7 @@ router.post('/webhook/:id_project', async (req, res) => {
164
165
  winston.debug("(voice) conversation returned:"+ conversation_id);
165
166
 
166
167
 
167
- //GET AND SAVE GPT-KET IF
168
+ //GET AND SAVE GPT-KEY IF PRESENT
168
169
  let integrations = [], publicKey = false;
169
170
  let key = await integrationService.getKeyFromIntegrations(project_id, 'openai', settings.token)
170
171
  if (!key) {
@@ -179,6 +180,12 @@ router.post('/webhook/:id_project', async (req, res) => {
179
180
  }
180
181
  integrations.push({type: 'openai', key: key, publicKey: publicKey})
181
182
 
183
+ let eleven_labs = await integrationService.getKeyFromIntegrations(project_id, 'elevenlabs', settings.token)
184
+ if (eleven_labs) {
185
+ winston.debug("(voice) - Key found in Integrations: "+ eleven_labs);
186
+ integrations.push({type: 'elevenlabs', key: eleven_labs, publicKey: false})
187
+ }
188
+
182
189
  //save data to redis
183
190
  let session_data = {
184
191
  from: from,
@@ -207,7 +214,7 @@ router.post('/webhook/:id_project', async (req, res) => {
207
214
  attributes: {
208
215
  subtype: 'info',
209
216
  payload: {
210
- ... req.query //send all attributes back to chatbot
217
+ ... req.body //send all attributes back to chatbot
211
218
  }
212
219
  },
213
220
  channel: { name: CHANNEL_NAME },
@@ -363,6 +370,8 @@ async function getMessage(callSid, from, project_id, conversation_id){
363
370
  await tdChannel.removeMessageFromQueue(conversation_id, message._id)
364
371
  //reset delayIndex for wait command message time
365
372
  await voiceChannel.clearDelayTimeForCallId(callSid)
373
+ //manage attributes for current callId
374
+ await voiceChannel.saveSettingsForCallId(message.attributes, callSid)
366
375
 
367
376
  resolve(message);
368
377
  return;
@@ -918,23 +927,16 @@ router.post('/record/:callSid/',async (req, res) => {
918
927
  return res.status(404).send({error: "VOICE Channel not already connected"})
919
928
  }
920
929
 
921
- //SPEECH TO TEXT
922
- let key = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPEN_AI))?.key
923
- let publicKey = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPEN_AI))?.publicKey
924
- //check quotes if user is using public GPT_KEY
925
- if(publicKey){
926
- let keep_going = await aiService.checkQuoteAvailability(project_id, user.token);
927
- if(!keep_going){
928
- //no toke is available
929
- }
930
- }
931
930
 
932
- let textMessage = await aiService.speechToText(audioFileUrl, vxmlAttributes.STT_MODEL, key).catch((err)=>{
933
- console.log('errr while transcript', err.response?.data)
934
- })
935
- console.log('(voice) Message captured after STT -->', textMessage)
931
+ let attributes = await voiceChannel.getSettingsForCallId(callSid);
932
+ console.log('attributessss', attributes)
936
933
 
937
- if(!textMessage){
934
+ //SPEECH TO TEXT
935
+ console.log('getting text message . . . ', audioFileUrl, attributes.STT_MODEL)
936
+ let tiledeskMessage = await generateSTT(audioFileUrl, attributes, sessionInfo, settings)
937
+ console.log('(voice) Message captured after STT -->', tiledeskMessage)
938
+
939
+ if(!tiledeskMessage){
938
940
  //case NO_INPUT
939
941
  const queryString = utils.buildQueryString(req.query);
940
942
  winston.debug('case no input.. redirect '+ queryString)
@@ -953,13 +955,7 @@ router.post('/record/:callSid/',async (req, res) => {
953
955
  })
954
956
  }
955
957
 
956
-
957
- let tiledeskMessage= {
958
- text:textMessage,
959
- senderFullname: from,
960
- type: 'text',
961
- channel: { name: CHANNEL_NAME }
962
- };
958
+
963
959
  let tdMessage = await tdChannel.send(tiledeskMessage, user.token, conversation_id);
964
960
  winston.debug("message sent : ", tdMessage);
965
961
 
@@ -979,6 +975,77 @@ router.post('/record/:callSid/',async (req, res) => {
979
975
 
980
976
  })
981
977
 
978
/**
 * Speech-to-text dispatcher for the /record route: transcribes the recorded
 * audio with the provider selected in attributes.VOICE_PROVIDER and returns
 * the Tiledesk message to send to the conversation.
 *
 * @param {string} audioFileUrl - URL of the recorded audio file.
 * @param {object} attributes   - per-call settings (VOICE_PROVIDER, STT_MODEL, TTS_LANGUAGE, ...).
 * @param {object} sessionInfo  - session data from redis (integrations, from, project_id).
 * @param {object} settings     - connector settings (token).
 * @returns {object|null} the Tiledesk message, a '/close' info message when the
 *   public-key quota is exhausted, or null when no transcript was produced so
 *   the caller can run its NO_INPUT handling.
 */
async function generateSTT(audioFileUrl, attributes, sessionInfo, settings){

    winston.debug("(voice) generateSTT: "+ attributes.VOICE_PROVIDER);

    //null (not {}) so the caller's `if(!tiledeskMessage)` NO_INPUT branch
    //fires when no provider matched or no transcript was produced
    let tiledeskMessage = null;
    let text = null;

    switch(attributes.VOICE_PROVIDER){
        case VOICE_PROVIDER.OPENAI: {
            const GPT_KEY = sessionInfo.integrations.find((el) => el.type === VOICE_PROVIDER.OPENAI)?.key;
            const publicKey = sessionInfo.integrations.find((el) => el.type === VOICE_PROVIDER.OPENAI)?.publicKey;
            //quota is only metered when the shared/public GPT key is in use
            if(publicKey){
                let keep_going = await aiService.checkQuoteAvailability(sessionInfo.project_id, settings.token).catch((err)=>{
                    winston.error('errr while checkQuoteAvailability for project:', sessionInfo.project_id, err.response?.data)
                })
                winston.verbose('(voice) checkQuoteAvailability return: '+ keep_going);
                if(!keep_going){
                    //no token is available --> close conversation
                    return tiledeskMessage = {
                        //text:'\\close',
                        text:'/close',
                        senderFullname: sessionInfo.from,
                        type: 'text',
                        channel: { name: CHANNEL_NAME },
                        attributes: {
                            subtype: "info",
                            action: 'close'+JSON.stringify({event: 'quota_exceeded'}),
                            payload: {
                                catchEvent: 'quota_exceeded'
                            },
                            timestamp: 'xxxxxx' // NOTE(review): placeholder value — confirm what the receiver expects here
                        }
                    };
                }
            }

            text = await aiService.speechToText(audioFileUrl, attributes.STT_MODEL, GPT_KEY).catch((err)=>{
                winston.error('errr while transcript', err.response?.data)
            })
            break;
        }
        case VOICE_PROVIDER.ELEVENLABS: {
            const ELEVENLABS_APIKEY = sessionInfo.integrations.find((el) => el.type === VOICE_PROVIDER.ELEVENLABS)?.key;
            //fall back to English when the flow did not set a TTS language
            const ttsLanguage = attributes.TTS_LANGUAGE || 'en';
            //NOTE: `aiService` is the module-level service; this is a plain
            //function, so `this.aiService` (as originally written) is undefined
            text = await aiService.speechToTextElevenLabs(
                audioFileUrl,
                attributes.STT_MODEL,
                ttsLanguage,
                ELEVENLABS_APIKEY
            ).catch((err) => {
                winston.error('errr while creating elevenlabs audio message', err?.response?.data);
            });
            break;
        }
    }

    if(text){
        tiledeskMessage = {
            text: text,
            senderFullname: sessionInfo.from,
            type: 'text',
            channel: { name: CHANNEL_NAME }
        };
    }

    return tiledeskMessage
}
1047
+
1048
+
982
1049
 
983
1050
  router.get('/addon/transcript', async (req, res) => {
984
1051
  winston.verbose("(vxml) called GET /transcript query-->" , req.query);
@@ -1225,6 +1292,9 @@ async function startApp(settings, callback) {
1225
1292
  if(settings.OPENAI_ENDPOINT){
1226
1293
  OPENAI_ENDPOINT = settings.OPENAI_ENDPOINT
1227
1294
  }
1295
+ if(settings.ELEVENLABS_ENDPOINT){
1296
+ ELEVENLABS_ENDPOINT = settings.ELEVENLABS_ENDPOINT
1297
+ }
1228
1298
 
1229
1299
  if(settings.MAX_POLLING_TIME){
1230
1300
  MAX_POLLING_TIME = settings.MAX_POLLING_TIME;
@@ -1249,6 +1319,7 @@ async function startApp(settings, callback) {
1249
1319
  //init Services
1250
1320
  aiService = new AiService({
1251
1321
  OPENAI_ENDPOINT: OPENAI_ENDPOINT,
1322
+ ELEVENLABS_ENDPOINT: ELEVENLABS_ENDPOINT,
1252
1323
  API_URL: API_URL
1253
1324
  })
1254
1325
  integrationService = new IntegrationService({
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tiledesk/tiledesk-voice-twilio-connector",
3
- "version": "0.1.22",
3
+ "version": "0.1.23",
4
4
  "description": "Tiledesk VOICE Twilio connector",
5
5
  "license": "MIT",
6
6
  "author": "Gabriele Panico",
@@ -102,7 +102,7 @@ class TiledeskTwilioTranslator {
102
102
 
103
103
 
104
104
  this.user = sessionInfo.user
105
- this.voiceSettings = sessionInfo.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))
105
+ this.integrations = sessionInfo.integrations
106
106
 
107
107
 
108
108
  const xml = xmlbuilder.create("Response", {});
@@ -310,7 +310,7 @@ class TiledeskTwilioTranslator {
310
310
  gather.att("action", this.BASE_URL + '/speechresult/' + xmlAttributes.callSid + queryUrl)
311
311
  .att("method", "POST")
312
312
  .att("language", xmlAttributes.TTS_VOICE_LANGUAGE)
313
- .att('speechTimeout', "0")
313
+ .att('speechTimeout', "auto")
314
314
 
315
315
  //if(xmlAttributes && xmlAttributes.noInputTimeout){
316
316
  // gather.att("timeout", xmlAttributes.noInputTimeout/1000 ).up();
@@ -534,7 +534,7 @@ class TiledeskTwilioTranslator {
534
534
  if (command.type === "message") {
535
535
  //case type: TEXT
536
536
  if(command.message.type === 'text'){
537
- if(that.voiceProvider === VOICE_PROVIDER.OPENAI){
537
+ if(that.voiceProvider !== VOICE_PROVIDER.TWILIO){
538
538
  let voiceMessageUrl = await that.generateTTS(command.message.text, attributes)
539
539
  rootEle.ele('Play', {}, voiceMessageUrl )
540
540
  }else{
@@ -622,11 +622,24 @@ class TiledeskTwilioTranslator {
622
622
  }
623
623
 
624
624
  async generateTTS(text, attributes){
625
- let GPT_KEY = this.voiceSettings?.key
626
625
 
627
- let audioData = await this.aiService.textToSpeech(text, attributes.TTS_VOICE_NAME, attributes.TTS_MODEL, GPT_KEY).catch((err)=>{
628
- console.log('errr while creating audio message', err.response?.data)
629
- })
626
+ let audioData = null;
627
+ switch(this.voiceProvider){
628
+ case VOICE_PROVIDER.OPENAI:
629
+ let GPT_KEY = this.integrations.find((el => el.type === VOICE_PROVIDER.OPENAI))?.key
630
+ audioData = await this.aiService.textToSpeech(text, attributes.TTS_VOICE_NAME, attributes.TTS_MODEL, GPT_KEY).catch((err)=>{
631
+ console.log('errr while creating audio message', err.response?.data)
632
+ })
633
+ break;
634
+ case VOICE_PROVIDER.ELEVENLABS:
635
+ let ELEVENLABS_APIKEY = this.integrations.find((el => el.type === VOICE_PROVIDER.ELEVENLABS))?.key
636
+ audioData = await this.aiService.textToSpeechElevenLabs(text, attributes.TTS_VOICE_NAME, attributes.TTS_MODEL, ELEVENLABS_APIKEY).catch((err)=>{
637
+ console.log('errr while creating elevenlabs audio message', err.response?.data)
638
+ })
639
+ break;
640
+
641
+ }
642
+
630
643
  let fileUrl = await this.uploadService.upload(attributes.callSid, audioData, this.user).catch((err)=>{
631
644
  console.log('errr while uploading audioData', err.response)
632
645
  })
@@ -10,7 +10,8 @@ const TYPE_MESSAGE = require('./constants').TYPE_MESSAGE
10
10
  const MESSAGE_TYPE_MINE = require('./constants').MESSAGE_TYPE_MINE
11
11
  const MESSAGE_TYPE_OTHERS = require('./constants').MESSAGE_TYPE_OTHERS
12
12
  const CHANNEL_NAME = require('./constants').CHANNEL_NAME
13
-
13
+ const VOICE_PROVIDER = require('./constants').VOICE_PROVIDER;
14
+ const OPENAI_SETTINGS = require('./constants').OPENAI_SETTINGS;
14
15
 
15
16
  const winston = require("../winston");
16
17
 
@@ -89,6 +90,56 @@ class VoiceChannel {
89
90
  //if index is not present: set to default (0)
90
91
  await this.redis_client.set('tiledesk:vxml:'+callId + ':delayIndex', 0, {'EX': 86400});
91
92
  }
93
+
94
+
95
+ async saveSettingsForCallId(attributes, callId){
96
+
97
+ winston.debug('saveSettingsForCallId: attributes -->', attributes)
98
+ let flowAttributes = {}
99
+ if(attributes && attributes.flowAttributes){
100
+
101
+ flowAttributes = attributes.flowAttributes;
102
+
103
+ //MANAGE VOICE SETTINGS from globals attributes
104
+ let voiceProvider = VOICE_PROVIDER.TWILIO
105
+ if(flowAttributes.VOICE_PROVIDER){
106
+ voiceProvider = flowAttributes.VOICE_PROVIDER
107
+
108
+ }
109
+
110
+ // IF VOICE_PROVIDER is TWILIO --> default values is on user account twilio settings
111
+ // IF VOICE_PROVIDER is OPENAI --> set default values from constants
112
+ if(voiceProvider === VOICE_PROVIDER.OPENAI){
113
+ flowAttributes.TTS_VOICE_NAME = flowAttributes.TTS_VOICE_NAME? flowAttributes.TTS_VOICE_NAME : OPENAI_SETTINGS.TTS_VOICE_NAME;
114
+ flowAttributes.TTS_MODEL = flowAttributes.TTS_MODEL? flowAttributes.TTS_MODEL : OPENAI_SETTINGS.TTS_MODEL;
115
+ flowAttributes.STT_MODEL = flowAttributes.STT_MODEL? flowAttributes.STT_MODEL : OPENAI_SETTINGS.STT_MODEL;
116
+ }
117
+
118
+
119
+ }
120
+
121
+ const index = await this.redis_client.get('tiledesk:vxml:'+callId + ':attributes');
122
+ winston.debug('saveSettingsForCallId: attributes found -->'+index)
123
+ if(index){
124
+ //set index to default (0)
125
+ await this.redis_client.set('tiledesk:vxml:'+callId + ':attributes', JSON.stringify(flowAttributes), {'EX': 86400});
126
+ return;
127
+ }
128
+ //if index is not present: set to default (0)
129
+ await this.redis_client.set('tiledesk:vxml:'+callId + ':attributes', JSON.stringify(flowAttributes), {'EX': 86400});
130
+
131
+ }
132
+
133
+
134
+ async getSettingsForCallId(callId){
135
+ const attributes = await this.redis_client.get('tiledesk:vxml:'+callId + ':attributes');
136
+ if(attributes){
137
+ return JSON.parse(attributes)
138
+ }
139
+ return {};
140
+ }
141
+
142
+
92
143
 
93
144
 
94
145
 
@@ -38,8 +38,8 @@ module.exports = {
38
38
  SPEECH_FORM: 'speech_form',
39
39
 
40
40
  },
41
- BASE_POOLING_DELAY: 250,
42
- MAX_POLLING_TIME: 30000,
41
+ BASE_POOLING_DELAY: 500,
42
+ MAX_POLLING_TIME: 50000,
43
43
  VOICE_NAME: 'Polly.Danielle',
44
44
  VOICE_LANGUAGE: 'en-US',
45
45
  CALL_STATUS: {
@@ -51,7 +51,8 @@ module.exports = {
51
51
  },
52
52
  VOICE_PROVIDER: {
53
53
  OPENAI: 'openai',
54
- TWILIO: 'twilio'
54
+ TWILIO: 'twilio',
55
+ ELEVENLABS: 'elevenlabs'
55
56
  },
56
57
  OPENAI_SETTINGS:{
57
58
  TTS_VOICE_NAME: 'alloy',
@@ -39,8 +39,9 @@ class FileUtils {
39
39
  responseType: 'arraybuffer',
40
40
  method: 'GET'
41
41
  }).then((resbody) => {
42
- console.log('okkkkkkk')
43
- resolve(resbody.data);
42
+ const buffer = Buffer.from(resbody.data, 'binary');
43
+ resolve(buffer);
44
+ //resolve(resbody.data);
44
45
  }).catch((err) => {
45
46
  reject(err);
46
47
  })
@@ -15,11 +15,16 @@ class AiService {
15
15
  if (!config.OPENAI_ENDPOINT) {
16
16
  throw new Error("[AiService] config.OPENAI_ENDPOINT is mandatory");
17
17
  }
18
+ if(!config.ELEVENLABS_ENDPOINT){
19
+ throw new Error("[AiService] config.ELEVENLABS_ENDPOINT is mandatory");
20
+ }
18
21
  if (!config.API_URL) {
19
22
  throw new Error("[AiService] config.API_URL is mandatory");
20
23
  }
24
+
21
25
 
22
26
  this.OPENAI_ENDPOINT = config.OPENAI_ENDPOINT;
27
+ this.ELEVENLABS_ENDPOINT = config.ELEVENLABS_ENDPOINT;
23
28
  this.API_URL = config.API_URL;
24
29
 
25
30
  }
@@ -28,15 +33,20 @@ class AiService {
28
33
 
29
34
  winston.debug("[AiService] speechToText url: "+ fileUrl);
30
35
  let file = await fileUtils.downloadFromUrl(fileUrl).catch((err) => {
31
- winston.error("[AiService] err: ", err)
36
+ winston.error("[AiService] err while downloadFromUrl: ", err)
32
37
  return null; // fallback per evitare undefined
33
38
  })
34
39
 
40
+ if (!file) {
41
+ winston.error('file non esisteeeeeeee')
42
+ return;
43
+ }
35
44
 
36
45
  return new Promise((resolve, reject) => {
37
-
46
+
47
+
38
48
  const formData = new FormData();
39
- formData.append('file', file, { filename: 'audiofile', contentType: 'audio/mpeg' });
49
+ formData.append('file', file, { filename: 'audiofile.wav', contentType: 'audio/wav' });
40
50
  formData.append('model', model);
41
51
 
42
52
  axios({
@@ -91,10 +101,87 @@ class AiService {
91
101
 
92
102
  }
93
103
 
104
+
105
+
106
+ async speechToTextElevenLabs(fileUrl, model, language, API_KEY) {
107
+
108
+ winston.debug("[AiService] ELEVEN Labs speechToText url: "+ fileUrl);
109
+ let file = await fileUtils.downloadFromUrl(fileUrl).catch((err) => {
110
+ winston.error("[AiService] err: ", err)
111
+ return null; // fallback per evitare undefined
112
+ })
113
+
114
+ if (!file) {
115
+ winston.debug('[AiService] ELEVEN Labs speechToText file NOT EXIST: . . . return')
116
+ return;
117
+ }
118
+
119
+ return new Promise((resolve, reject) => {
120
+
121
+
122
+ const formData = new FormData();
123
+ formData.append('file', file, { filename: 'audiofile.wav', contentType: 'audio/wav' });
124
+ formData.append('model_id', "scribe_v1");
125
+ formData.append('language_code', language)
126
+
127
+ axios({
128
+ url: this.ELEVENLABS_ENDPOINT + "/v1/speech-to-text",
129
+ headers: {
130
+ ...formData.getHeaders(),
131
+ "xi-api-key": API_KEY
132
+ },
133
+ data: formData,
134
+ method: 'POST'
135
+ }).then((resbody) => {
136
+ console.log('dataaaaaa', resbody)
137
+ resolve(resbody.data.text);
138
+ }).catch((err) => {
139
+ console.log('errrrrrr', err?.response)
140
+ reject(err);
141
+ })
142
+
143
+ })
144
+ }
145
+
146
+ async textToSpeechElevenLabs(text, voice_id, model, API_KEY){
147
+
148
+
149
+ const data = {
150
+ model_id: model,
151
+ text: text,
152
+ output_format: "mp3_44100_128",
153
+ };
154
+
155
+
156
+ winston.debug('[AiService] ELEVEN Labs textToSpeech config:', data)
157
+
158
+ return new Promise((resolve, reject) => {
159
+ axios({
160
+ url: this.ELEVENLABS_ENDPOINT + "/v1/text-to-speech/"+ voice_id,
161
+ headers: {
162
+ "Content-Type": "application/json",
163
+ "xi-api-key": API_KEY
164
+ },
165
+ responseType: 'arraybuffer',
166
+ data: data,
167
+ method: "POST",
168
+ }).then( async (response) => {
169
+ resolve(response?.data)
170
+ })
171
+ .catch((err) => {
172
+ winston.error("[AiService] ELEVEN Labs textToSpeech error: ", err);
173
+ reject(err)
174
+ });
175
+ });
176
+
177
+ }
178
+
179
+
180
+
94
181
  async checkQuoteAvailability(projectId, token) {
95
182
 
96
183
  winston.debug("[AiService] checkQuoteAvailability for project: "+ projectId);
97
-
184
+
98
185
  return new Promise((resolve, reject) => {
99
186
 
100
187
  axios({
@@ -105,12 +192,13 @@ class AiService {
105
192
  },
106
193
  method: 'GET'
107
194
  }).then((resbody) => {
108
- if (resbody && resbody.isAvailable === true) {
195
+ if (resbody && resbody.data?.isAvailable === true) {
109
196
  resolve(true)
110
197
  } else {
111
198
  resolve(false)
112
199
  }
113
200
  }).catch((err) => {
201
+ winston.error("[AiService] checkQuoteAvailability error: ", err.response?.data);
114
202
  reject(err);
115
203
  })
116
204
 
@@ -24,7 +24,7 @@ class IntegrationService {
24
24
 
25
25
  async getKeyFromIntegrations(id_project, integration_name, token){
26
26
 
27
- winston.debug('[IntegrationService] getKeyFromIntegrations id_project:', id_project)
27
+ winston.debug('[IntegrationService] getKeyFromIntegrations id_project:'+ id_project + ' ' + integration_name)
28
28
 
29
29
  return await axios({
30
30
  url: this.API_URL + "/"+ id_project + "/integration/name/" + integration_name,
@@ -35,10 +35,10 @@ class IntegrationService {
35
35
  data: {},
36
36
  method: "GET",
37
37
  }).then( async (response) => {
38
- if (!response.data || response.data?.value) {
38
+ if (!response.data || !response.data?.value) {
39
39
  return null;
40
40
  }
41
-
41
+
42
42
  return response.data?.value?.apikey
43
43
  })
44
44
  .catch((err) => {
@@ -69,7 +69,6 @@ class IntegrationService {
69
69
  return response.data?.gptkey
70
70
  })
71
71
  .catch((err) => {
72
- winston.error("[IntegrationService] getKeyFromKbSettings error: ", err.response?.data);
73
72
  return null;
74
73
  });
75
74
 
@@ -47,7 +47,8 @@ class UploadService {
47
47
 
48
48
  //const formData = new FormData();
49
49
  //formData.append('file', file, { filename: 'audiofile_'+user._id+'_'+id+'.mp3', contentType: 'audio/mpeg' });
50
-
50
+ user.token = 'JWT eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJfaWQiOiI2NWM1ZjExNjlmYWYyZDA0Y2Q3ZGE1MjciLCJlbWFpbCI6ImdhYnJpZWxlQHRpbGVkZXNrLmNvbSIsImZpcnN0bmFtZSI6IkdhYnJpZWxlIiwibGFzdG5hbWUiOiJQYW5pY28iLCJlbWFpbHZlcmlmaWVkIjp0cnVlLCJpYXQiOjE3NDgyNTY2MTUsImF1ZCI6Imh0dHBzOi8vdGlsZWRlc2suY29tIiwiaXNzIjoiaHR0cHM6Ly90aWxlZGVzay5jb20iLCJzdWIiOiJ1c2VyIiwianRpIjoiNWUyZDhhYmUtYzQ0YS00MjJiLWE3MjUtYWYwMjcxNDgyZTczIn0.AcT1tNbE3AcfctJXfOsfUbytRNUQlhBqPUctxzXMjehZOS2ORJThWaPqPxrvqTTIyeOU2l6eoTw8_tqfRJGlp6X4m9KLio87axGl1z3WYBgh8bSMIkAw2zSIUuJmpjBuT8EZdjXZClXRUAliAvAoFRgCmhWJ1tODVvBynLiSb37sB_zscqWH5L5eF1vdt6HHizEO4HbGABQS00I2hEPn99ssC9Y3W4_UhDcitZG80ACwS_Bpl6uk8OxAFybZ1DHHkBS1AK-lCO2P2JJCFRyM33mcvTgb9B6pADETzgJT2qfgOU4-1Pm0l55Mij1LS-h7QTj95DTFQMM7DD6elP0WcA'
51
+
51
52
  axios({
52
53
  url: this.API_URL + "/files/users",
53
54
  headers: {