garriga 1.0.1 → 1.0.3
This diff shows the content of publicly released versions of this package as they appear in the supported public registries, and is provided for informational purposes only.
- package/README.md +76 -0
- package/index.js +292 -257
- package/package.json +1 -1
package/README.md
ADDED
@@ -0,0 +1,76 @@
# 📦 Garriga Project

This repository contains the source code for the project's npm package.

> [!IMPORTANT]
> **READ BEFORE MODIFYING:** It is critical to follow the workflow instructions described below to avoid serious desynchronization.

---

## ⚠️ Critical Workflow

This is an **npm package**. The relationship between the repository and the registry is as follows:

1. **Gitea:** The project is stored here as a backup and as a historical record of changes (git commits).
2. **npm:** The code that **actually counts** and is used in production is the code hosted on npm.

> **GOLDEN RULE:** Every time a new version of the code is pushed to Gitea, it is **MANDATORY** to run `npm publish`.
>
> Failing to do so will leave the repository and the package out of sync, which can cause serious conflicts.

---

## 🛠️ Installation and Setup

### 1. Clone the repository

To get the code onto your local machine:

git clone ssh://IPdesarrollo:222/empresa/garriga.git

### 2. SSH Setup (Important)

To be able to push changes, you must configure your access in Gitea:

1. Add your public SSH key to the project's **Deploy Keys** in Gitea.
2. ✅ **Make sure to check the "Enable write access" box**.

---

## 🚀 Publishing to npm

npm authentication uses two-factor authentication (2FA) and WebAuthn. Follow these steps carefully.

### Step 1: Log in (`npm login`)

Run the following command in your terminal:

npm login

**Follow these exact steps:**

1. The terminal will ask you to press `ENTER` to open the browser.
   - ⚠️ **Note:** You must use **Google Chrome**. (Firefox tends to have problems with this flow.)
2. Enter the credentials for the **Cardus** account.
   - _Don't have the credentials?_ Look for them on **Discord**, in the `#docs-it` channel.
3. Click the **2FA** button. A QR code will appear on screen.
4. **Scan the QR code with your phone**.
5. Your phone will ask you to enable **Bluetooth**. Accept it.
6. On your computer, open the Bluetooth settings. You will see a connection request from your phone; **accept it**.
7. The phone will ask for biometric identification (PIN, fingerprint, or Face ID).

### Step 2: Publish (`npm publish`)

Once you are logged in correctly (and if everything went well), run:

npm publish

> **Note:** The validation process is **exactly the same** as for the login (QR code, Bluetooth, biometrics).

---

## 🆘 Need help?

If you did not understand the process or something fails, **do NOT force the upload**.

This flow has some technical complexity. Please contact someone on the team who knows the process before modifying anything. It is better to ask than to desynchronize the repository.
package/index.js
CHANGED
@@ -1,298 +1,333 @@
const axios = require("axios");

class LLMAdapter {
  constructor(provider = "chatgpt") {
    switch (provider) {
      case "chatgpt":
        this.adapter = new ChatGptAPI();
        break;
      case "localLLM":
        this.adapter = new localLLM();
        break;
      case "gemini":
        this.adapter = new GeminiAPI();
        break;

      default:
        throw new Error("Proveedor de IA no disponible");
    }
  }

  async generateResponse(prompt, userToken, source) {
    return this.adapter.generateResponse(prompt, userToken, source);
  }
  async generateRateResponse(responseId, ratePoints, rateText, userToken) {
    return this.adapter.generateRateResponse(
      responseId,
      ratePoints,
      rateText,
      userToken,
    );
  }
  async getAIhistorial(fecha_inicial, fecha_final, id_usuario) {
    return this.adapter.getAIhistorial(fecha_inicial, fecha_final, id_usuario);
  }
  async getAIQueryTypes() {
    return this.adapter.getAIQueryTypes();
  }
}

class ChatGptAPI {
  constructor() {
    this.apiKey = process.env.CHATGPTAPIKEY;
    this.apiUrl =
      process.env.CHATGPTURLBASE ||
      "https://api.openai.com/v1/chat/completions";
    this.model = process.env.CHATGPTMODEL || "gpt-3.5-turbo";
  }
  async generateResponse(prompt) {
    try {
      const response = await axios.post(
        this.apiUrl,
        {
          model: this.model,
          messages: [
            { role: "system", content: "You are a helpful assistant." },
            { role: "user", content: prompt },
          ],
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: "Bearer " + this.apiKey,
          },
        },
      );

      return response.data.choices[0].message.content;
    } catch (error) {
      console.error(
        `Error al hacer la llamada de la API ChatGPT: ${error.message}`,
      );
      throw error;
    }
  }
  async generateRateResponse(responseId, ratePoints, rateText, userToken) {
    const LLMToken = await this.getToken({
      username: "admin",
      password: "1234",
    });
    try {
      const response = await axios.post(
        this.apiUrlRateResponse,
        {
          responseId,
          ratePoints,
          rateText,
          userToken,
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: "Bearer " + LLMToken,
          },
        },
      );

      return response;
    } catch (error) {
      console.error(`Error al hacer la llamada de la API Local: ${error}`);
      throw error;
    }
  }
  async getAIhistorial() {}
  async getAIQueryTypes() {}
}

class GeminiAPI {
  constructor() {
    this.apiKey = process.env.GEMINIAPIKEY;
    this.baseURL =
      process.env.GEMINIURLBASE ||
      "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent";
  }

  get URL() {
    return this.baseURL + "?key=" + this.apiKey;
  }

  async generateResponse(prompt) {
    try {
      const response = await axios.post(
        this.URL,
        {
          contents: [
            {
              parts: [{ text: prompt }],
            },
          ],
        },
        {
          headers: {
            "Content-Type": "application/json",
          },
        },
      );

      return response.data?.candidates?.[0].content?.parts?.[0]?.text || null;
    } catch (error) {
      console.error(
        `Error al hacer la llamada de la API Gemini: ${error.message}`,
      );
      throw error;
    }
  }
  async generateRateResponse(responseId, ratePoints, rateText, userToken) {
    const LLMToken = await this.getToken({
      username: "admin",
      password: "1234",
    });
    try {
      const response = await axios.post(
        this.apiUrlRateResponse,
        {
          responseId,
          ratePoints,
          rateText,
          userToken,
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: "Bearer " + LLMToken,
          },
        },
      );

      return response;
    } catch (error) {
      console.error(`Error al hacer la llamada de la API Local: ${error}`);
      throw error;
    }
  }
  async getAIhistorial() {}
  async getAIQueryTypes() {}
}

class localLLM {
  constructor() {
    this.apiUrlGetToken = "http://iaService:29000/token";
    this.apiUrlSendQuery = "http://iaService:29000/query";
    this.apiUrlRateResponse = "http://iaService:29000/rate-response";
    this.apiUrlHistorial = "http://iaService:29000/historial";
    this.apiUrlQueryTypes = "http://iaService:29000/query-types";
    this.apiUrlOllama = "http://10.20.150.102:11434/api/generate";
    this.model = "GrupoImpuLLMtec";
  }
  async getToken({ username, password }) {
    try {
      const responseToken = await axios.post(
        this.apiUrlGetToken,
        {
          username,
          password,
        },
        {
          headers: {
            "Content-Type": "application/json",
          },
        },
      );
      return responseToken.data.access_token;
    } catch (error) {
      console.error(
        `Error al hacer la llamada de gettoken de la API Local: ${error}`,
      );
      throw error;
    }
  }

  async generateResponse(prompt, userToken, source = "mistral7b-genei:latest") {
    if (userToken === "modeloLocalOllama") {
      try {
        const response = await axios.post(
          this.apiUrlOllama,
          {
            prompt,
            model: source,
            stream: false,
          },
          {
            headers: {
              "Content-Type": "application/json",
            },
          },
        );

        return response?.data.response;
      } catch (error) {
        console.error(
          `Error al hacer la llamada de la API Local Ollama: ${error}`,
        );
        throw error;
      }
    }
    const LLMToken = await this.getToken({
      username: "admin",
      password: "1234",
    });
    try {
      const response = await axios.post(
        this.apiUrlSendQuery,
        {
          question: prompt,
          userToken,
          source,
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: "Bearer " + LLMToken,
          },
        },
      );

      return response;
    } catch (error) {
      console.error(`Error al hacer la llamada de la API Local: ${error}`);
      throw error;
    }
  }
  async generateRateResponse(responseId, ratePoints, rateText, userToken) {
    const LLMToken = await this.getToken({
      username: "admin",
      password: "1234",
    });
    try {
      const response = await axios.post(
        this.apiUrlRateResponse,
        {
          responseId,
          ratePoints,
          rateText,
          userToken,
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: "Bearer " + LLMToken,
          },
        },
      );

      return response;
    } catch (error) {
      console.error(`Error al hacer la llamada de la API Local: ${error}`);
      throw error;
    }
  }

  async getAIhistorial(fecha_inicial, fecha_final, id_usuario) {
    const LLMToken = await this.getToken({
      username: "admin",
      password: "1234",
    });
    const urlCompletaHistorial = `${this.apiUrlHistorial}/${fecha_inicial}/${fecha_final}/${id_usuario}`;
    try {
      const response = await axios.get(urlCompletaHistorial, {
        headers: {
          "Content-Type": "application/json",
          Authorization: "Bearer " + LLMToken,
        },
      });

      return response;
    } catch (error) {
      console.error(`Error al hacer la llamada de la API Local: ${error}`);
      throw error;
    }
  }

  async getAIQueryTypes() {
    const LLMToken = await this.getToken({
      username: "admin",
      password: "1234",
    });
    const urlCompletaQueryTypes = `${this.apiUrlQueryTypes}`;
    try {
      const response = await axios.get(urlCompletaQueryTypes, {
        headers: {
          "Content-Type": "application/json",
          Authorization: "Bearer " + LLMToken,
        },
      });

      return response;
    } catch (error) {
      console.error(`Error al hacer la llamada de la API Local: ${error}`);
      throw error;
    }
  }
}

module.exports = LLMAdapter;
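For orientation, here is a minimal consumption sketch of the `LLMAdapter` class exported above. It is not documentation shipped with the package: it assumes the package is installed from npm under its registry name `garriga`, that `CHATGPTAPIKEY` (and optionally `CHATGPTURLBASE` / `CHATGPTMODEL`) is set in the environment for the `chatgpt` provider, and that the internal `iaService` host is reachable for the `localLLM` provider.

// Hedged usage sketch, not official documentation for "garriga".
// Assumes: `npm install garriga`, CHATGPTAPIKEY set for the "chatgpt" provider,
// and the internal iaService/Ollama hosts reachable for the "localLLM" provider.
const LLMAdapter = require("garriga");

async function main() {
  // "chatgpt" provider: generateResponse resolves to the assistant message text.
  const chatgpt = new LLMAdapter("chatgpt");
  const answer = await chatgpt.generateResponse("Hola, ¿qué puedes hacer?");
  console.log(answer);

  // "localLLM" provider: methods return the raw axios response
  // (except generateResponse with userToken === "modeloLocalOllama",
  // which returns the Ollama text directly).
  const local = new LLMAdapter("localLLM");
  const queryTypes = await local.getAIQueryTypes();
  console.log(queryTypes.data);
}

main().catch((error) => {
  // Every backend method rethrows axios errors after logging them.
  console.error(error.message);
});

The adapter simply forwards each call to whichever backend it was constructed with, so callers pick a provider once and use the same four methods (`generateResponse`, `generateRateResponse`, `getAIhistorial`, `getAIQueryTypes`) regardless of backend.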