garriga 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +298 -0
  2. package/package.json +12 -0
package/index.js ADDED
@@ -0,0 +1,298 @@
+ const axios = require("axios");
+
+ class LLMAdapter {
+   constructor(provider = "chatgpt") {
+     switch (provider) {
+       case "chatgpt":
+         this.adapter = new ChatGptAPI();
+         break;
+       case "localLLM":
+         this.adapter = new localLLM();
+         break;
+       case "gemini":
+         this.adapter = new GeminiAPI();
+         break;
+
+       default:
+         throw new Error("AI provider not available");
+     }
+   }
+
+   async generateResponse(prompt, userToken, source) {
+     return this.adapter.generateResponse(prompt, userToken, source);
+   }
+   async generateRateResponse(responseId, ratePoints, rateText, userToken) {
+     return this.adapter.generateRateResponse(responseId, ratePoints, rateText, userToken);
+   }
+   async getAIhistorial(fecha_inicial, fecha_final, id_usuario) {
+     return this.adapter.getAIhistorial(fecha_inicial, fecha_final, id_usuario);
+   }
+   async getAIQueryTypes() {
+     return this.adapter.getAIQueryTypes();
+   }
+ }
+
+ class ChatGptAPI {
+   constructor() {
+     this.apiKey = process.env.CHATGPTAPIKEY;
+     this.apiUrl = "https://api.openai.com/v1/chat/completions";
+     this.model = "gpt-3.5-turbo";
+   }
+   async generateResponse(prompt) {
+     try {
+       const response = await axios.post(
+         this.apiUrl,
+         {
+           model: this.model,
+           messages: [
+             { role: "system", content: "You are a helpful assistant." },
+             { role: "user", content: prompt },
+           ],
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+             Authorization: "Bearer " + this.apiKey,
+           },
+         },
+       );
+
+       return response.data.choices[0].message.content;
+     } catch (error) {
+       console.error(`Error calling the ChatGPT API: ${error.message}`);
+       throw error;
+     }
+   }
+   async generateRateResponse(responseId, ratePoints, rateText, userToken) {
+     const LLMToken = await this.getToken({ username: "admin", password: "1234" });
+     try {
+       const response = await axios.post(
+         this.apiUrlRateResponse,
+         {
+           responseId,
+           ratePoints,
+           rateText,
+           userToken,
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+             Authorization: "Bearer " + LLMToken,
+           },
+         },
+       );
+
+       return response;
+     } catch (error) {
+       console.error(`Error calling the Local API: ${error}`);
+       throw error;
+     }
+   }
+   async getAIhistorial() {}
+   async getAIQueryTypes() {}
+ }
+
+ class GeminiAPI {
+   constructor() {
+     this.apiKey = process.env.GEMINIAPIKEY;
+     this.baseURL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent";
+   }
+
+   get URL() {
+     return this.baseURL + "?key=" + this.apiKey;
+   }
+
+   async generateResponse(prompt) {
+     try {
+       const response = await axios.post(
+         this.URL,
+         {
+           contents: [
+             {
+               parts: [{ text: prompt }],
+             },
+           ],
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+           },
+         },
+       );
+
+       return response.data?.candidates?.[0]?.content?.parts?.[0]?.text || null;
+     } catch (error) {
+       console.error(`Error calling the Gemini API: ${error.message}`);
+       throw error;
+     }
+   }
+   async generateRateResponse(responseId, ratePoints, rateText, userToken) {
+     const LLMToken = await this.getToken({ username: "admin", password: "1234" });
+     try {
+       const response = await axios.post(
+         this.apiUrlRateResponse,
+         {
+           responseId,
+           ratePoints,
+           rateText,
+           userToken,
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+             Authorization: "Bearer " + LLMToken,
+           },
+         },
+       );
+
+       return response;
+     } catch (error) {
+       console.error(`Error calling the Local API: ${error}`);
+       throw error;
+     }
+   }
+   async getAIhistorial() {}
+   async getAIQueryTypes() {}
+ }
+
+ class localLLM {
+   constructor() {
+     this.apiUrlGetToken = "http://iaService:29000/token";
+     this.apiUrlSendQuery = "http://iaService:29000/query";
+     this.apiUrlRateResponse = "http://iaService:29000/rate-response";
+     this.apiUrlHistorial = "http://iaService:29000/historial";
+     this.apiUrlQueryTypes = "http://iaService:29000/query-types";
+     this.apiUrlOllama = "http://10.20.150.102:11434/api/generate";
+     this.model = "GrupoImpuLLMtec";
+   }
+   async getToken({ username, password }) {
+     try {
+       const responseToken = await axios.post(
+         this.apiUrlGetToken,
+         {
+           username,
+           password,
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+           },
+         },
+       );
+       return responseToken.data.access_token;
+     } catch (error) {
+       console.error(`Error calling getToken on the Local API: ${error}`);
+       throw error;
+     }
+   }
+
+   async generateResponse(prompt, userToken, source = "mistral7b-genei:latest") {
+     if (userToken === "modeloLocalOllama") {
+       try {
+         const response = await axios.post(
+           this.apiUrlOllama,
+           {
+             prompt,
+             model: source,
+             stream: false,
+           },
+           {
+             headers: {
+               "Content-Type": "application/json",
+             },
+           },
+         );
+
+         return response?.data?.response;
+       } catch (error) {
+         console.error(`Error calling the Local Ollama API: ${error}`);
+         throw error;
+       }
+     }
+     const LLMToken = await this.getToken({ username: "admin", password: "1234" });
+     try {
+       const response = await axios.post(
+         this.apiUrlSendQuery,
+         {
+           question: prompt,
+           userToken,
+           source,
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+             Authorization: "Bearer " + LLMToken,
+           },
+         },
+       );
+
+       return response;
+     } catch (error) {
+       console.error(`Error calling the Local API: ${error}`);
+       throw error;
+     }
+   }
+   async generateRateResponse(responseId, ratePoints, rateText, userToken) {
+     const LLMToken = await this.getToken({ username: "admin", password: "1234" });
+     try {
+       const response = await axios.post(
+         this.apiUrlRateResponse,
+         {
+           responseId,
+           ratePoints,
+           rateText,
+           userToken,
+         },
+         {
+           headers: {
+             "Content-Type": "application/json",
+             Authorization: "Bearer " + LLMToken,
+           },
+         },
+       );
+
+       return response;
+     } catch (error) {
+       console.error(`Error calling the Local API: ${error}`);
+       throw error;
+     }
+   }
+
+   async getAIhistorial(fecha_inicial, fecha_final, id_usuario) {
+     const LLMToken = await this.getToken({ username: "admin", password: "1234" });
+     const urlCompletaHistorial = `${this.apiUrlHistorial}/${fecha_inicial}/${fecha_final}/${id_usuario}`;
+     try {
+       const response = await axios.get(urlCompletaHistorial, {
+         headers: {
+           "Content-Type": "application/json",
+           Authorization: "Bearer " + LLMToken,
+         },
+       });
+
+       return response;
+     } catch (error) {
+       console.error(`Error calling the Local API: ${error}`);
+       throw error;
+     }
+   }
+
+   async getAIQueryTypes() {
+     const LLMToken = await this.getToken({ username: "admin", password: "1234" });
+     const urlCompletaQueryTypes = `${this.apiUrlQueryTypes}`;
+     try {
+       const response = await axios.get(urlCompletaQueryTypes, {
+         headers: {
+           "Content-Type": "application/json",
+           Authorization: "Bearer " + LLMToken,
+         },
+       });
+
+       return response;
+     } catch (error) {
+       console.error(`Error calling the Local API: ${error}`);
+       throw error;
+     }
+   }
+ }
+
+ module.exports = LLMAdapter;
package/package.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "name": "garriga",
+   "version": "1.0.0",
+   "description": "AI connector",
+   "main": "index.js",
+   "scripts": {
+     "test": "echo \"Error: no test specified\" && exit 1"
+   },
+   "keywords": [],
+   "author": "Alexandra",
+   "license": "ISC"
+ }
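
For orientation, a minimal usage sketch of the published module follows. It is not part of the package contents above, and it assumes the package has been installed under its published name garriga and that the chosen provider's prerequisites are available (the CHATGPTAPIKEY or GEMINIAPIKEY environment variable, or reachable iaService/Ollama endpoints for the localLLM provider). The provider name and prompt are illustrative.

// Illustrative consumer code; "gemini" and the prompt text are placeholder choices.
const LLMAdapter = require("garriga");

// The constructor also accepts "chatgpt" (the default) or "localLLM".
const ai = new LLMAdapter("gemini");

// For the Gemini and ChatGPT providers only the prompt argument is used;
// the Gemini client resolves to the generated text or null.
ai.generateResponse("Hello, how are you?")
  .then((text) => console.log(text))
  .catch((err) => console.error(err));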