@runnerpro/backend 1.13.22 → 1.13.24
This diff shows the changes between publicly released versions of this package. It is provided for informational purposes only and reflects the package contents as published to their respective public registries.
|
@@ -33,7 +33,6 @@ const index_1 = require("../../index");
|
|
|
33
33
|
const common_1 = require("@runnerpro/common");
|
|
34
34
|
const index_2 = require("../../locale/index");
|
|
35
35
|
const multer_1 = __importDefault(require("multer"));
|
|
36
|
-
const axios_1 = __importDefault(require("axios"));
|
|
37
36
|
const saveResponseTime_1 = require("../saveResponseTime");
|
|
38
37
|
const fluent_ffmpeg_1 = __importDefault(require("fluent-ffmpeg"));
|
|
39
38
|
const ffmpeg_static_1 = __importDefault(require("ffmpeg-static"));
|
|
@@ -221,26 +220,40 @@ const sendMessage = (req, res, { sendNotification, firebaseMessaging, isClient }
|
|
|
221
220
|
const idCliente = isClient ? req.session.userid : req.body.idCliente;
|
|
222
221
|
let idWorkout, type = 1;
|
|
223
222
|
if (!isClient) {
|
|
223
|
+
console.time('markReadMessage');
|
|
224
224
|
// Si es entrenador, se marca leído cuando se envía un mensaje
|
|
225
225
|
yield markReadMessage({ isClient, idCliente });
|
|
226
|
+
console.timeEnd('markReadMessage');
|
|
226
227
|
// Cambiar el tono del texto a uno neutro y corregir la ortografía
|
|
227
228
|
if (idCliente === 'sJzhGqaJcddq96x80hZAlPJ6pET2' && process.env.NODE_ENV === 'PROD') {
|
|
228
|
-
console.
|
|
229
|
-
|
|
229
|
+
console.time('cambiarTonoEntrenadorNeutro');
|
|
230
|
+
try {
|
|
231
|
+
text = yield cambiarTonoEntrenadorNeutro(text);
|
|
232
|
+
}
|
|
233
|
+
catch (error) {
|
|
234
|
+
console.error('Error en cambiarTonoEntrenadorNeutro:', error);
|
|
235
|
+
}
|
|
236
|
+
finally {
|
|
237
|
+
console.timeEnd('cambiarTonoEntrenadorNeutro');
|
|
238
|
+
}
|
|
230
239
|
}
|
|
231
240
|
}
|
|
232
241
|
// Devuelve el texto en el otro idioma si el cliente no habla español y el idioma del cliente
|
|
242
|
+
console.time('getPreferredLanguageForChat');
|
|
233
243
|
const { textSpanish, textPreferredLanguage, preferredLanguage } = yield getPreferredLanguageForChat({
|
|
234
244
|
text,
|
|
235
245
|
idCliente,
|
|
236
246
|
isClient,
|
|
237
247
|
});
|
|
248
|
+
console.timeEnd('getPreferredLanguageForChat');
|
|
238
249
|
// Enviado cuando es un mensaje de comentario sobre intelligence
|
|
239
250
|
if (newMessage) {
|
|
240
251
|
type = 6;
|
|
241
252
|
idWorkout = newMessage.idWorkout;
|
|
242
253
|
}
|
|
254
|
+
console.time('query');
|
|
243
255
|
const [message] = yield (0, index_1.query)('INSERT INTO [CHAT MESSAGE] ([ID CLIENTE], [ID SENDER], [TEXT], [TEXT PREFERRED LANGUAGE], [PREFERRED LANGUAGE], [REPLY MESSAGE ID], [ID WORKOUT], [TYPE]) VALUES (?, ?, ?, ?, ?, ?, ?, ?) RETURNING [ID]', [isClient ? userid : idCliente, userid, textSpanish, textPreferredLanguage, preferredLanguage, replyMessageId, idWorkout, type]);
|
|
256
|
+
console.timeEnd('query');
|
|
244
257
|
res.send({ idMessage: message.id, text: textSpanish });
|
|
245
258
|
if (!isClient) {
|
|
246
259
|
sendNotification({
|
|
@@ -250,13 +263,16 @@ const sendMessage = (req, res, { sendNotification, firebaseMessaging, isClient }
|
|
|
250
263
|
screen: common_1.NOTIFICATION_SCREEN_TYPES.CHAT,
|
|
251
264
|
});
|
|
252
265
|
// Enviar a N8N lo que ha escrito el entrenador
|
|
253
|
-
const [lastSuggestionMsg] =
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
266
|
+
// const [lastSuggestionMsg] = await query(
|
|
267
|
+
// 'SELECT [ID] FROM [CHAT MESSAGE] WHERE [ID CLIENTE] = ? AND [SUGGESTION TEXT] IS NOT NULL ORDER BY [ID] DESC LIMIT 1',
|
|
268
|
+
// [idCliente]
|
|
269
|
+
// );
|
|
270
|
+
// if (lastSuggestionMsg) {
|
|
271
|
+
// await axios.put(`${process.env.N8N_URL}/edc2484f-7010-44c1-8c1d-82924496a2eb`, {
|
|
272
|
+
// id: lastSuggestionMsg.id,
|
|
273
|
+
// msg: textSpanish,
|
|
274
|
+
// });
|
|
275
|
+
// }
|
|
260
276
|
}
|
|
261
277
|
});
|
|
262
278
|
const getPreferredLanguageForChat = ({ text, idCliente, isClient }) => __awaiter(void 0, void 0, void 0, function* () {
|
package/lib/cjs/prompt/index.js
CHANGED
|
@@ -11,21 +11,29 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
11
11
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
12
|
exports.sendPrompt = void 0;
|
|
13
13
|
const { VertexAI } = require('@google-cloud/vertexai');
|
|
14
|
-
function sendPrompt(prompt) {
|
|
14
|
+
function sendPrompt(prompt, options = {}) {
|
|
15
15
|
return __awaiter(this, void 0, void 0, function* () {
|
|
16
16
|
// Inicializa el cliente de Vertex AI
|
|
17
17
|
const vertex_ai = new VertexAI({
|
|
18
18
|
project: process.env.PROJECT_ID, // Reemplaza con tu ID de proyecto de Google Cloud
|
|
19
19
|
location: 'us-central1', // Reemplaza con tu región
|
|
20
20
|
});
|
|
21
|
-
// Selecciona el modelo de Gemini
|
|
22
|
-
const model = 'gemini-2.5-flash'; //
|
|
23
|
-
// Configura el modelo generativo
|
|
21
|
+
// Selecciona el modelo de Gemini más rápido
|
|
22
|
+
const model = 'gemini-2.5-flash'; // Flash es más rápido que Pro
|
|
23
|
+
// Configura el modelo generativo con parámetros optimizados para velocidad
|
|
24
24
|
const generativeModel = vertex_ai.preview.getGenerativeModel({
|
|
25
25
|
model: model,
|
|
26
|
+
generationConfig: {
|
|
27
|
+
maxOutputTokens: options.maxOutputTokens || 512, // Limitar tokens para respuestas más rápidas
|
|
28
|
+
temperature: options.temperature || 0.3, // Menor temperatura = más determinístico y rápido
|
|
29
|
+
topP: options.topP || 0.8,
|
|
30
|
+
topK: options.topK || 20,
|
|
31
|
+
},
|
|
26
32
|
});
|
|
27
33
|
try {
|
|
28
|
-
|
|
34
|
+
// Timeout para evitar esperas largas
|
|
35
|
+
const timeoutPromise = new Promise((_, reject) => setTimeout(() => reject(new Error('Timeout en sendPrompt')), options.timeout || 15000));
|
|
36
|
+
const resp = yield Promise.race([generativeModel.generateContent(prompt), timeoutPromise]);
|
|
29
37
|
const content = resp.response.candidates[0].content;
|
|
30
38
|
// Asumiendo que la respuesta es de tipo texto
|
|
31
39
|
if (content.parts && content.parts.length > 0 && content.parts[0].text)
|
|
@@ -34,6 +42,7 @@ function sendPrompt(prompt) {
|
|
|
34
42
|
}
|
|
35
43
|
catch (error) {
|
|
36
44
|
console.error('Error al enviar el prompt:', error);
|
|
45
|
+
throw error; // Re-throw para que el código que llama pueda manejar el error
|
|
37
46
|
}
|
|
38
47
|
});
|
|
39
48
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"conversation.d.ts","sourceRoot":"","sources":["../../../../../src/chat/api/conversation.ts"],"names":[],"mappings":"AAmBA,QAAA,MAAM,iBAAiB,0BAA2B,GAAG,SAuBpD,CAAC;
|
|
1
|
+
{"version":3,"file":"conversation.d.ts","sourceRoot":"","sources":["../../../../../src/chat/api/conversation.ts"],"names":[],"mappings":"AAmBA,QAAA,MAAM,iBAAiB,0BAA2B,GAAG,SAuBpD,CAAC;AA+bF,OAAO,EAAE,iBAAiB,EAAE,CAAC"}
|
|
@@ -1,3 +1,10 @@
|
|
|
1
|
-
|
|
1
|
+
interface SendPromptOptions {
|
|
2
|
+
maxOutputTokens?: number;
|
|
3
|
+
temperature?: number;
|
|
4
|
+
topP?: number;
|
|
5
|
+
topK?: number;
|
|
6
|
+
timeout?: number;
|
|
7
|
+
}
|
|
8
|
+
declare function sendPrompt(prompt: string, options?: SendPromptOptions): Promise<any>;
|
|
2
9
|
export { sendPrompt };
|
|
3
10
|
//# sourceMappingURL=index.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/prompt/index.ts"],"names":[],"mappings":"AAEA,iBAAe,UAAU,CAAC,MAAM,
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/prompt/index.ts"],"names":[],"mappings":"AAEA,UAAU,iBAAiB;IACzB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,iBAAe,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,GAAE,iBAAsB,gBAqCxE;AAED,OAAO,EAAE,UAAU,EAAE,CAAC"}
|