@peopl-health/nexus 1.7.10 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/assistants/BaseAssistant.js +88 -42
- package/lib/config/awsConfig.js +1 -1
- package/lib/config/llmConfig.js +15 -9
- package/lib/controllers/assistantController.js +5 -9
- package/lib/core/NexusMessaging.js +3 -2
- package/lib/helpers/assistantHelper.js +32 -17
- package/lib/helpers/llmsHelper.js +29 -2
- package/lib/index.js +14 -23
- package/lib/models/messageModel.js +4 -3
- package/lib/models/threadModel.js +3 -0
- package/lib/providers/{OpenAIProvider.js → OpenAIAssistantsProvider.js} +33 -79
- package/lib/providers/OpenAIResponsesProvider.js +380 -0
- package/lib/providers/createProvider.js +24 -0
- package/lib/services/assistantService.js +84 -88
- package/lib/storage/MongoStorage.js +4 -10
- package/lib/utils/index.js +0 -2
- package/package.json +2 -2
- package/lib/utils/defaultLLMProvider.js +0 -20
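
The headline change in 2.0.0 is the provider abstraction: OpenAIProvider.js is renamed to OpenAIAssistantsProvider.js, a new OpenAIResponsesProvider.js is added, the old defaultLLMProvider.js is removed, and every call site goes through the new createProvider.js, choosing a variant via the VARIANT environment variable. The factory itself (only 24 added lines) is not expanded in this diff; the sketch below reconstructs its apparent contract from the call sites shown further down, so the constructor arguments and dispatch logic are assumptions rather than the published implementation.

    // Illustrative only: reconstructs the factory's apparent contract from its call sites.
    // The real createProvider.js (+24 lines) is not included in this diff.
    const { OpenAIAssistantsProvider } = require('./OpenAIAssistantsProvider');
    const { OpenAIResponsesProvider } = require('./OpenAIResponsesProvider');

    function createProvider({ client, variant = 'assistants' } = {}) {
      // 'responses' targets the OpenAI Responses/Conversations API; anything else
      // falls back to the classic Assistants (threads/runs) provider.
      if (variant === 'responses') {
        return new OpenAIResponsesProvider({ client, variant }); // assumed constructor shape
      }
      return new OpenAIAssistantsProvider({ client, variant });   // assumed constructor shape
    }

    // The call sites in this release rely on a common duck-typed surface:
    //   getClient(), createConversation({ messages }), addMessage({ threadId, role, content }),
    //   runConversation({ threadId, assistantId, ...options }), getRun({ threadId, runId }),
    //   listMessages({ threadId, order }), submitToolOutputs({ threadId, runId, toolOutputs }),
    //   and uploadFile({ file, purpose }).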
package/lib/assistants/BaseAssistant.js
CHANGED
@@ -1,6 +1,7 @@
  const llmConfig = require('../config/llmConfig');
  const { Thread } = require('../models/threadModel');
  const { getLastNMessages } = require('../helpers/assistantHelper');
+ const { createProvider } = require('../providers/createProvider');

  /**
   * Flexible base assistant implementation that integrates with OpenAI Threads

@@ -25,8 +26,10 @@ class BaseAssistant {

      if (!this.provider && this.client) {
        try {
-         const
-
+         const provider = createProvider({
+           client: this.client,
+           variant: process.env.VARIANT || 'assistants'
+         });
          this.provider = provider;
          if (typeof llmConfig.setOpenAIProvider === 'function') {
            llmConfig.setOpenAIProvider(provider);

@@ -54,7 +57,24 @@ class BaseAssistant {

    _ensureClient() {
      if (!this.client) {
-
+       this.client = this.provider?.getClient?.() || null;
+     }
+
+     if (!this.client) {
+       throw new Error('LLM client not configured. Ensure client is initialized.');
+     }
+
+     if (!this.provider) {
+       try {
+         const variant = process.env.VARIANT || 'assistants';
+         const provider = createProvider({ client: this.client, variant });
+         this.provider = provider;
+         if (typeof llmConfig.setOpenAIProvider === 'function') {
+           llmConfig.setOpenAIProvider(provider);
+         }
+       } catch (error) {
+         console.warn('[BaseAssistant] Failed to bootstrap provider:', error?.message || error);
+       }
      }
    }

@@ -147,35 +167,41 @@ class BaseAssistant {
        throw new Error('Assistant thread not initialized. Call create() before sendMessage().');
      }

-
-
-
-
+     const provider = this.provider || null;
+     if (!provider || typeof provider.addMessage !== 'function') {
+       throw new Error('Provider not configured. Ensure configureLLMProvider has been called.');
+     }

-     const
-
-
-
+     const assistantId = this.assistantId;
+     const threadId = this.thread.thread_id;
+     await provider.addMessage({ threadId, role: 'user', content: message });
+
+     const runConfig = { threadId, assistantId, ...options };

      const toolSchemas = this.getToolSchemas();
      if (toolSchemas.length > 0) {
        runConfig.tools = toolSchemas;
      }

-     const run = await
-       this.thread.thread_id,
-       runConfig
-     );
+     const run = await provider.runConversation(runConfig);

-     return await this.waitForCompletion(
+     return await this.waitForCompletion(threadId, run.id, options);
    }

    async waitForCompletion(threadId, runId, { interval = 2000, maxAttempts = 30 } = {}) {
      this._ensureClient();
-
+     const provider = this.provider || null;
+     if (!provider || typeof provider.getRun !== 'function') {
+       throw new Error('Provider not configured. Cannot poll run status.');
+     }

+     let attempts = 0;
      while (attempts < maxAttempts) {
-       const run = await
+       const run = await provider.getRun({ threadId, runId });
+
+       if (!run) {
+         throw new Error('Unable to retrieve run status.');
+       }

        if (run.status === 'completed') {
          return run;

@@ -183,7 +209,7 @@ class BaseAssistant {

        if (run.status === 'requires_action') {
          await this.handleRequiresAction(run);
-       } else if (['failed', 'cancelled', 'expired', 'incomplete'].includes(run.status)) {
+       } else if (['failed', 'cancelled', 'expired', 'incomplete', 'errored'].includes(run.status)) {
          throw new Error(`Assistant run ended with status '${run.status}'`);
        }

@@ -199,13 +225,26 @@ class BaseAssistant {
      const threadRef = threadDoc || this.thread;
      if (!threadRef) return [];

-     const
-
-
-
-
+     const provider = this.provider || null;
+     if (!provider || typeof provider.listMessages !== 'function') {
+       throw new Error('OpenAI provider not configured. Cannot list messages.');
+     }
+
+     const response = await provider.listMessages({ threadId: threadRef.thread_id, order: 'asc' });
+     const messages = Array.isArray(response?.data) ? response.data : Array.isArray(response?.items) ? response.items : [];
+
+     return messages.map((msg) => {
+       const parts = Array.isArray(msg.content) ? msg.content : [];
+       const textContents = parts
+         .map((part) => {
+           if (part.type === 'text' && part.text?.value) return part.text.value;
+           if ((part.type === 'input_text' || part.type === 'output_text') && part.text) return part.text;
+           return null;
+         })
+         .filter(Boolean);
+
        const content = textContents.length <= 1 ? textContents[0] || '' : textContents;
-       return { role: msg.role, content };
+       return { role: msg.role || 'user', content };
      });
    }

@@ -218,19 +257,25 @@ class BaseAssistant {
        this.lastMessages = await getLastNMessages(whatsappId, 20);
      }

+     const provider = createProvider({ variant: process.env.VARIANT || 'assistants' });
+     if (!provider || typeof provider.createConversation !== 'function') {
+       throw new Error('Provider not configured. Cannot create conversation.');
+     }
+
      const initialMessages = await this.buildInitialMessages({ code, context });
-     const
+     const conversation = await provider.createConversation({
+       messages: initialMessages,
+     });

-
-
-
-
-
-
+     this.thread = {
+       ...conversation,
+       thread_id: conversation?.id,
+       assistant_id: this.assistantId.startsWith('assistant') ? this.assistantId : null,
+       prompt_id: this.assistantId.startsWith('pmpt') ? this.assistantId : null,
+       code,
+     };

-
-     this.thread = thread;
-     return thread;
+     return this.thread;
    }

    async buildInitialMessages({ code }) {

@@ -264,16 +309,17 @@ class BaseAssistant {
      }
    }

-
-
+     const provider = this.provider || null;
+     if (!provider || typeof provider.submitToolOutputs !== 'function') {
+       console.warn('[BaseAssistant] Cannot submit tool outputs: provider not configured');
        return outputs;
      }

-     await
-       run.thread_id,
-       run.id,
-
-     );
+     await provider.submitToolOutputs({
+       threadId: run.thread_id || this.thread?.thread_id,
+       runId: run.id,
+       toolOutputs: outputs
+     });

      return outputs;
    }
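
A minimal usage sketch of the new BaseAssistant flow. The subclass, constructor arguments, and ids below are hypothetical (only the create-before-sendMessage ordering, the VARIANT switch, and the polling behaviour come from the diff above):

    // Hypothetical subclass and ids; this only illustrates the call order the new code implies.
    const OpenAI = require('openai');           // assumption: the host app constructs the client
    process.env.VARIANT = 'responses';          // or 'assistants' (the default)

    const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
    const assistant = new MyAssistant({ assistantId: 'pmpt_123', client }); // MyAssistant extends BaseAssistant
    await assistant.create({ code: '5215512345678', context: {} });  // provider.createConversation() under the hood
    const run = await assistant.sendMessage('Hola');                 // provider.addMessage() + runConversation()
    // waitForCompletion() polls provider.getRun() every 2s (up to 30 attempts),
    // runs handleRequiresAction() on 'requires_action', and throws on
    // 'failed' | 'cancelled' | 'expired' | 'incomplete' | 'errored'.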
package/lib/config/awsConfig.js
CHANGED
@@ -78,7 +78,7 @@ async function downloadFileFromS3(bucketName, key, downloadPath) {
    }
  }

- async function generatePresignedUrl(bucketName, key, expiration =
+ async function generatePresignedUrl(bucketName, key, expiration = 3000) {
    const params = {
      Bucket: bucketName,
      Key: key,
package/lib/config/llmConfig.js
CHANGED
@@ -1,22 +1,27 @@
  const runtimeConfig = require('./runtimeConfig');
- const {
+ const { createProvider } = require('../providers/createProvider');

  let anthropicClient = null;
  let openaiClient = null;
- let
+ let providerInstance = null;
+ let providerVariant = 'assistants';

  const setOpenAIClient = (client) => {
    openaiClient = client || null;
    module.exports.openaiClient = openaiClient;
    if (!client) {
-
-     module.exports.
+     providerInstance = null;
+     module.exports.providerInstance = null;
+     providerVariant = 'assistants';
+     module.exports.providerVariant = providerVariant;
    }
  };

  const setOpenAIProvider = (provider) => {
-
-   module.exports.
+   providerInstance = provider || null;
+   module.exports.providerInstance = providerInstance;
+   providerVariant = provider?.variant || providerVariant || 'assistants';
+   module.exports.providerVariant = providerVariant;

    if (!provider) {
      setOpenAIClient(null);

@@ -33,10 +38,10 @@ const setOpenAIProvider = (provider) => {
  };

  const getOpenAIProvider = ({ instantiate = true } = {}) => {
-   if (
+   if (providerInstance) return providerInstance;
    if (!instantiate) return null;
    if (!openaiClient) return null;
-   const provider =
+   const provider = createProvider({ client: openaiClient, variant: providerVariant });
    setOpenAIProvider(provider);
    return provider;
  };

@@ -69,7 +74,8 @@ const resolveAnthropicClient = () => {

  module.exports = {
    openaiClient,
-
+   providerInstance,
+   providerVariant,
    setOpenAIClient,
    setOpenAIProvider,
    getOpenAIProvider,
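
The reworked llmConfig caches a single provider instance together with its variant. A short sketch of the lazy-initialisation behaviour visible above; the OpenAI client construction and require path are illustrative assumptions:

    // Illustrative wiring; the require path is relative to the package root.
    const OpenAI = require('openai');                 // assumption: openai SDK client
    const llmConfig = require('./lib/config/llmConfig');

    llmConfig.setOpenAIClient(new OpenAI({ apiKey: process.env.OPENAI_API_KEY }));

    // The first call lazily builds a provider via createProvider({ client, variant }) and caches it;
    // later calls return the same instance. { instantiate: false } never builds a new one.
    const provider = llmConfig.getOpenAIProvider();
    const same = llmConfig.getOpenAIProvider({ instantiate: false });
    console.log(provider === same); // true once a provider exists

    // setOpenAIClient(null) resets both the cached provider and the variant back to 'assistants'.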
package/lib/controllers/assistantController.js
CHANGED
@@ -55,14 +55,7 @@ const createAssistantController = async (req, res) => {
    try {
      console.log('codes', codes);
      for (const code of codes) {
-
-       if (thread !== null) {
-         await switchAssistant(code, assistant_id);
-         console.log('FORCE', force);
-         if (!force) continue;
-       }
-
-       await createAssistant(code, assistant_id, [...instrucciones, ...messages], thread);
+       await createAssistant(code, assistant_id, [...instrucciones, ...messages], force);
        console.log('messages', messages);
        for (const message of messages) {
          console.log('message', message);

@@ -90,7 +83,10 @@ const getInfoAssistantController = async (req, res) => {

  const listAssistantController = async (req, res) => {
    try {
-     const
+     const nodeEnv = process.env.NODE_ENV;
+     const airtableStatus = nodeEnv === 'production' ? 'prod' :
+       nodeEnv === 'development' ? 'dev' : nodeEnv;
+     const assistants = await getRecordByFilter(Config_ID, 'assistants', `status="${airtableStatus}"`);
      return res.status(200).send({ message: 'List assistants' , assistants});
    } catch (error) {
      console.log(error);
package/lib/core/NexusMessaging.js
CHANGED
@@ -498,7 +498,8 @@ class NexusMessaging {
        if (response) {
          await this.sendMessage({
            code: from,
-           body: response
+           body: response,
+           processed: true
          });
        }
      } catch (error) {

@@ -637,7 +638,7 @@ class NexusMessaging {
        if (botResponse) {
          await this.sendMessage({
            code: chatId,
-
+           body: botResponse,
            processed: true
          });
        }
package/lib/helpers/assistantHelper.js
CHANGED
@@ -7,6 +7,7 @@ const { convertPdfToImages } = require('./filesHelper.js');
  const { analyzeImage } = require('../helpers/llmsHelper.js');

  const { getRecordByFilter } = require('../services/airtableService.js');
+ const { createProvider } = require('../providers/createProvider');

  const fs = require('fs');
  const path = require('path');

@@ -17,11 +18,12 @@ const mode = process.env.NODE_ENV || 'dev';

  async function checkRunStatus(assistant, thread_id, run_id, retryCount = 0, maxRetries = 30) {
    try {
-     const provider =
-     const run = await provider.getRun({
+     const provider = createProvider({ variant: process.env.VARIANT || 'assistants' });
+     const run = await provider.getRun({ threadId: thread_id, runId: run_id });
      console.log(`Status: ${run.status} ${thread_id} ${run_id} (attempt ${retryCount + 1})`);

-
+     const failedStatuses = ['failed', 'expired', 'incomplete', 'errored'];
+     if (failedStatuses.includes(run.status)) {
        console.log(`Run failed. ${run.status} `);
        console.log('Error:');
        console.log(run);

@@ -37,7 +39,7 @@ async function checkRunStatus(assistant, thread_id, run_id, retryCount = 0, maxRetries = 30) {
        await assistant.handleRequiresAction(run);
        await new Promise(resolve => setTimeout(resolve, 5000));
        return checkRunStatus(assistant, thread_id, run_id, retryCount + 1, maxRetries);
-     } else if (run.status
+     } else if (!['completed', 'succeeded'].includes(run.status)) {
        if (retryCount >= maxRetries) {
          return false;
        }

@@ -206,14 +208,13 @@ async function downloadMediaAndCreateFile(code, reply) {
    return fileNames;
  }

- async function processIndividualMessage(code, reply, thread) {
+ async function processIndividualMessage(code, reply, provider, thread) {
+   const tempFiles = [];
    try {
-     const provider = llmConfig.requireOpenAIProvider();
      const formattedMessage = formatMessage(reply);
      console.log('[processIndividualMessage] formattedMessage:', formattedMessage);
      const isNotAssistant = !reply.from_me;
      let messagesChat = [];
-     let attachments = [];
      let url = null;

      if (formattedMessage) {

@@ -224,6 +225,7 @@ async function processIndividualMessage(code, reply, thread) {
      if (reply.is_media) {
        console.log('IS MEDIA', reply.is_media);
        const fileNames = await downloadMediaAndCreateFile(code, reply);
+       tempFiles.push(...fileNames);
        for (const fileName of fileNames) {
          console.log(fileName);
          // Skip WBMP images and stickers

@@ -255,13 +257,17 @@ async function processIndividualMessage(code, reply, thread) {
            });
          } else {
            console.log('Add attachment');
-           const file = await provider.uploadFile({
+           /*const file = await provider.uploadFile({
              file: fs.createReadStream(fileName),
              purpose: 'vision',
            });
            messagesChat.push({
              type: 'image_file',
              image_file: { file_id: file.id },
+           });*/
+           messagesChat.push({
+             type: 'text',
+             text: imageAnalysis.description,
            });
          }
        } else if (fileName.includes('audio')) {

@@ -271,7 +277,6 @@ async function processIndividualMessage(code, reply, thread) {
            language: 'es'
          });
          const transcriptText = audioTranscript?.text || audioTranscript;
-         console.log('Inside AUDIO', transcriptText);
          messagesChat.push({
            type: 'text',
            text: transcriptText,

@@ -280,30 +285,40 @@ async function processIndividualMessage(code, reply, thread) {
        }
      }

-     console.log('messagesChat', messagesChat);
-     console.log('[processIndividualMessage] attachments:', attachments);
-     console.log('[processIndividualMessage] formattedMessage:', formattedMessage);
-
+     console.log('[processIndividualMessage] messagesChat', messagesChat);

      // ONLY add user messages to the thread
+     const threadId = process.env.VARIANT === 'responses' ? thread?.conversation_id : thread?.thread_id;
      if (isNotAssistant) {
+       console.log('Adding user message to thread', thread);
        await provider.addMessage({
-
+         threadId,
          role: 'user',
-         content: messagesChat
-         attachments: attachments
+         content: messagesChat
        });
        console.log('[processIndividualMessage] User message added to thread');
      }

      await Message.updateOne(
        { message_id: reply.message_id, timestamp: reply.timestamp },
-       { $set: { assistant_id: thread.assistant_id, thread_id:
+       { $set: { assistant_id: thread.assistant_id, thread_id: threadId } }
      );

      return {isNotAssistant, url};
    } catch (err) {
      console.log(`Error inside process message ${err}`);
+   } finally {
+     if (tempFiles.length > 0) {
+       await Promise.all(tempFiles.map(async (filePath) => {
+         try {
+           await fs.promises.unlink(filePath);
+         } catch (error) {
+           if (error?.code !== 'ENOENT') {
+             console.warn('[processIndividualMessage] Failed to remove temp file:', filePath, error?.message || error);
+           }
+         }
+       }));
+     }
    }
  }
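
processIndividualMessage now receives the provider from its caller instead of resolving it through llmConfig, and it picks the conversation id or thread id according to VARIANT. A hedged caller-side sketch; the surrounding loop, the Thread lookup shape, and the assumption that the helper is exported are all illustrative:

    // Illustrative caller; the real call sites are not part of this diff.
    const { createProvider } = require('./lib/providers/createProvider');
    const { Thread } = require('./lib/models/threadModel');
    const { processIndividualMessage } = require('./lib/helpers/assistantHelper'); // assumption: exported

    async function processReplies(code, pendingReplies) {
      const provider = createProvider({ variant: process.env.VARIANT || 'assistants' });
      const thread = await Thread.findOne({ code, active: true });   // assumption: lookup shape

      for (const reply of pendingReplies) {
        // Downloads media, transcribes audio, analyses images, appends user content to the
        // conversation (thread.conversation_id for 'responses', thread.thread_id otherwise),
        // and removes temp files in its finally block.
        await processIndividualMessage(code, reply, provider, thread);
      }
    }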
package/lib/helpers/llmsHelper.js
CHANGED
@@ -42,6 +42,33 @@ async function analyzeImage(imagePath) {
    const imageBuffer = await fs.promises.readFile(imagePath);
    const base64Image = imageBuffer.toString('base64');

+   // Descrciption of the image
+   const imageDescription = 'Describe the image in detail.';
+   const messageDescription = await anthropicClient.messages.create({
+     model: 'claude-3-7-sonnet-20250219',
+     max_tokens: 1024,
+     messages: [
+       {
+         role: 'user',
+         content: [
+           {
+             type: 'image',
+             source: {
+               type: 'base64',
+               media_type: mimeType,
+               data: base64Image,
+             },
+           },
+           {
+             type: 'text',
+             text: imageDescription,
+           },
+         ],
+       },
+     ],
+   });
+   const description = messageDescription.content[0].text;
+
    // Create a more specific prompt for table detection and extraction
    const tablePrompt = `Please analyze this image and respond in the following format:
  1. First, determine if there is a table in the image.

@@ -187,8 +214,8 @@ Ejemplo 1:
    const isTable = (table === null) ? false : true;
    const isRelevant = (messageRelevanceStr.includes('YES')) ? true : false;

-   return {
-     has_table: isTable, table_data: table};
+   return {description: description, medical_analysis: messageAnalysisStr,
+     medical_relevance: isRelevant, has_table: isTable, table_data: table};
  } catch (error) {
    console.error('Error analyzing image:', error);
    throw error;
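
analyzeImage now returns an image description (from a separate Claude call) alongside the existing table fields. A small sketch of consuming the richer result; the field names come from the return statement above, while the file path and logging are illustrative:

    const { analyzeImage } = require('./lib/helpers/llmsHelper');

    async function describeAttachment(imagePath) {
      const { description, medical_analysis, medical_relevance, has_table, table_data } = await analyzeImage(imagePath);

      if (has_table) console.log('Extracted table:', table_data);
      else if (medical_relevance) console.log('Medical analysis:', medical_analysis);

      // This description is what processIndividualMessage now pushes as a text part
      // instead of uploading the image file to the provider.
      return description;
    }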
package/lib/index.js
CHANGED
@@ -1,7 +1,9 @@
  const { NexusMessaging, setDefaultInstance } = require('./core/NexusMessaging');
  const { MongoStorage } = require('./storage/MongoStorage');
  const { MessageParser } = require('./utils/messageParser');
- const {
+ const { createProvider } = require('./providers/createProvider');
+ const { OpenAIAssistantsProvider } = require('./providers/OpenAIAssistantsProvider');
+ const { OpenAIResponsesProvider } = require('./providers/OpenAIResponsesProvider');
  const { loadNexusConfig } = require('./config/configLoader');
  const templateController = require('./controllers/templateController');
  const templateFlowController = require('./controllers/templateFlowController');

@@ -9,14 +11,13 @@ const interactive = require('./interactive');
  const runtimeConfig = require('./config/runtimeConfig');
  const llmConfigModule = require('./config/llmConfig');
  const {
-   configureLLMProvider: configureAssistantsLLM,
    registerAssistant,
    overrideGetAssistantById,
    configureAssistants: setAssistantsConfig
  } = require('./services/assistantService');
  const { TwilioProvider } = require('./adapters/TwilioProvider');
  const { BaileysProvider } = require('./adapters/BaileysProvider');
- const { BaseAssistant
+ const { BaseAssistant } = require('./assistants/BaseAssistant');
  const {
    setPreprocessingHandler,
    hasPreprocessingHandler,

@@ -134,16 +135,14 @@ class Nexus {

      // Initialize default LLM provider if requested
      if (llm === 'openai') {
-       this.llmProvider = new DefaultLLMProvider(llmConfig);
        try {
-         const providerInstance =
-
-           : null;
+         const providerInstance = createProvider(llmConfig);
+         this.llmProvider = providerInstance;

          if (providerInstance && typeof llmConfigModule.setOpenAIProvider === 'function') {
            llmConfigModule.setOpenAIProvider(providerInstance);
-         } else if (
-           llmConfigModule.setOpenAIClient(
+         } else if (providerInstance?.getClient && typeof llmConfigModule.setOpenAIClient === 'function') {
+           llmConfigModule.setOpenAIClient(providerInstance.getClient());
          }
        } catch (err) {
          console.warn('[Nexus] Failed to expose OpenAI provider:', err?.message || err);

@@ -175,15 +174,7 @@ class Nexus {
      const assistantsConfig = assistantsOpt || assistantOpt;
      try {
        if (this.llmProvider && typeof configureAssistantsLLM === 'function') {
-
-           ? this.llmProvider.getProvider()
-           : null;
-
-         if (providerInstance) {
-           configureAssistantsLLM(providerInstance);
-         } else if (typeof this.llmProvider.getClient === 'function') {
-           configureAssistantsLLM(this.llmProvider.getClient());
-         }
+         configureAssistantsLLM(this.llmProvider);
        }
        if (assistantsConfig) {
          if (assistantsConfig.registry && typeof assistantsConfig.registry === 'object') {

@@ -313,7 +304,7 @@ class Nexus {

    /**
     * Get LLM provider instance
-    * @returns {
+    * @returns {Object|null} LLM provider instance
     */
    getLLMProvider() {
      return this.llmProvider;

@@ -330,11 +321,11 @@ module.exports = {
    BaileysProvider,
    MongoStorage,
    MessageParser,
-
-
-
+   createProvider,
+   OpenAIAssistantsProvider,
+   OpenAIResponsesProvider,
+   BaseAssistant,
    registerAssistant,
-   configureAssistantsLLM,
    overrideGetAssistantById,
    configureAssistants: setAssistantsConfig,
    setPreprocessingHandler,
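
The public surface now exposes the provider classes and the factory directly, and drops configureAssistantsLLM from the export list. A consumer-side sketch using the names taken from the module.exports hunk above:

    const {
      createProvider,
      OpenAIAssistantsProvider,
      OpenAIResponsesProvider,
      BaseAssistant,
    } = require('@peopl-health/nexus');

    // Let VARIANT decide, or ask for the Responses-based provider explicitly.
    const provider = createProvider({ variant: 'responses' });
    // Assumption: the factory returns instances of the exported classes.
    console.log(provider instanceof OpenAIResponsesProvider);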
package/lib/models/messageModel.js
CHANGED
@@ -47,7 +47,7 @@ const messageSchema = new mongoose.Schema({
    }
  }, { timestamps: true });

- messageSchema.index({ message_id: 1,
+ messageSchema.index({ message_id: 1, timestamp: 1 }, { unique: true });

  messageSchema.pre('save', function (next) {
    if (this.timestamp) {

@@ -75,10 +75,11 @@ async function insertMessage(values) {
      group_id: values.group_id,
      reply_id: values.reply_id,
      from_me: values.from_me,
-     processed: values.processed || skipNumbers.includes(values.numero),
+     processed: values.processed || skipNumbers.includes(values.numero),
      media: values.media ? values.media : null,
      content_sid: values.content_sid || null
    };
+   console.log('Inserting message', messageData);

    await Message.findOneAndUpdate(
      { message_id: values.message_id, body: values.body },

@@ -113,7 +114,7 @@ function getMessageValues(message, content, reply, is_media) {
    const group_id = is_group ? message.key.remoteJid : null;
    const reply_id = reply || null;
    const from_me = message.key.fromMe;
-
+
    return {
      nombre_whatsapp,
      numero,
package/lib/models/threadModel.js
CHANGED
@@ -3,6 +3,8 @@ const mongoose = require('mongoose');
  const threadSchema = new mongoose.Schema({
    code: { type: String, required: true },
    assistant_id: { type: String, required: true },
+   conversation_id: { type: String, required: true },
+   prompt_id: { type: String, default: null },
    thread_id: { type: String, required: true },
    patient_id: { type: String, default: null },
    run_id: { type: String, default: null },

@@ -14,6 +16,7 @@ const threadSchema = new mongoose.Schema({

  threadSchema.index({ code: 1, active: 1 });
  threadSchema.index({ thread_id: 1 });
+ threadSchema.index({ conversation_id: 1 });

  const Thread = mongoose.model('Thread', threadSchema);
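
With the new schema a thread document carries both identifiers. An illustrative document for the Responses variant; the values are invented, and which of assistant_id or prompt_id is populated follows the prefix check in BaseAssistant.create():

    // Illustrative document shape only; field values are invented.
    const exampleThread = {
      code: '5215512345678',
      assistant_id: 'assistant_abc123',   // null when the configured id is a prompt id
      prompt_id: 'pmpt_68f0',             // null for the classic Assistants variant
      conversation_id: 'conv_123',        // indexed; used as the thread id when VARIANT === 'responses'
      thread_id: 'conv_123',
      patient_id: null,
      run_id: null,
      active: true
    };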