@peopl-health/nexus 2.5.11 → 2.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/examples/basic-usage.js +3 -4
- package/lib/assistants/BaseAssistant.js +1 -16
- package/lib/config/airtableConfig.js +3 -0
- package/lib/controllers/conversationController.js +52 -42
- package/lib/core/NexusMessaging.js +66 -87
- package/lib/helpers/messageHelper.js +0 -26
- package/lib/models/messageModel.js +6 -0
- package/lib/providers/OpenAIResponsesProvider.js +85 -80
- package/lib/providers/OpenAIResponsesProviderTools.js +4 -1
- package/lib/providers/createProvider.js +11 -1
- package/lib/routes/index.js +1 -1
- package/lib/services/ConversationManager.js +43 -0
- package/lib/services/DefaultConversationManager.js +207 -0
- package/lib/services/airtableService.js +1 -1
- package/lib/services/assistantServiceCore.js +0 -23
- package/lib/services/conversationService.js +42 -10
- package/package.json +2 -2

package/lib/providers/OpenAIResponsesProvider.js
CHANGED

@@ -3,9 +3,8 @@ const { Thread } = require('../models/threadModel');
 const { retryWithBackoff } = require('../utils/retryHelper');
 const {
   handleFunctionCalls: handleFunctionCallsUtil,
-  handlePendingFunctionCalls: handlePendingFunctionCallsUtil,
-  transformToolsForResponsesAPI: transformToolsForResponsesAPIUtil
 } = require('./OpenAIResponsesProviderTools');
+const { DefaultConversationManager } = require('../services/DefaultConversationManager');
 const { logger } = require('../utils/logger');
 
 const CONVERSATION_PREFIX = 'conv_';

@@ -25,6 +24,7 @@ class OpenAIResponsesProvider {
       organization,
       client,
       defaultModels = {},
+      conversationManager,
     } = options;
 
     if (!client && !apiKey) {

@@ -41,6 +41,7 @@ class OpenAIResponsesProvider {
     };
 
     this.variant = 'responses';
+    this.conversationManager = conversationManager || new DefaultConversationManager();
 
     this.responses = this.client.responses;
     this.conversations = this.client.conversations;

@@ -161,33 +162,6 @@ class OpenAIResponsesProvider {
     return await this.client.conversations.items.list(id, { order, limit });
   }
 
-  async cleanupOrphanedFunctionCalls(threadId, deleteAll = false) {
-    try {
-      const id = this._ensurethreadId(threadId);
-      const messages = await this.listMessages({ threadId: id, order: 'desc' });
-      const items = messages?.data || [];
-
-      if (items.length === 0) return;
-
-      if (deleteAll) {
-        logger.info(`[OpenAIResponsesProvider] Deleting all ${items.length} items from conversation`);
-        await Promise.all(items.map(item =>
-          this.conversations.items.delete(item.id, {conversation_id: id})
-        ));
-        return;
-      }
-
-      const outputCallIds = new Set(items.filter(i => i.type === 'function_call_output').map(o => o.call_id));
-      const orphanedCalls = items.filter(i => i.type === 'function_call' && !outputCallIds.has(i.call_id));
-
-      if (orphanedCalls.length > 0) {
-        await Promise.all(orphanedCalls.map(call => this.conversations.items.delete(call.id, {conversation_id: id})));
-      }
-    } catch (error) {
-      logger.warn('[OpenAIResponsesProvider] Failed to cleanup conversation:', error?.message);
-    }
-  }
-
   _normalizeThread(thread) {
     return {
       conversationId: thread.conversation_id || thread.getConversationId?.(),

@@ -200,107 +174,138 @@ class OpenAIResponsesProvider {
    */
   async executeRun({ thread, assistant, tools = [], config = {} }) {
     const { conversationId, assistantId } = this._normalizeThread(thread);
-
-      ...config,
-      assistant,
-      toolMetadata: {
-        numero: thread.code,
-        assistant_id: assistantId
-      }
-    };
-
+
     logger.info('[OpenAIResponsesProvider] Starting run', {
       conversationId,
       assistantId
     });
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+      // Delegate context building to conversation manager
+      const context = await this.conversationManager.buildContext({
+        thread,
+        assistant,
+        config: {
+          ...config,
+          threadId: conversationId,
+          assistantId,
+          toolMetadata: {
+            numero: thread.code,
+            assistant_id: assistantId
+          }
+        }
+      });
 
-
-
-
+      const filter = thread.code ? { code: thread.code, active: true } : null;
+
+      // Get clinical context for prompt variables
+      const clinicalData = await this.conversationManager.getClinicalData(thread.code);
+      const promptVariables = clinicalData ? {
+        clinical_context: clinicalData.clinicalContext || '',
+        last_symptoms: clinicalData.lastSymptoms || ''
+      } : null;
+
+      // Execute with built context
+      const result = await this.runConversation({
+        threadId: conversationId,
+        assistantId,
+        tools,
+        context,
+        promptVariables,
+        assistant,
+        ...config
+      });
+
+      // Delegate response processing to conversation manager
+      await this.conversationManager.processResponse(result, thread, config);
+
+      const completed = result.status === 'completed';
+      const output = await this.getRunText({
+        runId: result.id,
+        fallback: ''
+      });
+
+      if (filter) {
+        await Thread.updateOne(filter, { $set: { run_id: null } });
+      }
 
-
-
-
-
-
+      logger.info('[OpenAIResponsesProvider] Run complete', {
+        runId: result.id,
+        completed,
+        toolsExecuted: result.tools_executed?.length || 0
+      });
 
-
-
-
-
-
-
-
+      return {
+        run: result,
+        completed,
+        output,
+        tools_executed: result.tools_executed || [],
+        retries: result.retries || 0
+      };
+
+    } catch (error) {
+      logger.error('[OpenAIResponsesProvider] Execute run failed', {
+        conversationId,
+        assistantId,
+        error: error.message
+      });
+      throw error;
+    }
   }
 
   async runConversation({
     threadId,
     assistantId,
     additionalMessages = [],
+    context = null,
     instructions = null,
     additionalInstructions = null,
-    toolOutputs = [],
     metadata = {},
     topP,
     temperature,
     maxOutputTokens,
     truncationStrategy = 'auto',
-    tools = [],
     model,
     assistant,
     toolMetadata,
+    promptVariables = null,
   } = {}) {
     try {
       const id = this._ensurethreadId(threadId);
-
+
+      let input = context || this._convertItemsToApiFormat(additionalMessages);
       let allToolsExecuted = [];
       let totalRetries = 0;
 
-
-
-
-
-
-
-      }
+      input = input.filter(item => item.type !== 'function_call' && item.type !== 'function_call_output');
+
+      const promptConfig = promptVariables
+        ? { id: assistantId, variables: promptVariables }
+        : { id: assistantId };
+      logger.info('[OpenAIResponsesProvider] Prompt config', { promptConfig });
 
-      input = [...input, ...toolOutputs];
       const makeAPICall = (inputData) => retryWithBackoff(() =>
         this.client.responses.create({
-
-          prompt: { id: assistantId },
+          prompt: promptConfig,
          model: model || this.defaults.responseModel,
          instructions: additionalInstructions || instructions,
          input: inputData,
          metadata, top_p: topP, temperature, max_output_tokens: maxOutputTokens,
          truncation: truncationStrategy,
-          tools: transformToolsForResponsesAPIUtil(this.variant, tools),
        }), { providerName: PROVIDER_NAME });
 
       const { result: response, retries } = await makeAPICall(input);
       totalRetries += retries;
       let finalResponse = response;
 
+      // Handle function calls following OpenAI pattern
       if (assistant && response.output) {
         const functionCalls = response.output.filter(item => item.type === 'function_call');
 
         if (functionCalls.length > 0) {
           const { outputs, toolsExecuted } = await handleFunctionCallsUtil(functionCalls, assistant, toolMetadata || { thread_id: id, assistant_id: assistantId });
 
+          input.push(...response.output);
           input.push(...outputs);
           allToolsExecuted.push(...toolsExecuted);
 
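
For reference, a minimal call-site sketch of the reworked executeRun, based only on the return shape shown above. The thread and assistant arguments here are placeholders; the diff only implies that thread exposes code plus a conversation id and that assistant exposes executeTool(name, args).

// Hypothetical call site; illustrates the { run, completed, output, tools_executed, retries } shape.
async function runOnce(provider, thread, assistant) {
  const { run, completed, output, tools_executed, retries } = await provider.executeRun({
    thread,      // assumed to expose thread.code and resolve to a conversation id
    assistant,   // assumed to expose executeTool(name, args) for function calls
    tools: [],
    config: {}
  });

  if (!completed) {
    console.warn('Run did not complete', { runId: run.id, retries });
  }
  return { output, toolsExecuted: tools_executed.length };
}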

package/lib/providers/OpenAIResponsesProviderTools.js
CHANGED

@@ -5,13 +5,16 @@ async function executeFunctionCall(assistant, call, metadata = {}) {
   const name = call.name;
   const args = call.arguments ? JSON.parse(call.arguments) : {};
 
+  logger.info('[executeFunctionCall] Calling tool', { name, args, call_id: call.call_id });
+
   let result, success = true;
   try {
     result = await assistant.executeTool(name, args);
+    logger.info('[executeFunctionCall] Tool completed', { name, call_id: call.call_id, duration_ms: Date.now() - startTime });
   } catch (error) {
     result = { error: error?.message || 'Tool execution failed' };
     success = false;
-    logger.error('[
+    logger.error('[executeFunctionCall] Tool execution failed', { name, call_id: call.call_id, error: error?.message });
   }
 
   const toolData = {

package/lib/providers/createProvider.js
CHANGED

@@ -1,5 +1,6 @@
 const { OpenAIAssistantsProvider } = require('./OpenAIAssistantsProvider');
 const { OpenAIResponsesProvider } = require('./OpenAIResponsesProvider');
+const { DefaultConversationManager } = require('../services/DefaultConversationManager');
 const { logger } = require('../utils/logger');
 
 const PROVIDER_VARIANTS = {

@@ -16,8 +17,17 @@ function createProvider(config = {}) {
     .toString()
     .toLowerCase();
 
+  // Create conversation manager if not provided
+  const conversationManager = config.conversationManager || new DefaultConversationManager({
+    memorySystem: config.memorySystem
+  });
+
   const ProviderClass = PROVIDER_VARIANTS[variant] || OpenAIAssistantsProvider;
-  return new ProviderClass({
+  return new ProviderClass({
+    ...config,
+    variant,
+    conversationManager
+  });
 }
 
 module.exports = {
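
For reference, a minimal consumer-side sketch of the updated factory. The deep require path and the shape of memorySystem are assumptions, not confirmed by this diff; the diff only shows that createProvider builds a DefaultConversationManager({ memorySystem }) when config.conversationManager is absent and forwards it to the provider.

// Hypothetical wiring of a memory system through createProvider (path assumed).
const { createProvider } = require('@peopl-health/nexus/lib/providers/createProvider');

const memorySystem = {
  // Invoked by DefaultConversationManager.processResponse when response.output is present.
  async processResponse(response, thread) {
    console.log('memorizing response', response.id, 'for thread', thread.code);
  }
};

const provider = createProvider({
  variant: 'responses',
  apiKey: process.env.OPENAI_API_KEY,
  memorySystem
});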

package/lib/routes/index.js
CHANGED

@@ -68,8 +68,8 @@ const templateRouteDefinitions = {
 };
 
 const threadRouteDefinitions = {
-  'PUT /review/:code': 'updateThreadReviewStatus',
   'PUT /review/all': 'updateAllThreadsReviewStatus',
+  'PUT /review/:code': 'updateThreadReviewStatus',
   'GET /review': 'getThreadsByReviewStatus'
 };
 
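
The only change here is registration order: the literal /review/all route now precedes the parameterized /review/:code route. Assuming the route loader registers these definitions in object order onto an Express-style first-match router, this keeps PUT /review/all from being captured by :code with code === 'all'. A standalone sketch of the same principle (plain Express, used only for illustration; not this package's route loader):

const express = require('express');
const router = express.Router();

// Literal segment first; otherwise PUT /review/all would match ':code' with code === 'all'.
router.put('/review/all', (req, res) => res.json({ updated: 'all' }));
router.put('/review/:code', (req, res) => res.json({ updated: req.params.code }));

module.exports = router;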

package/lib/services/ConversationManager.js
ADDED

@@ -0,0 +1,43 @@
+const { logger } = require('../utils/logger');
+
+class ConversationManager {
+  constructor(options = {}) {
+    this.memorySystem = options.memorySystem || null;
+  }
+
+  async buildContext({ thread, assistant, config = {} }) {
+    throw new Error('buildContext must be implemented by subclass');
+  }
+
+  async processResponse(response, thread, config = {}) {
+    throw new Error('processResponse must be implemented by subclass');
+  }
+
+  setMemorySystem(memorySystem) {
+    this.memorySystem = memorySystem;
+  }
+
+  getMemorySystem() {
+    return this.memorySystem;
+  }
+
+  optimizeContextWindow(messages, maxTokens = 8000) {
+    if (!Array.isArray(messages) || messages.length === 0) return [];
+    const systemMessages = messages.filter(m => m.role === 'system');
+    const recentMessages = messages.filter(m => m.role !== 'system').slice(-20);
+    return [...systemMessages, ...recentMessages];
+  }
+
+  _convertToApiFormat(messages) {
+    return messages.map(msg => ({
+      role: msg.role || 'user',
+      content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content)
+    }));
+  }
+
+  _logActivity(action, metadata = {}) {
+    logger.info(`[ConversationManager] ${action}`, metadata);
+  }
+}
+
+module.exports = { ConversationManager };
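
The base class above is meant to be subclassed when the default history and clinical-context behavior is not wanted. A minimal sketch, assuming only the buildContext/processResponse contract shown in this file; the require path is an assumption.

// Hypothetical custom manager that skips historical lookups entirely.
const { ConversationManager } = require('@peopl-health/nexus/lib/services/ConversationManager');

class StatelessConversationManager extends ConversationManager {
  // Return only the caller-supplied messages.
  async buildContext({ thread, assistant, config = {} }) {
    this._logActivity('Building stateless context', { threadCode: thread.code });
    return config.additionalMessages || [];
  }

  // Mirror DefaultConversationManager: forward to an optional memory system.
  async processResponse(response, thread) {
    if (this.memorySystem && response.output) {
      await this.memorySystem.processResponse(response, thread);
    }
  }
}

// Passed as createProvider({ ..., conversationManager: new StatelessConversationManager() }),
// it replaces DefaultConversationManager for both context building and response processing.
module.exports = { StatelessConversationManager };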

package/lib/services/DefaultConversationManager.js
ADDED

@@ -0,0 +1,207 @@
+const { ConversationManager } = require('./ConversationManager');
+const { getLastNMessages } = require('../helpers/messageHelper');
+const { handlePendingFunctionCalls: handlePendingFunctionCallsUtil } = require('../providers/OpenAIResponsesProviderTools');
+const { getRecordByFilter } = require('./airtableService');
+const { Follow_Up_ID } = require('../config/airtableConfig');
+const { logger } = require('../utils/logger');
+
+class DefaultConversationManager extends ConversationManager {
+  constructor(options = {}) {
+    super(options);
+    this.maxHistoricalMessages = parseInt(process.env.MAX_HISTORICAL_MESSAGES || '50', 10);
+  }
+
+  async buildContext({ thread, assistant, config = {} }) {
+    this._logActivity('Building context', { threadCode: thread.code });
+
+    try {
+      const allMessages = await getLastNMessages(thread.code, this.maxHistoricalMessages);
+      const additionalMessages = config.additionalMessages || [];
+
+      // New conversation - no history yet
+      if (!allMessages?.length) {
+        return additionalMessages;
+      }
+
+      const messageContext = allMessages.reverse().map(msg => ({
+        role: msg.origin === 'patient' ? 'user' : 'assistant',
+        content: msg.body || msg.content || ''
+      }));
+
+      return [...additionalMessages, ...messageContext];
+    } catch (error) {
+      logger.error('[DefaultConversationManager] Context building failed', {
+        threadCode: thread.code,
+        error: error.message
+      });
+      throw error;
+    }
+  }
+
+  async processResponse(response, thread, config = {}) {
+    this._logActivity('Processing response', { threadCode: thread.code, responseId: response.id });
+
+    try {
+      if (this.memorySystem && response.output) {
+        await this.memorySystem.processResponse(response, thread);
+      }
+      this._logActivity('Response processed successfully', {
+        threadCode: thread.code,
+        responseId: response.id
+      });
+    } catch (error) {
+      logger.error('[DefaultConversationManager] Response processing failed', {
+        threadCode: thread.code,
+        error: error.message
+      });
+    }
+  }
+
+  async handlePendingFunctionCalls(assistant, conversationMessages, toolMetadata) {
+    if (!assistant || !conversationMessages?.length) return { outputs: [], toolsExecuted: [] };
+
+    try {
+      return await handlePendingFunctionCallsUtil(assistant, conversationMessages, toolMetadata);
+    } catch (error) {
+      logger.error('[DefaultConversationManager] Function call handling failed', { error: error.message });
+      return { outputs: [], toolsExecuted: [] };
+    }
+  }
+
+  optimizeContextWindow(messages) {
+    if (!Array.isArray(messages) || !messages.length) return [];
+
+    const cappedMessages = messages.length > this.maxHistoricalMessages
+      ? messages.slice(-this.maxHistoricalMessages)
+      : messages;
+
+    if (messages.length > this.maxHistoricalMessages) {
+      this._logActivity('Context capped', { originalCount: messages.length, cappedCount: this.maxHistoricalMessages });
+    }
+
+    return this._convertToApiFormat(cappedMessages);
+  }
+
+  _convertToApiFormat(items) {
+    return items.map(item => ({
+      role: item.role || 'user',
+      content: this._normalizeContent(item.content),
+      type: item.type || 'message',
+    }));
+  }
+
+  _normalizeContent(content) {
+    if (typeof content === 'string') return content;
+    if (Array.isArray(content)) return content;
+    if (content?.text) return content.text;
+    if (content?.type === 'text' && content.text) return content.text;
+    return content && typeof content === 'object' ? JSON.stringify(content) : content || '';
+  }
+
+  async _getClinicalContext(whatsappId) {
+    try {
+      const [clinicalRecords, symptomsRecords] = await Promise.all([
+        getRecordByFilter(Follow_Up_ID, 'estado_general', `{whatsapp_id}='${whatsappId}'`),
+        getRecordByFilter(Follow_Up_ID, 'all_triages_last_month', `{whatsapp_id}='${whatsappId}'`)
+      ]);
+
+      let clinicalContext = null;
+      if (clinicalRecords && clinicalRecords.length > 0 && clinicalRecords[0]['clinical-context-json']) {
+        clinicalContext = JSON.parse(clinicalRecords[0]['clinical-context-json']);
+      }
+
+      const sortedSymptoms = symptomsRecords
+        ?.filter(r => r.Date)
+        .sort((a, b) => new Date(b.Date) - new Date(a.Date))
+        .slice(0, 3) || [];
+
+      const symptoms = this._parseSymptoms(sortedSymptoms);
+
+      return { ...clinicalContext, symptoms };
+    } catch (error) {
+      logger.error('[DefaultConversationManager] Error fetching clinical context', { error: error.message });
+      return null;
+    }
+  }
+
+  _parseSymptoms(records) {
+    if (!records || records.length === 0) return [];
+
+    return records.map(record => {
+      const symptomNames = record['symptom_name (from symptoms)'];
+      const grades = record['grade_num (from symptoms)'];
+      const recommendations = record['short_recommendations'];
+      const date = record['Date'];
+
+      if (!symptomNames || !grades) return null;
+
+      const nameArray = typeof symptomNames === 'string'
+        ? symptomNames.split(',').map(s => s.trim())
+        : Array.isArray(symptomNames) ? symptomNames : [symptomNames];
+
+      const gradeArray = typeof grades === 'string'
+        ? grades.split(',').map(g => parseInt(g.trim()))
+        : Array.isArray(grades) ? grades.map(g => parseInt(g)) : [parseInt(grades)];
+
+      const recArray = recommendations && typeof recommendations === 'string'
+        ? recommendations.split('.,').map(r => r.trim())
+        : [];
+
+      const symptoms = nameArray
+        .map((name, idx) => ({
+          name,
+          grade: gradeArray[idx] || 0,
+          recommendation: recArray[idx] || ''
+        }))
+        .filter(s => s.grade > 0);
+
+      return {
+        date,
+        symptoms
+      };
+    }).filter(Boolean);
+  }
+
+  async getClinicalData(whatsappId) {
+    try {
+      const data = await this._getClinicalContext(whatsappId);
+      if (!data) return null;
+
+      // Format clinical context (demographics + clinical data)
+      const contextParts = [];
+      if (data.demografia) {
+        const d = data.demografia;
+        if (d.edad) contextParts.push(`Edad: ${d.edad}`);
+        if (d.sexo) contextParts.push(`Sexo: ${d.sexo}`);
+      }
+      if (data.datos_clinicos) {
+        const c = data.datos_clinicos;
+        if (c.diagnostico) contextParts.push(`Diagnóstico: ${c.diagnostico}`);
+        if (c.tratamiento_actual) contextParts.push(`Tratamiento actual: ${c.tratamiento_actual}`);
+        if (c.medicacion_actual) contextParts.push(`Medicación: ${c.medicacion_actual}`);
+        if (c.alergias_medicamentos) contextParts.push(`Alergias: ${c.alergias_medicamentos}`);
+        if (c.comorbilidades) contextParts.push(`Comorbilidades: ${c.comorbilidades}`);
+      }
+
+      // Format last symptoms
+      const symptomParts = [];
+      if (data.symptoms?.length > 0) {
+        data.symptoms.forEach(triage => {
+          const dateStr = new Date(triage.date).toLocaleDateString('es-PE');
+          const symptomsStr = triage.symptoms.map(s => `${s.name} (grado ${s.grade})`).join(', ');
+          if (symptomsStr) symptomParts.push(`${dateStr}: ${symptomsStr}`);
+        });
+      }
+
+      return {
+        clinicalContext: contextParts.join('. ') || '',
+        lastSymptoms: symptomParts.join('; ') || 'Sin síntomas reportados recientemente'
+      };
+    } catch (error) {
+      logger.error('[DefaultConversationManager] Error getting clinical data', { error: error.message });
+      return null;
+    }
+  }
+}
+
+module.exports = { DefaultConversationManager };

package/lib/services/airtableService.js
CHANGED

@@ -52,7 +52,7 @@ async function getRecordByFilter(baseID, tableName, filter, view = 'Grid view')
     });
     return records;
   } catch (error) {
-    logger.error(`Error fetching records by ${filter}:`, error);
+    logger.error(`Error fetching records from ${tableName} by ${filter}:`, error);
   }
 }
 

package/lib/services/assistantServiceCore.js
CHANGED

@@ -41,15 +41,6 @@ const createAssistantCore = async (code, assistant_id, messages = [], force = fa
   try {
     const assistant = getAssistantById(assistant_id, null);
     const initialThread = await assistant.create(code, curRow[0]);
-
-    const provider = createProvider({ variant: process.env.VARIANT || 'assistants' });
-    for (const message of messages) {
-      await provider.addMessage({
-        threadId: initialThread.id,
-        role: 'assistant',
-        content: message
-      });
-    }
 
     const thread = {
       code: code,

@@ -76,17 +67,9 @@ const addMsgAssistantCore = async (code, inMessages, role = 'system', reply = fa
   const thread = await getThread(code);
   if (!thread) return null;
 
-  const provider = createProvider({ variant: process.env.VARIANT || 'assistants' });
-  const threadId = thread.getConversationId();
-
   try {
     const messages = Array.isArray(inMessages) ? inMessages : [inMessages];
 
-    await provider.addMessage({
-      threadId,
-      messages: messages.map(message => ({ role, content: message }))
-    });
-
     // Save system messages to database for frontend visibility
     if (!skipSystemMessage) {
       for (let i = 0; i < messages.length; i++) {

@@ -205,12 +188,6 @@ const replyAssistantCore = async (code, message_ = null, thread_ = null, runOpti
   const urls = processResults.filter(r => r.url).map(r => ({ url: r.url }));
   const allMessagesToAdd = processResults.flatMap(r => r.messages || []);
   const allTempFiles = processResults.flatMap(r => r.tempFiles || []);
-
-  if (allMessagesToAdd.length > 0) {
-    logger.info(`[replyAssistantCore] Adding ${allMessagesToAdd.length} messages to thread in batch`);
-    const threadId = finalThread.getConversationId();
-    await provider.addMessage({ threadId, messages: allMessagesToAdd });
-  }
 
   await Promise.all(processResults.map(r => updateMessageRecord(r.reply, finalThread)));
   await cleanupFiles(allTempFiles);