@peopl-health/nexus 2.4.11 → 2.4.12

@@ -118,7 +118,7 @@ class TwilioProvider extends MessageProvider {
         provider: 'twilio',
         timestamp: new Date(),
         fromMe: true,
-        processed: false
+        processed: messageData.processed !== undefined ? messageData.processed : false
       });
       logger.info('[TwilioProvider] Message persisted successfully', { messageId: result.sid });
     } catch (storageError) {
@@ -29,7 +29,7 @@ const addInsAssistantController = async (req, res) => {
     const role = variant === 'responses' ? 'developer' : 'user';

     const ans = await addInsAssistant(code, instruction, role);
-    if (ans) await sendMessage({code, body: ans, fileType: 'text'});
+    if (ans) await sendMessage({code, body: ans, fileType: 'text', origin: 'assistant'});
     return res.status(200).send({ message: 'Add instruction to the assistant' });
   } catch (error) {
     logger.error(error);
@@ -42,7 +42,7 @@ const addMsgAssistantController = async (req, res) => {

   try {
     const ans = await addMsgAssistant(code, messages, role, reply);
-    if (ans) await sendMessage({code, body: ans, fileType: 'text'});
+    if (ans) await sendMessage({code, body: ans, fileType: 'text', origin: 'assistant'});
     return res.status(200).send({ message: 'Add message to the assistant' });
   } catch (error) {
     logger.error(error);
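
Both controllers now tag the assistant's generated reply with origin: 'assistant' before handing it to sendMessage. The flag matters downstream: NexusMessaging.sendMessage (see the @@ -297,14 +301,24 hunk below) only mirrors outbound text into the thread context when origin !== 'assistant', so assistant-authored replies are no longer appended to their own thread a second time. A hedged usage sketch; the manual-send example and its body text are illustrative, not taken from the package:

// Assistant-generated reply: tagged so the thread-context mirror step is skipped.
const ans = await addMsgAssistant(code, messages, role, reply);
if (ans) await sendMessage({ code, body: ans, fileType: 'text', origin: 'assistant' });

// Manually composed send (no 'assistant' origin): NexusMessaging still adds the
// text to the assistant thread so the model sees what the operator sent.
await sendMessage({ code, body: 'Manual follow-up message', fileType: 'text' });
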
@@ -232,7 +232,8 @@ const getConversationReplyController = async (req, res) => {

     const messageData = {
       code: formattedPhoneNumber,
-      fileType: 'text'
+      fileType: 'text',
+      _fromConversationReply: true
     };

     // Handle template message (contentSid provided)
@@ -279,6 +279,10 @@ class NexusMessaging {
       throw new Error('No provider initialized');
     }

+    if (messageData._fromConversationReply && messageData.code && (messageData.body || messageData.message || messageData.contentSid)) {
+      messageData.processed = true;
+    }
+
     const result = await this.provider.sendMessage(messageData);

     // Store message only if provider does not handle persistence itself
@@ -297,14 +301,24 @@ class NexusMessaging {
       });
     }

-    // Add to thread context for manual sends
-    if (messageData.origin !== 'assistant' && messageData.code &&
-        (messageData.body || messageData.message)) {
+    // Add to thread context for manual sends (text messages and templates)
+    let messageContent = messageData.body || messageData.message;
+    if (!messageContent && messageData.contentSid && typeof this.provider.renderTemplate === 'function') {
+      try {
+        messageContent = await this.provider.renderTemplate(messageData.contentSid, messageData.variables);
+      } catch (err) {
+        logger.warn(`[NexusMessaging] Failed to render template for thread: ${err.message}`);
+      }
+    }
+
+    if (messageData.origin !== 'assistant' && messageData.code && messageContent) {
+      const skipSystemMessage = messageData._fromConversationReply === true;
       await addMsgAssistant(
         messageData.code,
-        [messageData.body || messageData.message],
+        [messageContent],
         'assistant',
-        false
+        false,
+        skipSystemMessage
       );
     }

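Combined with the _fromConversationReply marker added in getConversationReplyController above, the conversation-reply path now behaves roughly as follows. This is a hedged sketch of the calling side; the handler variables (formattedPhoneNumber, replyText, messaging) are assumptions, and only the flag semantics come from the hunks above:

// Controller side: build the payload with the internal marker.
const messageData = {
  code: formattedPhoneNumber,      // destination number
  fileType: 'text',
  _fromConversationReply: true,    // internal flag introduced in 2.4.12
  body: replyText                  // or contentSid + variables for a template
};

// NexusMessaging.sendMessage then:
// 1. promotes processed to true before the provider persists the message, so
//    TwilioProvider stores processed: true instead of the old hard-coded false;
// 2. derives skipSystemMessage = true, so addMsgAssistant updates the thread
//    context without writing a duplicate 'system' row for the same message.
await messaging.sendMessage(messageData);
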
@@ -4,6 +4,7 @@ const { analyzeImage } = require('./llmsHelper.js');
 const { cleanupFiles, downloadMediaAndCreateFile } = require('./filesHelper.js');
 const { formatMessage } = require('./messageHelper.js');
 const { sanitizeLogMetadata } = require('../utils/sanitizer.js');
+const { withTracing } = require('../utils/tracingDecorator.js');

 /**
  * Structured logging with PHI protection
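
tracingDecorator.js itself is not included in this diff, but the call sites below imply its contract: withTracing(fn, spanName, attributesFn, options) returns a wrapped async function, and with { returnTiming: true } the wrapper resolves to { result, duration } instead of the bare result. A minimal sketch that satisfies that contract, purely as an assumption about the real decorator (which presumably also emits tracing spans):

// Hypothetical stand-in for ../utils/tracingDecorator.js, inferred from usage.
const withTracing = (fn, spanName, attributesFn = () => ({}), options = {}) =>
  async (...args) => {
    const startedAt = Date.now();
    // The real decorator would open a span named `spanName` here and attach
    // the attributes returned by attributesFn(...args).
    const result = await fn(...args);
    const duration = Date.now() - startedAt;
    return options.returnTiming ? { result, duration } : result;
  };

This matches both usage styles in the hunks below: decorating a named function once (processImageFile = withTracing(processImageFileCore, ...)) and wrapping a single call inline with { returnTiming: true } to capture its duration.
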
@@ -56,35 +57,41 @@ const processTextMessage = (reply) => {
   return messagesChat;
 };

-const processImageFile = async (fileName, reply) => {
+const processImageFileCore = async (fileName, reply) => {
   let imageAnalysis = null;
   let url = null;
   const messagesChat = [];
+  const timings = {
+    analysis_ms: 0,
+    url_generation_ms: 0
+  };

   const isSticker = reply.media?.mediaType === 'sticker' ||
     fileName.toLowerCase().includes('sticker/') ||
     fileName.toLowerCase().includes('/sticker/');

   try {
-    imageAnalysis = await analyzeImage(fileName, isSticker, reply.media?.contentType);
-
-    logger.info('processImageFile', {
-      message_id: reply.message_id,
-      bucketName: reply.media?.bucketName,
-      key: reply.media?.key,
-      is_sticker: isSticker,
-      medical_relevance: imageAnalysis?.medical_relevance,
-      has_table: imageAnalysis?.has_table,
-      analysis_type: imageAnalysis?.medical_analysis ? 'medical' : 'general'
-    });
-
-    logger.debug('processImageFile_analysis', { imageAnalysis });
+    const { result: analysis, duration: analysisDuration } = await withTracing(
+      analyzeImage,
+      'analyze_image',
+      () => ({ 'image.is_sticker': isSticker, 'image.message_id': reply.message_id }),
+      { returnTiming: true }
+    )(fileName, isSticker, reply.media?.contentType);
+    imageAnalysis = analysis;
+    timings.analysis_ms = analysisDuration;

     const invalidAnalysis = ['NOT_MEDICAL', 'QUALITY_INSUFFICIENT'];

     // Generate presigned URL only if medically relevant AND not a sticker
     if (imageAnalysis?.medical_relevance && !isSticker) {
-      url = await generatePresignedUrl(reply.media.bucketName, reply.media.key);
+      const { result: presignedUrl, duration: urlDuration } = await withTracing(
+        generatePresignedUrl,
+        'generate_presigned_url',
+        () => ({ 'url.bucket': reply.media.bucketName }),
+        { returnTiming: true }
+      )(reply.media.bucketName, reply.media.key);
+      url = presignedUrl;
+      timings.url_generation_ms = urlDuration;
     }

     // Add appropriate text based on analysis
@@ -104,6 +111,18 @@ const processImageFile = async (fileName, reply) => {
         text: imageAnalysis?.description || 'Image processed',
       });
     }
+
+    logger.info('processImageFile', {
+      message_id: reply.message_id,
+      is_sticker: isSticker,
+      medical_relevance: imageAnalysis?.medical_relevance,
+      has_table: imageAnalysis?.has_table,
+      analysis_type: imageAnalysis?.medical_analysis ? 'medical' : 'general',
+      ...timings
+    });
+
+    logger.debug('processImageFile_analysis', { imageAnalysis });
+
   } catch (error) {
     logger.error('processImageFile', error, {
       message_id: reply.message_id,
@@ -116,18 +135,36 @@ const processImageFile = async (fileName, reply) => {
     });
   }

-  return { messagesChat, url };
+  return { messagesChat, url, timings };
 };

-const processAudioFile = async (fileName, provider) => {
+const processImageFile = withTracing(
+  processImageFileCore,
+  'process_image_file',
+  (fileName, reply) => ({
+    'image.message_id': reply.message_id,
+    'image.has_media': !!reply.media
+  })
+);
+
+const processAudioFileCore = async (fileName, provider) => {
   const messagesChat = [];
+  const timings = {
+    transcribe_ms: 0
+  };

   try {
-    const audioTranscript = await provider.transcribeAudio({
-      file: fs.createReadStream(fileName),
-      responseFormat: 'text',
-      language: 'es'
-    });
+    const { result: audioTranscript, duration: transcribeDuration } = await withTracing(
+      async () => provider.transcribeAudio({
+        file: fs.createReadStream(fileName),
+        responseFormat: 'text',
+        language: 'es'
+      }),
+      'transcribe_audio',
+      () => ({ 'audio.file_name': fileName ? fileName.split('/').pop().replace(/^[^-]+-[^-]+-/, 'xxx-xxx-') : 'unknown' }),
+      { returnTiming: true }
+    )();
+    timings.transcribe_ms = transcribeDuration;

     const transcriptText = audioTranscript?.text || audioTranscript;
     messagesChat.push({
@@ -138,7 +175,8 @@ const processAudioFile = async (fileName, provider) => {
     logger.info('processAudioFile', {
       fileName: fileName ? fileName.split('/').pop().replace(/^[^-]+-[^-]+-/, 'xxx-xxx-') : 'unknown',
       transcription_success: true,
-      transcript_length: transcriptText?.length || 0
+      transcript_length: transcriptText?.length || 0,
+      ...timings
     });

     logger.debug('processAudioFile_transcript', { transcriptText });
@@ -153,34 +191,44 @@ const processAudioFile = async (fileName, provider) => {
     });
   }

-  return messagesChat;
+  return { messagesChat, timings };
 };

-const processMediaFiles = async (code, reply, provider) => {
+const processAudioFile = withTracing(
+  processAudioFileCore,
+  'process_audio_file',
+  (fileName) => ({
+    'audio.file_name': fileName ? fileName.split('/').pop().replace(/^[^-]+-[^-]+-/, 'xxx-xxx-') : 'unknown'
+  })
+);
+
+const processMediaFilesCore = async (code, reply, provider) => {
   let url = null;
   const messagesChat = [];
   const tempFiles = [];
+  const timings = {
+    download_ms: 0,
+    image_analysis_ms: 0,
+    audio_transcription_ms: 0,
+    url_generation_ms: 0
+  };

   if (!reply.is_media) {
-    return { messagesChat, url, tempFiles };
+    return { messagesChat, url, tempFiles, timings };
   }

-  logger.info('processMediaFiles', {
-    message_id: reply.message_id,
-    processing_media: true
-  });
-
-  const fileNames = await downloadMediaAndCreateFile(code, reply);
+  const { result: fileNames, duration: downloadDuration } = await withTracing(
+    downloadMediaAndCreateFile,
+    'download_media',
+    () => ({ 'media.message_id': reply.message_id, 'media.type': reply.media?.mediaType }),
+    { returnTiming: true }
+  )(code, reply);
+  timings.download_ms = downloadDuration;
   tempFiles.push(...fileNames);

   for (const fileName of fileNames) {
     const safeFileName = fileName ? fileName.split('/').pop().replace(/^[^-]+-[^-]+-/, 'xxx-xxx-') : 'unknown';

-    logger.info('processMediaFiles_file', {
-      message_id: reply.message_id,
-      fileName: safeFileName
-    });
-
     // Skip only WBMP files (unsupported format)
     if (fileName.toLowerCase().includes('.wbmp')) {
       logger.info('processMediaFiles_skip', {
@@ -200,34 +248,75 @@ const processMediaFiles = async (code, reply, provider) => {
       fileName.toLowerCase().includes('/sticker/');

     if (isImageLike) {
-      const { messagesChat: imageMessages, url: imageUrl } = await processImageFile(fileName, reply);
+      const { messagesChat: imageMessages, url: imageUrl, timings: imageTimings } = await processImageFile(fileName, reply);
+
       messagesChat.push(...imageMessages);
       if (imageUrl) url = imageUrl;
+
+      if (imageTimings) {
+        timings.image_analysis_ms += imageTimings.analysis_ms || 0;
+        timings.url_generation_ms += imageTimings.url_generation_ms || 0;
+      }
     } else if (fileName.includes('audio')) {
-      const audioMessages = await processAudioFile(fileName, provider);
+      const { messagesChat: audioMessages, timings: audioTimings } = await processAudioFile(fileName, provider);
+
       messagesChat.push(...audioMessages);
+
+      if (audioTimings) {
+        timings.audio_transcription_ms += audioTimings.transcribe_ms || 0;
+      }
     }
   }

-  return { messagesChat, url, tempFiles };
+  logger.info('processMediaFiles_complete', {
+    message_id: reply.message_id,
+    file_count: fileNames.length,
+    ...timings
+  });
+
+  return { messagesChat, url, tempFiles, timings };
 };

-const processThreadMessage = async (code, replies, provider) => {
+const processMediaFiles = withTracing(
+  processMediaFilesCore,
+  'process_media_files',
+  (code, reply) => ({
+    'media.message_id': reply.message_id,
+    'media.is_media': reply.is_media
+  })
+);
+
+const processThreadMessageCore = async (code, replies, provider) => {
   const replyArray = Array.isArray(replies) ? replies : [replies];
+  const timings = {
+    download_ms: 0,
+    image_analysis_ms: 0,
+    audio_transcription_ms: 0,
+    url_generation_ms: 0,
+    total_media_ms: 0
+  };

   const results = await Promise.all(
     replyArray.map(async (reply, i) => {
       let tempFiles = [];
+
       try {
         const isPatient = reply.origin === 'patient';
-        const [textMessages, mediaResult] = await Promise.all([
-          Promise.resolve(processTextMessage(reply)),
-          processMediaFiles(code, reply, provider)
-        ]);

-        const { messagesChat: mediaMessages, url, tempFiles: mediaFiles } = mediaResult;
+        const textMessages = processTextMessage(reply);
+        const mediaResult = await processMediaFiles(code, reply, provider);
+
+        const { messagesChat: mediaMessages, url, tempFiles: mediaFiles, timings: mediaTimings } = mediaResult;
         tempFiles = mediaFiles;

+        if (mediaTimings) {
+          timings.download_ms += mediaTimings.download_ms || 0;
+          timings.image_analysis_ms += mediaTimings.image_analysis_ms || 0;
+          timings.audio_transcription_ms += mediaTimings.audio_transcription_ms || 0;
+          timings.url_generation_ms += mediaTimings.url_generation_ms || 0;
+          timings.total_media_ms += (mediaTimings.download_ms + mediaTimings.image_analysis_ms + mediaTimings.audio_transcription_ms + mediaTimings.url_generation_ms);
+        }
+
         const allMessages = [...textMessages, ...mediaMessages];
         const role = reply.origin === 'patient' ? 'user' : 'assistant';
         const messages = allMessages.map(content => ({ role, content }));
@@ -235,22 +324,40 @@ const processThreadMessage = async (code, replies, provider) => {
         logger.info('processThreadMessage', {
           index: i + 1,
           total: replyArray.length,
-          isPatient,
-          hasUrl: !!url
+          isPatient,
+          hasMedia: reply.is_media,
+          hasUrl: !!url
         });

         return { isPatient, url, messages, reply, tempFiles };
       } catch (error) {
-        logger.error('processThreadMessage', error, { message_id: reply.message_id, origin: reply.origin });
+        logger.error('processThreadMessage', error, {
+          message_id: reply.message_id,
+          origin: reply.origin
+        });
         await cleanupFiles(tempFiles);
         return { isPatient: false, url: null, messages: [], reply, tempFiles: [] };
       }
     })
   );

-  return results;
+  logger.info('processThreadMessage_complete', {
+    message_count: replyArray.length,
+    ...timings
+  });
+
+  return { results, timings };
 };

+const processThreadMessage = withTracing(
+  processThreadMessageCore,
+  'process_thread_messages',
+  (code, replies) => ({
+    'messages.count': Array.isArray(replies) ? replies.length : 1,
+    'thread.code': code
+  })
+);
+
 module.exports = {
   processTextMessage,
   processImageFile,
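
Note that the exported processThreadMessage now resolves to { results, timings } rather than the bare results array, so any consumer outside this file has to destructure the new shape (replyAssistantCore is updated accordingly further down). A hedged consumer sketch with illustrative variables:

// 2.4.11: const results = await processThreadMessage(code, replies, provider);
// 2.4.12: per-reply results and aggregated media timings come back together.
const { results, timings } = await processThreadMessage(code, replies, provider);
const patientMsg = results.some(r => r.isPatient);
logger.info('media timings', timings); // { download_ms, image_analysis_ms, ... }
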
@@ -30,7 +30,7 @@ const messageSchema = new mongoose.Schema({
   from_me: { type: Boolean, default: false },
   origin: {
     type: String,
-    enum: ['whatsapp_platform', 'assistant', 'patient'],
+    enum: ['whatsapp_platform', 'assistant', 'patient', 'system', 'instruction'],
     default: 'whatsapp_platform' },
   tools_executed: [{
     tool_name: { type: String, required: true },
@@ -7,6 +7,7 @@ const { createProvider } = require('../providers/createProvider');

 const { Thread } = require('../models/threadModel.js');
 const { PredictionMetrics } = require('../models/predictionMetricsModel');
+const { insertMessage } = require('../models/messageModel');

 const { getCurRow } = require('../helpers/assistantHelper.js');
 const { runAssistantAndWait, runAssistantWithRetries } = require('../helpers/assistantHelper.js');
@@ -186,7 +187,7 @@ const createAssistant = async (code, assistant_id, messages=[], force=false) =>
   return thread;
 };

-const addMsgAssistant = async (code, inMessages, role = 'user', reply = false) => {
+const addMsgAssistant = async (code, inMessages, role = 'user', reply = false, skipSystemMessage = false) => {
   try {
     let thread = await Thread.findOne({ code: code });
     logger.info(thread);
@@ -204,6 +205,32 @@ const addMsgAssistant = async (code, inMessages, role = 'user', reply = false) =
         role: role,
         content: message
       });
+
+      // Save system message to database for frontend visibility
+      // Skip if message is already saved (e.g., from getConversationReplyController)
+      if (!skipSystemMessage) {
+        try {
+          const message_id = `system_${Date.now()}_${Math.random().toString(36).substring(7)}`;
+          await insertMessage({
+            nombre_whatsapp: 'System',
+            numero: code,
+            body: message,
+            timestamp: new Date(),
+            message_id: message_id,
+            is_group: false,
+            is_media: false,
+            from_me: true,
+            processed: true,
+            origin: 'system',
+            thread_id: thread.getConversationId(),
+            assistant_id: thread.getAssistantId(),
+            raw: { role: role }
+          });
+        } catch (err) {
+          // Don't throw - we don't want to break the flow if logging fails
+          logger.error('[addMsgAssistant] Error saving system message:', err);
+        }
+      }
     }
   },
   thread,
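
The new fifth parameter only controls persistence of the mirrored message, not the thread update itself. A hedged sketch of the two call styles now present in the package (argument values are illustrative):

// Default: the message is also saved via insertMessage as an origin: 'system'
// record so the frontend can display what was injected into the thread.
await addMsgAssistant(code, ['Recordatorio: cita mañana a las 10am'], 'assistant', false);

// From NexusMessaging.sendMessage when _fromConversationReply is set: the
// provider already persisted the outbound message, so skipSystemMessage = true
// prevents a duplicate 'system' row.
await addMsgAssistant(code, [messageContent], 'assistant', false, true);
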
@@ -263,6 +290,29 @@ const addInstructionCore = async (code, instruction, role = 'user') => {
     null // no patientReply for instructions
   );

+  // Save instruction to database for frontend visibility
+  try {
+    const message_id = `instruction_${Date.now()}_${Math.random().toString(36).substring(7)}`;
+    await insertMessage({
+      nombre_whatsapp: 'Instruction',
+      numero: code,
+      body: instruction,
+      timestamp: new Date(),
+      message_id: message_id,
+      is_group: false,
+      is_media: false,
+      from_me: true,
+      processed: true,
+      origin: 'instruction',
+      thread_id: thread.getConversationId(),
+      assistant_id: thread.getAssistantId(),
+      raw: { role: role }
+    });
+  } catch (err) {
+    // Don't throw - we don't want to break the flow if logging fails
+    logger.error('[addInstructionCore] Error saving instruction message:', err);
+  }
+
   logger.info('RUN RESPONSE', output);
   return output;
 };
@@ -314,7 +364,7 @@ const replyAssistantCore = async (code, message_ = null, thread_ = null, runOpti

   logger.info(`[replyAssistantCore] Processing ${patientReply.length} messages in parallel`);

-  const { result: processResults, duration: processMessagesMs } = await withTracing(
+  const { result: processResult, duration: processMessagesMs } = await withTracing(
     processThreadMessage,
     'process_thread_messages',
     (code, patientReply, provider) => ({
@@ -323,8 +373,22 @@ const replyAssistantCore = async (code, message_ = null, thread_ = null, runOpti
     }),
     { returnTiming: true }
   )(code, patientReply, provider);
+
+  const { results: processResults, timings: processTimings } = processResult;
   timings.process_messages_ms = processMessagesMs;

+  logger.debug('[replyAssistantCore] Process timings breakdown', { processTimings });
+
+  if (processTimings) {
+    timings.process_messages_breakdown = {
+      download_ms: processTimings.download_ms || 0,
+      image_analysis_ms: processTimings.image_analysis_ms || 0,
+      audio_transcription_ms: processTimings.audio_transcription_ms || 0,
+      url_generation_ms: processTimings.url_generation_ms || 0,
+      total_media_ms: processTimings.total_media_ms || 0
+    };
+  }
+
   const patientMsg = processResults.some(r => r.isPatient);
   const urls = processResults.filter(r => r.url).map(r => ({ url: r.url }));
   const allMessagesToAdd = processResults.flatMap(r => r.messages || []);
@@ -401,6 +465,11 @@ const replyAssistantCore = async (code, message_ = null, thread_ = null, runOpti
   });

   if (output && predictionTimeMs) {
+    logger.debug('[replyAssistantCore] Storing metrics with timing_breakdown', {
+      timing_breakdown: timings,
+      has_breakdown: !!timings.process_messages_breakdown
+    });
+
     await PredictionMetrics.create({
       message_id: `${code}-${Date.now()}`,
       numero: code,
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@peopl-health/nexus",
-  "version": "2.4.11",
+  "version": "2.4.12",
   "description": "Core messaging and assistant library for WhatsApp communication platforms",
   "keywords": [
     "whatsapp",