@peopl-health/nexus 2.4.7 → 2.4.9-fix-pdf-processing
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/examples/assistants/DoctorScheduleAssistant.js +3 -2
- package/lib/adapters/BaileysProvider.js +5 -4
- package/lib/adapters/TwilioProvider.js +20 -19
- package/lib/assistants/BaseAssistant.js +7 -6
- package/lib/config/awsConfig.js +14 -12
- package/lib/config/llmConfig.js +3 -2
- package/lib/config/mongoAuthConfig.js +5 -5
- package/lib/controllers/assistantController.js +12 -11
- package/lib/controllers/bugReportController.js +6 -5
- package/lib/controllers/conversationController.js +72 -71
- package/lib/controllers/interactionController.js +7 -6
- package/lib/controllers/mediaController.js +15 -13
- package/lib/controllers/messageController.js +7 -6
- package/lib/controllers/patientController.js +2 -1
- package/lib/controllers/qualityMessageController.js +5 -4
- package/lib/controllers/templateController.js +11 -9
- package/lib/controllers/uploadController.js +3 -1
- package/lib/core/NexusMessaging.js +18 -18
- package/lib/helpers/assistantHelper.js +8 -9
- package/lib/helpers/baileysHelper.js +4 -3
- package/lib/helpers/filesHelper.js +23 -14
- package/lib/helpers/llmsHelper.js +17 -10
- package/lib/helpers/mediaHelper.js +3 -2
- package/lib/helpers/messageHelper.js +12 -11
- package/lib/helpers/processHelper.js +2 -2
- package/lib/helpers/qrHelper.js +2 -1
- package/lib/helpers/twilioMediaProcessor.js +19 -29
- package/lib/helpers/whatsappHelper.js +3 -2
- package/lib/index.js +11 -14
- package/lib/interactive/index.js +11 -11
- package/lib/middleware/requestId.js +9 -14
- package/lib/models/messageModel.js +5 -4
- package/lib/providers/OpenAIAssistantsProvider.js +10 -9
- package/lib/providers/OpenAIResponsesProvider.js +24 -17
- package/lib/providers/OpenAIResponsesProviderTools.js +3 -5
- package/lib/providers/createProvider.js +2 -1
- package/lib/services/airtableService.js +6 -5
- package/lib/services/assistantService.js +73 -57
- package/lib/services/conversationService.js +16 -16
- package/lib/services/preprocessingHooks.js +3 -1
- package/lib/storage/MongoStorage.js +14 -14
- package/lib/utils/errorHandler.js +3 -1
- package/lib/utils/logger.js +35 -3
- package/lib/utils/mediaValidator.js +18 -14
- package/lib/utils/sanitizer.js +0 -6
- package/lib/utils/tracingDecorator.js +7 -1
- package/package.json +1 -1

package/lib/controllers/patientController.js

@@ -1,5 +1,6 @@
 const { getRecordByFilter } = require('../services/airtableService');
 const { Monitoreo_ID } = require('../config/airtableConfig');
+const { logger } = require('../utils/logger');

 const getPatientInfoController = async (req, res) => {
   try {
@@ -15,7 +16,7 @@ const getPatientInfoController = async (req, res) => {
     const clinicalContext = records[0]['clinical-context-json'];
     res.status(200).json({ success: true, whatsappId: id, clinicalContext, patientInfo: records[0] });
   } catch (error) {
-
+    logger.error('Error fetching patient info:', error);
     res.status(500).json({ success: false, error: error.message });
   }
 };
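
Nearly every hunk in this release follows the pattern above: a previously empty or ad-hoc log line is replaced with a call to the shared logger from lib/utils/logger (that module changed +35 -3 per the file list, but its body is not included in this diff). Judging only from the call sites — logger.info, logger.warn, and logger.error taking a message plus optional context — a minimal console-backed module could look like the sketch below; every detail of it is an assumption, not the package's actual code.

// Hypothetical sketch of lib/utils/logger.js, inferred only from how it is called in this diff.
// The real module likely adds transports, request IDs, or level filtering not visible here.
const LEVELS = ['info', 'warn', 'error'];

const logger = Object.fromEntries(
  LEVELS.map((level) => [
    level,
    (...args) => {
      // Prefix each entry with a timestamp and level so the output stays greppable.
      console[level](`[${new Date().toISOString()}] [${level.toUpperCase()}]`, ...args);
    },
  ])
);

module.exports = { logger };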

package/lib/controllers/qualityMessageController.js

@@ -1,5 +1,6 @@
 const { QualityMessage } = require('../models/qualityMessageModel');
 const { Message } = require('../models/messageModel');
+const { logger } = require('../utils/logger');

 const addQualityVoteController = async (req, res) => {
   try {
@@ -22,7 +23,7 @@ const addQualityVoteController = async (req, res) => {

     res.status(201).json({ success: true, qualityVote });
   } catch (error) {
-
+    logger.error('Error adding quality vote:', error);
     res.status(500).json({ success: false, error: error.message });
   }
 };
@@ -41,7 +42,7 @@ const getQualityVotesByMessageController = async (req, res) => {

     res.status(200).json({ success: true, messageId: message_id, summary, votes });
   } catch (error) {
-
+    logger.error('Error fetching quality votes:', error);
     res.status(500).json({ success: false, error: error.message });
   }
 };
@@ -52,7 +53,7 @@ const getQualityVotesByVoterController = async (req, res) => {
     const votes = await QualityMessage.find({ voter_username }).populate('message_id').sort({ createdAt: -1 });
     res.status(200).json({ success: true, voterUsername: voter_username, count: votes.length, votes });
   } catch (error) {
-
+    logger.error('Error fetching voter quality votes:', error);
     res.status(500).json({ success: false, error: error.message });
   }
 };
@@ -68,7 +69,7 @@ const getQualityVoteByMessageAndVoterController = async (req, res) => {

     res.status(200).json({ success: true, vote });
   } catch (error) {
-
+    logger.error('Error fetching quality vote:', error);
     res.status(500).json({ success: false, error: error.message });
   }
 };

package/lib/controllers/templateController.js

@@ -1,3 +1,5 @@
+const { logger } = require('../utils/logger');
+
 // Check if provider supports templates
 const checkTemplateSupport = () => {
   if (!nexusProvider) {
@@ -29,7 +31,7 @@ const configureNexusProvider = (provider) => {
     try {
       configureTemplateProvider(provider);
     } catch (err) {
-
+      logger.warn('[templateController] Failed to propagate provider to template structure:', err?.message || err);
     }
   }
 };
@@ -132,7 +134,7 @@ const createTemplate = async (req, res) => {

   // Make sure the dates are valid before saving
   if (isNaN(dateCreated.getTime())) {
-
+    logger.info('Invalid dateCreated, using current date');
     dateCreated = currentDate;
   }

@@ -182,8 +184,8 @@ const listTemplates = async (req, res) => {
   const { status: queryStatus, type, limit = 50, showFlows: queryShowFlows } = req.query;
   const TemplateModel = getTemplateModel();

-
-
+  logger.info('nexusProvider:', nexusProvider ? 'configured' : 'not configured');
+  logger.info('nexusProvider methods:', nexusProvider ? Object.keys(nexusProvider) : 'none');

   checkTemplateSupport();
   const twilioRawTemplates = await nexusProvider.listTemplates({ limit: parseInt(limit, 10) });
@@ -224,7 +226,7 @@ const listTemplates = async (req, res) => {
         else if (reqData.status === 'PENDING') updateFields.status = 'PENDING';
       }
     } catch (err) {
-
+      logger.warn(`Could not fetch approval status for template ${twilioTemplate.sid}:`, err.message);
     }

     const onInsertFields = {
@@ -281,7 +283,7 @@ const listTemplates = async (req, res) => {
     });

   } catch (error) {
-
+    logger.error('Error in listTemplates:', error);
     return handleApiError(res, error, 'Failed to list templates');
   }
 };
@@ -557,7 +559,7 @@ const getPredefinedTemplates = (req, res) => {
     });

   } catch (error) {
-
+    logger.error('Error getting predefined templates:', error);
     res.status(500).json({
       success: false,
       error: 'Failed to retrieve predefined templates',
@@ -584,7 +586,7 @@ const getCompleteTemplate = async (req, res) => {
   try {
     checkTemplateSupport();
     const twilioTemplate = await nexusProvider.getTemplate(sid);
-
+    logger.info('Fetched template from Twilio:', twilioTemplate);

     let body = '';
     let footer = '';
@@ -659,7 +661,7 @@ const getCompleteTemplate = async (req, res) => {
       template.type = type;
     }
   } catch (twilioError) {
-
+    logger.error('Error fetching complete template from Twilio:', twilioError);
   }
 }


package/lib/controllers/uploadController.js

@@ -1,6 +1,8 @@
 const multer = require('multer');
 const { v4: uuidv4 } = require('uuid');
 const { generatePresignedUrl, uploadBufferToS3 } = require('../config/awsConfig');
+const { logger } = require('../utils/logger');
+
 const bucketName = process.env.AWS_S3_BUCKET_NAME;

 const storage = multer.memoryStorage();
@@ -66,7 +68,7 @@ const handleFileUpload = async (req, res) => {
       contentType: file.mimetype
     });
   } catch (error) {
-
+    logger.error('Error uploading file:', error);
     res.status(500).json({
       success: false,
       error: error.message || 'Failed to upload file'

package/lib/core/NexusMessaging.js

@@ -3,6 +3,7 @@ const { addMsgAssistant, replyAssistant } = require('../services/assistantServic
 const { createProvider } = require('../adapters/registry');
 const runtimeConfig = require('../config/runtimeConfig');
 const { hasPreprocessingHandler, invokePreprocessingHandler } = require('../services/preprocessingHooks');
+const { logger } = require('../utils/logger');

 const mongoose = require('mongoose');
 const OpenAI = require('openai');
@@ -58,9 +59,9 @@ class NexusMessaging {
     try {
       await mongoose.connect(mongoUri);
       this.mongodb = mongoose.connection;
-
+      logger.info('MongoDB connected successfully');
     } catch (error) {
-
+      logger.error('MongoDB connection failed', { error: error.message });
       throw error;
     }
   }
@@ -359,7 +360,7 @@ class NexusMessaging {
       return await this.handleFlow(messageData);
     } else {
       // For regular messages and media, use batching if enabled
-
+      logger.info('Batching config:', this.batchingConfig);
       if (this.batchingConfig.enabled && chatId) {
         return await this._handleWithBatching(messageData, chatId);
       } else {
@@ -437,7 +438,7 @@ class NexusMessaging {
     }

     if (!from || !body) {
-
+      logger.warn('Unable to resolve assistant inputs from message, skipping automatic reply.');
       return;
     }

@@ -452,7 +453,7 @@ class NexusMessaging {
       });
     }
   } catch (error) {
-
+    logger.error('Error in handleMessageWithAssistant:', error);
   }
 }

@@ -488,7 +489,7 @@ class NexusMessaging {
     const { from, body } = this._extractAssistantInputs(messageData);

     if (!from) {
-
+      logger.warn('Unable to resolve sender for media message, skipping automatic reply.');
       return;
     }

@@ -516,7 +517,7 @@ class NexusMessaging {
       });
     }
   } catch (error) {
-
+    logger.error('Error in handleMediaWithAssistant:', error);
   }
 }

@@ -530,16 +531,15 @@ class NexusMessaging {

     const bucketName = runtimeConfig.get('AWS_S3_BUCKET_NAME') || process.env.AWS_S3_BUCKET_NAME;
     if (!bucketName) {
-
+      logger.warn('[NexusMessaging] AWS_S3_BUCKET_NAME not configured. Skipping media persistence.');
       return;
     }

     const { processTwilioMediaMessage } = require('../helpers/twilioMediaProcessor');
-    const
-    const mediaItems = await processTwilioMediaMessage(raw, logger, bucketName);
+    const mediaItems = await processTwilioMediaMessage(raw, bucketName);

     if (!mediaItems || mediaItems.length === 0) {
-
+      logger.warn('[NexusMessaging] Media processing returned no items for incoming message.');
       return;
     }

@@ -573,13 +573,13 @@ class NexusMessaging {
       values.media = mediaPayload;

       await insertMessage(values);
-
+      logger.info('[NexusMessaging] Media message stored via legacy inserter', {
         messageId: values.message_id,
         numero: values.numero
       });
     }
   } catch (error) {
-
+    logger.error('[NexusMessaging] Failed to ensure media persistence:', error);
   }
 }

@@ -618,7 +618,7 @@ class NexusMessaging {
     // Clear existing timeout if there is one
     if (this.pendingResponses.has(chatId)) {
       clearTimeout(this.pendingResponses.get(chatId));
-
+      logger.info(`Received additional message from ${chatId}, resetting wait timer`);
     }

     // Calculate wait time with random variation
@@ -631,12 +631,12 @@ class NexusMessaging {
         this.pendingResponses.delete(chatId);
         await this._handleBatchedMessages(chatId);
       } catch (error) {
-
+        logger.error(`Error handling batched messages for ${chatId}:`, error);
       }
     }, waitTime);

     this.pendingResponses.set(chatId, timeoutId);
-
+    logger.info(`Waiting ${Math.round(waitTime/1000)} seconds for more messages from ${chatId}`);
   }

   /**
@@ -644,7 +644,7 @@ class NexusMessaging {
    */
   async _handleBatchedMessages(chatId) {
     try {
-
+      logger.info(`Processing batched messages from ${chatId} (including media if any)`);

       // Get assistant response
       const botResponse = await replyAssistant(chatId);
@@ -661,7 +661,7 @@ class NexusMessaging {
       this.events.emit('messages:batched', { chatId, response: botResponse });

     } catch (error) {
-
+      logger.error('Error in batched message handling:', error);
     }
   }

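
Most of the NexusMessaging hunks add tracing around the existing per-chat batching flow: a new message clears any pending timer for its chatId, a fresh timer is armed with a randomized waitTime, and when it fires the accumulated messages are handed to replyAssistant. A self-contained sketch of that debounce pattern follows; MessageBatcher, queueMessage, and onFlush are hypothetical stand-ins for class internals the diff does not show, and the default wait values are assumptions.

// Minimal sketch of the "wait for more messages" batching pattern traced above.
class MessageBatcher {
  constructor({ baseWaitMs = 8000, jitterMs = 4000, onFlush }) {
    this.baseWaitMs = baseWaitMs;      // assumed defaults; the package reads these from batchingConfig
    this.jitterMs = jitterMs;
    this.onFlush = onFlush;            // e.g. (chatId) => replyAssistant(chatId)
    this.pendingResponses = new Map(); // chatId -> timeout handle
  }

  queueMessage(chatId) {
    // An additional message from the same chat resets the wait timer.
    if (this.pendingResponses.has(chatId)) {
      clearTimeout(this.pendingResponses.get(chatId));
    }
    const waitTime = this.baseWaitMs + Math.random() * this.jitterMs;
    const timeoutId = setTimeout(async () => {
      this.pendingResponses.delete(chatId);
      try {
        await this.onFlush(chatId);
      } catch (error) {
        console.error(`Error handling batched messages for ${chatId}:`, error);
      }
    }, waitTime);
    this.pendingResponses.set(chatId, timeoutId);
  }
}

module.exports = { MessageBatcher };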

package/lib/helpers/assistantHelper.js

@@ -5,7 +5,7 @@ const { createProvider } = require('../providers/createProvider.js');
 const { withTracing } = require('../utils/tracingDecorator');

 const { getRecordByFilter } = require('../services/airtableService.js');
-const { logger } = require('../
+const { logger } = require('../utils/logger');

 const DEFAULT_MAX_RETRIES = parseInt(process.env.MAX_RETRIES || '30', 10);

@@ -28,7 +28,7 @@ async function checkIfFinished(text) {

     return completion.choices[0].message.content;
   } catch (error) {
-
+    logger.error('[checkIfFinished] Error checking run status:', error);
   }
 }

@@ -78,7 +78,7 @@ const runAssistantAndWait = async ({
   let completed = false;

   try {
-    logger.
+    logger.info('[runAssistantAndWait] Run started', { runId: run.id, threadId: thread.getConversationId(), assistantId: thread.getAssistantId() });
     ({run, completed} = await provider.checkRunStatus(assistant, thread.getConversationId(), run.id, 0, maxRetries));
   } finally {
     if (filter) {
@@ -102,7 +102,7 @@ const executeAssistantAttempt = async (thread, assistant, runConfig, attemptNumb
     runConfig
   });

-  logger.
+  logger.info(`[executeAssistantAttempt] Attempt ${attemptNumber}: completed=${result.completed}, output=${result.output || '(empty)'}`);

   return result;
 };
@@ -135,17 +135,16 @@ const runAssistantWithRetries = async (thread, assistant, runConfig, patientRepl
       const delay = retries === 1
         ? 500
         : Math.min(1000 * Math.pow(1.5, retries - 1), 5000);
-      logger.
+      logger.info(`[runAssistantWithRetries] Retry ${retries}, waiting ${delay}ms`);
       await new Promise(resolve => setTimeout(resolve, delay));
     }
   } while (retries < maxRetries && (!completed || !output));

   const predictionTimeMs = Date.now() - startTime;

-  if (run?.last_error) logger.
-  logger.
-  logger.
-  logger.log('[runAssistantWithRetries] TIMING', { predictionTimeMs, retries });
+  if (run?.last_error) logger.warn('[runAssistantWithRetries] Run error', { error: run.last_error });
+  logger.info('[runAssistantWithRetries] Run completed', { completed, outputLength: output?.length || 0 });
+  logger.info('[runAssistantWithRetries] TIMING', { predictionTimeMs, retries });

   return { run, output, completed, retries, predictionTimeMs };
 };
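
The retry loop above keeps the delay schedule visible in context: 500 ms for the first retry, then 1000 * 1.5^(retries - 1) ms capped at 5000 ms. Pulled out as a standalone helper (retryDelayMs is a hypothetical name, not part of the package), the first few delays come out to 500, 1500, 2250, 3375, 5000, 5000 ms:

// Delay schedule used by runAssistantWithRetries, isolated for illustration.
const retryDelayMs = (retries) =>
  retries === 1 ? 500 : Math.min(1000 * Math.pow(1.5, retries - 1), 5000);

console.log([1, 2, 3, 4, 5, 6].map(retryDelayMs)); // [ 500, 1500, 2250, 3375, 5000, 5000 ]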

package/lib/helpers/baileysHelper.js

@@ -2,6 +2,7 @@ const { Message, insertMessage, getMessageValues } = require('../models/messageM
 const { uploadMediaToS3 } = require('./mediaHelper.js');
 const { isRecentMessage } = require('./messageHelper.js');
 const { downloadMediaMessage } = require('baileys');
+const { logger } = require('../utils/logger');


 async function processMessage(message, messageType) {
@@ -19,7 +20,7 @@ async function processMessage(message, messageType) {

     return values;
   } catch (error) {
-
+    logger.error('Failed to process message', { error: error.message, stack: error.stack });
     throw error;
   }
 }
@@ -44,7 +45,7 @@ async function processMediaMessage(message, logger, messageType, bucketName, soc

     return values;
   } catch (error) {
-
+    logger.error('Failed to process message', { error: error.message, stack: error.stack });
     throw error;
   }
 }
@@ -128,7 +129,7 @@ async function downloadMedia(message, logger, sock) {
     );
     return buffer;
   } catch (error) {
-
+    logger.error('Failed to download media', { error: error.message, stack: error.stack });
     throw error;
   }
 }

package/lib/helpers/filesHelper.js

@@ -7,22 +7,29 @@ const sharp = require('sharp');
 const { downloadFileFromS3 } = require('../config/awsConfig.js');
 const { Message } = require('../models/messageModel.js');
 const { sanitizeFilename } = require('../utils/sanitizer.js');
+const { logger } = require('../utils/logger');

-async function convertPdfToImages(pdfName) {
+async function convertPdfToImages(pdfName, existingPdfPath = null) {
   const outputDir = path.join(__dirname, 'assets', 'tmp');

   const sanitizedName = sanitizeFilename(pdfName);
-  const pdfPath = path.join(outputDir, `${sanitizedName}.pdf`);
+  const pdfPath = existingPdfPath || path.join(outputDir, `${sanitizedName}.pdf`);
   const outputPattern = path.join(outputDir, sanitizedName);

   await fs.mkdir(outputDir, { recursive: true });

   return new Promise((resolve, reject) => {
     const args = ['-jpeg', pdfPath, outputPattern];
-
+    logger.info('[convertPdfToImages] Running: pdftoppm', args.join(' '));

     execFile('pdftoppm', args, (error, stdout, stderr) => {
       if (error) {
+        logger.error('[convertPdfToImages] Error details:', {
+          error: error.message,
+          stderr,
+          pdfPath,
+          pdfExists: require('fs').existsSync(pdfPath)
+        });
         return reject(new Error(`Error splitting PDF: ${stderr || error.message}`));
       }

@@ -71,7 +78,7 @@ async function combineImagesToPDF(config) {
     imageFiles.sort();
   }

-
+  logger.info(`Found ${imageFiles.length} image files to combine`);

   const pdfDoc = await PDFDocument.create();
   const processedFiles = [];
@@ -79,7 +86,7 @@ async function combineImagesToPDF(config) {
   for (const [index, file] of imageFiles.entries()) {
     try {
       const filePath = path.join(inputDir, file);
-
+      logger.info(`Processing file ${index + 1}/${imageFiles.length}: ${file}`);

       const imageBuffer = await fs.readFile(filePath);
       const pngBuffer = await sharp(imageBuffer)
@@ -99,11 +106,11 @@ async function combineImagesToPDF(config) {

       processedFiles.push(filePath);
     } catch (error) {
-
+      logger.error(`Error processing file ${file}:`, error);
     }
   }

-
+  logger.info('Combined PDF created successfully');

   return {
     pdfBuffer: await pdfDoc.save(),
@@ -120,12 +127,12 @@ const cleanupFiles = async (files) => {
     } catch (error) {
       if (error?.code !== 'ENOENT') {
         const safeFileName = filePath ? filePath.split('/').pop().replace(/^[^-]+-[^-]+-/, 'xxx-xxx-') : 'unknown';
-
+        logger.warn(`[cleanupFiles] Error deleting ${safeFileName}:`, error?.message || String(error));
       }
     }
   }));

-
+  logger.info(`[cleanupFiles] Cleaned up ${files.length} files`);
 };

 async function downloadMediaAndCreateFile(code, reply) {
@@ -138,7 +145,7 @@ async function downloadMediaAndCreateFile(code, reply) {
   if (!resultMedia) return [];

   if (!resultMedia.media || !resultMedia.media.key) {
-
+    logger.info('[downloadMediaAndCreateFile] No valid media found for message:', reply.message_id);
     return [];
   }

@@ -147,19 +154,21 @@ async function downloadMediaAndCreateFile(code, reply) {

   const [subType, fileName] = key.split('/');

-  const sanitizedCode = sanitizeFilename(code);
-  const sanitizedSubType = sanitizeFilename(subType);
-  const sanitizedFileName = sanitizeFilename(fileName);
+  const sanitizedCode = sanitizeFilename(code, 20);
+  const sanitizedSubType = sanitizeFilename(subType, 10);
+  const sanitizedFileName = sanitizeFilename(fileName, 50);

   const sourceFile = `${sanitizedCode}-${sanitizedSubType}-${sanitizedFileName}`;
   const downloadPath = path.join(__dirname, 'assets', 'tmp', sourceFile);

+  logger.info('[downloadMediaAndCreateFile] Downloading file', { sourceFile, downloadPath, bucketName, key });
+
   await fs.mkdir(path.dirname(downloadPath), { recursive: true });
   await downloadFileFromS3(bucketName, key, downloadPath);

   const { name: baseName } = path.parse(sourceFile);
   const fileNames = (subType === 'document' || subType === 'application')
-    ? await convertPdfToImages(baseName)
+    ? await convertPdfToImages(baseName, downloadPath)
     : [downloadPath];

   if (subType === 'document' || subType === 'application') {
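
This filesHelper change appears to be what the 2.4.9-fix-pdf-processing version name refers to: convertPdfToImages used to rebuild the PDF path from the sanitized base name while the downloaded file actually lived at downloadPath, so pdftoppm could be pointed at a file that was never written (the new error log even records pdfExists). The added existingPdfPath parameter lets downloadMediaAndCreateFile hand over the exact downloaded path. For reference, a self-contained sketch of the pdftoppm invocation used here — pdfToJpegs is a hypothetical wrapper name, and it assumes the poppler-utils pdftoppm binary is on PATH:

// Convert a PDF known to exist at pdfPath into numbered JPEG pages named <baseName>-N.jpg.
const { execFile } = require('child_process');
const path = require('path');

function pdfToJpegs(pdfPath, outputDir, baseName) {
  const outputPattern = path.join(outputDir, baseName);
  return new Promise((resolve, reject) => {
    execFile('pdftoppm', ['-jpeg', pdfPath, outputPattern], (error, stdout, stderr) => {
      if (error) {
        return reject(new Error(`Error splitting PDF: ${stderr || error.message}`));
      }
      resolve(outputPattern); // the caller then lists `${baseName}-*.jpg` in outputDir
    });
  });
}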

package/lib/helpers/llmsHelper.js

@@ -1,13 +1,14 @@
 const llmConfig = require('../config/llmConfig.js');
+const { logger } = require('../utils/logger');
 const fs = require('fs');
 const mime = require('mime-types');


-async function analyzeImage(imagePath, isSticker = false) {
+async function analyzeImage(imagePath, isSticker = false, contentType = null) {
   try {
     const anthropicClient = llmConfig.anthropicClient;
     if (!anthropicClient || !anthropicClient.messages) {
-
+      logger.warn('[llmsHelper] Anthropics client not configured; skipping image analysis');
       return {
         description: 'Image could not be analyzed - Anthropic client not configured',
         medical_analysis: 'QUALITY_INSUFFICIENT',
@@ -19,7 +20,7 @@ async function analyzeImage(imagePath, isSticker = false) {

     // Skip only WBMP images (unsupported format)
     if (imagePath.toLowerCase().includes('.wbmp')) {
-
+      logger.info('Skipping WBMP image or sticker analysis:', imagePath);
       return {
         description: 'Unsupported image format',
         medical_analysis: 'NOT_MEDICAL',
@@ -29,10 +30,16 @@ async function analyzeImage(imagePath, isSticker = false) {
       };
     }

-
-
+    let mimeType = contentType;
+    if (!mimeType) {
+      if (imagePath.toLowerCase().endsWith('.webp')) {
+        mimeType = 'image/webp';
+      } else {
+        mimeType = mime.lookup(imagePath) || 'image/jpeg';
+      }
+    }
     if (mimeType === 'image/vnd.wap.wbmp') {
-
+      logger.info('Skipping image with MIME type:', mimeType);
       return {
         description: 'Unsupported image format',
         medical_analysis: 'NOT_MEDICAL',
@@ -113,7 +120,7 @@ Only extract tables - ignore any other content in the image.`;
             type: 'image',
             source: {
               type: 'base64',
-              media_type:
+              media_type: mimeType,
               data: base64Image,
             },
           },
@@ -180,7 +187,7 @@ Ejemplo 1:
             type: 'image',
             source: {
               type: 'base64',
-              media_type:
+              media_type: mimeType,
               data: base64Image,
             },
           },
@@ -208,7 +215,7 @@ Ejemplo 1:
             type: 'image',
             source: {
               type: 'base64',
-              media_type:
+              media_type: mimeType,
               data: base64Image,
             },
           },
@@ -231,7 +238,7 @@ Ejemplo 1:
     return {description: description, medical_analysis: messageAnalysisStr,
       medical_relevance: isRelevant, has_table: isTable, table_data: table};
   } catch (error) {
-
+    logger.error('Error analyzing image:', error);
     throw error;
   }
 }
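
analyzeImage now accepts an explicit contentType and resolves the image's MIME type in a fixed order — caller-supplied type first, then a .webp extension check, then a mime-types lookup, falling back to image/jpeg — and forwards the result as media_type in each base64 image block. The same resolution order, isolated as a small helper for illustration (resolveImageMimeType is a hypothetical name, not part of the package):

// MIME resolution order added in the hunk above.
const mime = require('mime-types');

function resolveImageMimeType(imagePath, contentType = null) {
  if (contentType) return contentType;                                // trust the stored content type first
  if (imagePath.toLowerCase().endsWith('.webp')) return 'image/webp'; // WhatsApp stickers arrive as .webp
  return mime.lookup(imagePath) || 'image/jpeg';                      // extension lookup, then a safe default
}

// resolveImageMimeType('scan.PNG')           -> 'image/png'
// resolveImageMimeType('blob', 'image/heic') -> 'image/heic'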

package/lib/helpers/mediaHelper.js

@@ -2,6 +2,7 @@ const path = require('path');
 const fs = require('fs');
 const AWS = require('../config/awsConfig.js');
 const { sanitizeMediaFilename } = require('../utils/sanitizer.js');
+const { logger } = require('../utils/logger');

 async function uploadMediaToS3(buffer, messageID, titleFile, bucketName, contentType, messageType) {
   const extension = getFileExtension(contentType);
@@ -9,13 +10,13 @@ async function uploadMediaToS3(buffer, messageID, titleFile, bucketName, content
   const fileName = sanitizedTitle
     ? `${messageType}/${messageID}_${sanitizedTitle}.${extension}`
     : `${messageType}/${messageID}.${extension}`;
-
+  logger.info(titleFile, messageType);

   try {
     await AWS.uploadBufferToS3(buffer, bucketName, fileName, contentType);
     return fileName;
   } catch (error) {
-
+    logger.error('Failed to upload media to S3:', error.stack);
     throw error;
   }
 }