@peopl-health/nexus 1.5.5 → 1.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,28 @@
+ const runtimeConfig = require('./runtimeConfig');
+
+ let anthropicClient = null;
+
+ const resolveAnthropicClient = () => {
+   if (anthropicClient) return anthropicClient;
+
+   const apiKey = runtimeConfig.get('ANTHROPIC_API_KEY') || process.env.ANTHROPIC_API_KEY;
+   if (!apiKey) return null;
+
+   try {
+     const { Anthropic } = require('@anthropic-ai/sdk');
+     anthropicClient = new Anthropic({ apiKey });
+     console.log('[llmConfig] Anthropic client initialised');
+   } catch (error) {
+     console.warn('[llmConfig] Failed to initialise Anthropic client. Install @anthropic-ai/sdk if you plan to use vision features.', error?.message || error);
+     anthropicClient = null;
+   }
+
+   return anthropicClient;
+ };
+
  module.exports = {
-   openaiClient: null
+   openaiClient: null,
+   get anthropicClient() {
+     return resolveAnthropicClient();
+   }
  };
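The getter added above initialises the Anthropic SDK lazily: nothing is loaded until anthropicClient is first read, and null comes back when no key or SDK is available. A minimal consumer sketch (illustrative only; the require path and key setup are assumptions):

// Sketch of how downstream code can read the lazy getter.
const llmConfig = require('./config/llmConfig');

const client = llmConfig.anthropicClient; // SDK is required and constructed on first access
if (!client) {
  // null means ANTHROPIC_API_KEY was missing or @anthropic-ai/sdk could not be loaded
  console.warn('Vision features disabled: no Anthropic client');
}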
@@ -1,6 +1,7 @@
  const { airtable, getBase } = require('../config/airtableConfig');
  const { replyAssistant } = require('../services/assistantService');
  const { createProvider } = require('../adapters/registry');
+ const runtimeConfig = require('../config/runtimeConfig');

  const mongoose = require('mongoose');
  const OpenAI = require('openai');
@@ -374,7 +375,12 @@ class NexusMessaging {
    typeof messageData.message?.conversation === 'string' ? messageData.message.conversation : null,
    typeof messageData.Body === 'string' ? messageData.Body : null,
    typeof messageData.raw?.message?.conversation === 'string' ? messageData.raw.message.conversation : null,
-   typeof messageData.raw?.Body === 'string' ? messageData.raw.Body : null
+   typeof messageData.raw?.Body === 'string' ? messageData.raw.Body : null,
+   typeof messageData.caption === 'string' ? messageData.caption : null,
+   typeof messageData.media?.caption === 'string' ? messageData.media.caption : null,
+   Array.isArray(messageData.media) && typeof messageData.media[0]?.caption === 'string'
+     ? messageData.media[0].caption
+     : null
  ].find((value) => typeof value === 'string' && value.trim().length > 0) || null;

  return { from, message };
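With the extra fallbacks above, a caption can arrive directly on the payload, on a media object, or on the first element of a media array and still be used as the message text. An illustrative sketch (the payload shapes are made up; only the field names come from the diff):

// Each of these hypothetical payloads resolves message to 'Lab results attached'.
const direct = { caption: 'Lab results attached' };
const nested = { media: { caption: 'Lab results attached' } };
const arrayed = { media: [{ caption: 'Lab results attached' }] };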
@@ -416,7 +422,109 @@ class NexusMessaging {
  }

  async handleMedia(messageData) {
-   return await this._handleWithPipeline('media', 'onMedia', messageData);
+   await this._ensureMediaPersistence(messageData);
+
+   return await this._handleWithPipeline('media', 'onMedia', messageData, async (ctx) => {
+     return await this.handleMediaWithAssistant(ctx);
+   });
+ }
+
+ async handleMediaWithAssistant(messageData) {
+   try {
+     const { from, message } = this._extractAssistantInputs(messageData);
+
+     if (!from) {
+       console.warn('Unable to resolve sender for media message, skipping automatic reply.');
+       return;
+     }
+
+     const mediaDescriptor = (() => {
+       const media = Array.isArray(messageData.media) ? messageData.media[0] : messageData.media;
+       if (!media) return null;
+       if (typeof media.mediaType === 'string') return media.mediaType;
+       if (typeof media.type === 'string') return media.type;
+       if (typeof media.contentType === 'string') return media.contentType;
+       return null;
+     })();
+
+     const fallbackMessage = message && message.trim().length > 0
+       ? message
+       : `Media received (${mediaDescriptor || 'attachment'})`;
+
+     const response = await replyAssistant(from, fallbackMessage);
+
+     if (response) {
+       await this.sendMessage({
+         to: from,
+         message: response
+       });
+     }
+   } catch (error) {
+     console.error('Error in handleMediaWithAssistant:', error);
+   }
+ }
+
+ async _ensureMediaPersistence(messageData) {
+   try {
+     const raw = messageData?.raw;
+     if (!raw || raw.__nexusMediaProcessed) return;
+
+     const numMedia = parseInt(raw.NumMedia || '0', 10);
+     if (!numMedia || numMedia <= 0 || !raw.MediaUrl0) return;
+
+     const bucketName = runtimeConfig.get('AWS_S3_BUCKET_NAME') || process.env.AWS_S3_BUCKET_NAME;
+     if (!bucketName) {
+       console.warn('[NexusMessaging] AWS_S3_BUCKET_NAME not configured. Skipping media persistence.');
+       return;
+     }
+
+     const { processTwilioMediaMessage } = require('../helpers/twilioMediaProcessor');
+     const { logger } = require('../utils/logger');
+     const mediaItems = await processTwilioMediaMessage(raw, logger, bucketName);
+
+     if (!mediaItems || mediaItems.length === 0) {
+       console.warn('[NexusMessaging] Media processing returned no items for incoming message.');
+       return;
+     }
+
+     raw.__nexusMediaProcessed = true;
+
+     const [primary, ...rest] = mediaItems;
+     const mediaPayload = rest.length > 0
+       ? { ...primary, metadata: { ...(primary.metadata || {}), attachments: rest } }
+       : primary;
+
+     messageData.media = messageData.media || mediaPayload;
+     messageData.fileUrl = messageData.fileUrl || mediaPayload.metadata?.presignedUrl || null;
+     messageData.fileType = messageData.fileType || mediaPayload.mediaType;
+     messageData.caption = messageData.caption || primary.caption;
+     if (!messageData.message && messageData.caption) {
+       messageData.message = messageData.caption;
+     }
+     messageData.isMedia = true;
+
+     if (!this.messageStorage) {
+       const { convertTwilioToInternalFormat } = require('../helpers/twilioHelper');
+       const { getMessageValues, insertMessage } = require('../models/messageModel');
+
+       const messageObj = convertTwilioToInternalFormat(raw);
+       const values = getMessageValues(
+         messageObj,
+         messageData.message || messageData.caption || '',
+         null,
+         true
+       );
+       values.media = mediaPayload;
+
+       await insertMessage(values);
+       console.log('[NexusMessaging] Media message stored via legacy inserter', {
+         messageId: values.message_id,
+         numero: values.numero
+       });
+     }
+   } catch (error) {
+     console.error('[NexusMessaging] Failed to ensure media persistence:', error);
+   }
  }

  async handleCommand(messageData) {
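_ensureMediaPersistence only does work for Twilio-style webhooks that actually carry media (NumMedia and MediaUrl0 are standard Twilio webhook parameters) and only when an S3 bucket is configured. A sketch of the smallest payload that would reach the processing step, with placeholder values and a hypothetical messaging instance:

// Assumes AWS_S3_BUCKET_NAME is set via runtimeConfig or the environment.
const messageData = {
  raw: {
    NumMedia: '1',
    MediaUrl0: 'https://api.twilio.com/...', // placeholder media URL
    Body: ''
  }
};
// await messaging.handleMedia(messageData); // persists the media, then runs the pipeline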
@@ -211,18 +211,23 @@ async function processMessage(code, reply, thread) {
      continue;
    }
    if (fileName.includes('image') || fileName.includes('document') || fileName.includes('application')) {
-     const imageAnalysis = await analyzeImage(fileName);
+     let imageAnalysis = null;
+     try {
+       imageAnalysis = await analyzeImage(fileName);
+     } catch (error) {
+       console.warn('[assistantHelper] analyzeImage failed:', error?.message || error);
+     }
      console.log(imageAnalysis);
      const invalidAnalysis = ['NOT_MEDICAL', 'QUALITY_INSUFFICIENT'];
-     if (imageAnalysis.medical_relevance) {
+     if (imageAnalysis?.medical_relevance) {
        url = await generatePresignedUrl(reply.media.bucketName, reply.media.key);
      }
-     if (imageAnalysis.has_table) {
+     if (imageAnalysis?.has_table && imageAnalysis.table_data) {
        messagesChat.push({
          type: 'text',
          text: imageAnalysis.table_data,
        });
-     } else if (!invalidAnalysis.some(tag => imageAnalysis.medical_analysis.includes(tag))) {
+     } else if (imageAnalysis?.medical_analysis && !invalidAnalysis.some(tag => imageAnalysis.medical_analysis.includes(tag))) {
        messagesChat.push({
          type: 'text',
          text: imageAnalysis.medical_analysis,
@@ -1,10 +1,21 @@
- const { anthropicClient } = require('../config/llmConfig.js');
+ const llmConfig = require('../config/llmConfig.js');
  const fs = require('fs');
  const mime = require('mime-types');


  async function analyzeImage(imagePath) {
    try {
+     const anthropicClient = llmConfig.anthropicClient;
+     if (!anthropicClient || !anthropicClient.messages) {
+       console.warn('[llmsHelper] Anthropic client not configured; skipping image analysis');
+       return {
+         medical_analysis: 'QUALITY_INSUFFICIENT',
+         medical_relevance: false,
+         has_table: false,
+         table_data: null
+       };
+     }
+
      // Skip WBMP images and stickers
      if (imagePath.toLowerCase().includes('.wbmp') || imagePath.toLowerCase().includes('sticker')) {
        console.log('Skipping WBMP image or sticker analysis:', imagePath);
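When the client is missing, analyzeImage now resolves to the neutral fallback object above rather than throwing, which is what the optional-chaining guards in the processMessage hunk rely on. A sketch of a caller handling both outcomes the same way (illustrative; the file path is hypothetical):

const analysis = await analyzeImage('uploads/report.jpg'); // may be real output or the fallback
if (analysis?.has_table && analysis.table_data) {
  // table branch
} else if (analysis?.medical_analysis) {
  // free-text analysis branch
}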
@@ -120,6 +120,8 @@ class MongoStorage {
      ? { ...primary, metadata: { ...(primary.metadata || {}), attachments: rest } }
      : primary;

+   rawMessage.__nexusMediaProcessed = true;
+
    console.log('[MongoStorage] Media processed successfully', {
      primaryType: mediaPayload.mediaType,
      mediaCount: mediaItems.length,
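Both persistence paths (_ensureMediaPersistence in NexusMessaging and this MongoStorage block) set the same in-memory flag on the raw webhook object, so whichever runs first makes the other a no-op. Paraphrasing the shared guard from the two hunks:

// Not a new API, just the pattern both paths follow on the raw webhook object.
if (raw.__nexusMediaProcessed) return; // another path already stored this media
// ...process the incoming media...
raw.__nexusMediaProcessed = true;      // mark it so the other path skips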
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@peopl-health/nexus",
-   "version": "1.5.5",
+   "version": "1.5.7",
    "description": "Core messaging and assistant library for WhatsApp communication platforms",
    "publishConfig": {
      "access": "public"
@@ -67,7 +67,8 @@
      "baileys": "^6.4.0",
      "express": "4.21.2",
      "openai": "^4.0.0",
-     "twilio": "5.6.0"
+     "twilio": "5.6.0",
+     "@anthropic-ai/sdk": "^0.32.0"
    },
    "devDependencies": {
      "@types/node": "^20.5.0",