@peopl-health/nexus 2.4.5-fix → 2.4.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -198,36 +198,43 @@ const processMediaFiles = async (code, reply, provider) => {
   return { messagesChat, url, tempFiles };
 };
 
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const processThreadMessage = async (code, replies, provider) => {
+  const replyArray = Array.isArray(replies) ? replies : [replies];
+
+  const results = await Promise.all(
+    replyArray.map(async (reply, i) => {
+      let tempFiles = [];
+      try {
+        const isPatient = reply.origin === 'patient';
+        const [textMessages, mediaResult] = await Promise.all([
+          Promise.resolve(processTextMessage(reply)),
+          processMediaFiles(code, reply, provider)
+        ]);
+
+        const { messagesChat: mediaMessages, url, tempFiles: mediaFiles } = mediaResult;
+        tempFiles = mediaFiles;
+
+        const allMessages = [...textMessages, ...mediaMessages];
+        const role = reply.origin === 'patient' ? 'user' : 'assistant';
+        const messages = allMessages.map(content => ({ role, content }));
+
+        logger.info('processThreadMessage', {
+          index: i + 1,
+          total: replyArray.length,
+          isPatient,
+          hasUrl: !!url
+        });
+
+        return { isPatient, url, messages, reply, tempFiles };
+      } catch (error) {
+        logger.error('processThreadMessage', error, { message_id: reply.message_id, origin: reply.origin });
+        await cleanupFiles(tempFiles);
+        return { isPatient: false, url: null, messages: [], reply, tempFiles: [] };
+      }
+    })
+  );
+
+  return results;
 };
 
 module.exports = {
@@ -235,5 +242,5 @@ module.exports = {
   processImageFile,
   processAudioFile,
   processMediaFiles,
-
+  processThreadMessage
 };
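A minimal usage sketch of the new helper, not part of the diff: it assumes processThreadMessage is imported from ../helpers/processHelper.js (as the require hunk further down does) and that a reply only needs the origin and message_id fields referenced above; the sample reply objects and the wrapper function are hypothetical.

// Hedged sketch: the wrapper function and sample data are illustrative only.
const { processThreadMessage } = require('../helpers/processHelper.js');

async function example(code, provider) {
  // processThreadMessage accepts a single reply or an array; both are normalized to an array.
  const replies = [
    { origin: 'patient', message_id: 'msg-1' },
    { origin: 'clinic', message_id: 'msg-2' },
  ];

  const results = await processThreadMessage(code, replies, provider);

  // Each result has the shape returned in the hunk above:
  // { isPatient, url, messages: [{ role, content }], reply, tempFiles }
  return results.filter(r => r.messages.length > 0);
}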
@@ -82,22 +82,28 @@ class OpenAIAssistantsProvider {
     await this.client.beta.threads.del(this._ensureId(threadId));
   }
 
-  async addMessage({ threadId, role = 'user', content, metadata }) {
-    const
-
-
-
-
-
-
-
-
+  async addMessage({ threadId, messages, role = 'user', content, metadata }) {
+    const id = this._ensureId(threadId);
+    const messagesToAdd = messages || [{ role, content, metadata }];
+
+    const results = [];
+    for (const msg of messagesToAdd) {
+      const formattedContent = this._normalizeContent(msg.content);
+      const payload = {
+        role: msg.role || 'user',
+        content: formattedContent,
+      };
+
+      if (msg.metadata && Object.keys(msg.metadata).length > 0) {
+        payload.metadata = msg.metadata;
+      }
+
+      const result = await this._retryWithRateLimit(() =>
+        this.client.beta.threads.messages.create(id, payload)
+      );
+      results.push(result);
     }
-
-    return this._retryWithRateLimit(() =>
-      this.client.beta.threads.messages.create(this._ensureId(threadId), payload)
-    );
+    return messages ? results : results[0];
   }
 
   async listMessages({ threadId, runId, order = 'desc', limit } = {}) {
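A sketch of the two call forms the updated Assistants addMessage now supports, not taken from the package's tests: provider stands in for a constructed OpenAIAssistantsProvider instance, and the metadata value is illustrative. The single-message form keeps the previous return value (one created message), while passing messages returns an array, per the `return messages ? results : results[0]` line above.

// Hedged sketch; `provider` and `threadId` are assumed to come from the surrounding app code.
async function addMessageExamples(provider, threadId) {
  // Backward-compatible single-message call: resolves to one created message.
  const single = await provider.addMessage({
    threadId,
    role: 'user',
    content: 'One message',
    metadata: { source: 'example' }, // attached only when non-empty, per the hunk above
  });

  // New batch form: each entry is created in turn inside the loop, and an array is returned.
  const batch = await provider.addMessage({
    threadId,
    messages: [
      { role: 'user', content: 'First' },
      { role: 'assistant', content: 'Second' },
    ],
  });

  return { single, created: batch.length };
}

Note that the batch form still issues one messages.create call per entry, each wrapped in _retryWithRateLimit; the change is to the interface, not a single-request optimization.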
@@ -197,23 +197,26 @@ class OpenAIResponsesProvider {
     return await this._delete(`/conversations/${id}`);
   }
 
-  async addMessage({ threadId, role = 'user', content, metadata }) {
+  async addMessage({ threadId, messages, role = 'user', content, metadata }) {
     const id = this._ensurethreadId(threadId);
+    const messagesToAdd = messages || [{ role, content, metadata }];
 
-    const
-
-
-
-
+    const payloads = messagesToAdd.map(msg =>
+      this._cleanObject({
+        role: msg.role || 'user',
+        content: this._normalizeContent(msg.role || 'user', msg.content),
+        type: 'message',
+      })
+    ).filter(p => p.content);
 
-    if (
-
-
-
-    }
-
-    });
-  }
+    if (payloads.length === 0) return null;
+
+    return this._retryWithRateLimit(async () => {
+      if (this.conversations?.items?.create) {
+        return await this.conversations.items.create(id, { items: payloads });
+      }
+      return await this._post(`/conversations/${id}/items`, { items: payloads });
+    });
   }
 
   async listMessages({ threadId, order = 'desc', limit } = {}) {
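By contrast, the Responses-style provider sends one request per batch. The standalone sketch below mirrors its map/filter step with a stand-in for _normalizeContent (whose real behavior, like _cleanObject's, is not shown in this diff) to illustrate why falsy content is dropped before the single `{ items: payloads }` request.

// Hedged illustration only; normalizeContent is a stand-in, not the provider's _normalizeContent,
// and _cleanObject (presumably stripping empty fields) is omitted.
const normalizeContent = (role, content) => (content ? String(content) : undefined);

const messagesToAdd = [
  { role: 'user', content: 'Patient text' },
  { role: 'assistant', content: '' }, // falsy content is removed by the filter
];

const payloads = messagesToAdd
  .map(msg => ({
    role: msg.role || 'user',
    content: normalizeContent(msg.role || 'user', msg.content),
    type: 'message',
  }))
  .filter(p => p.content);

console.log(payloads);
// [ { role: 'user', content: 'Patient text', type: 'message' } ]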
@@ -12,8 +12,8 @@ const { getCurRow } = require('../helpers/assistantHelper.js');
 const { runAssistantAndWait, runAssistantWithRetries } = require('../helpers/assistantHelper.js');
 const { getThread, getThreadInfo } = require('../helpers/threadHelper.js');
 const { withTracing } = require('../utils/tracingDecorator.js');
-const {
-const { getLastMessages } = require('../helpers/messageHelper.js');
+const { processThreadMessage } = require('../helpers/processHelper.js');
+const { getLastMessages, updateMessageRecord } = require('../helpers/messageHelper.js');
 const { combineImagesToPDF, cleanupFiles } = require('../helpers/filesHelper.js');
 const { logger } = require('../middleware/requestId');
 
@@ -297,19 +297,24 @@ const replyAssistantCore = async (code, message_ = null, thread_ = null, runOpti
   const provider = createProvider({ variant: process.env.VARIANT || 'assistants' });
 
   timings.processMessages = Date.now();
-  logger.log(`[replyAssistantCore] Processing ${patientReply.length} messages
+  logger.log(`[replyAssistantCore] Processing ${patientReply.length} messages in parallel`);
 
-
-  const urls = [];
+  const processResults = await processThreadMessage(code, patientReply, provider);
 
-
-
-
-
-
-
+  const patientMsg = processResults.some(r => r.isPatient);
+  const urls = processResults.filter(r => r.url).map(r => ({ url: r.url }));
+  const allMessagesToAdd = processResults.flatMap(r => r.messages || []);
+  const allTempFiles = processResults.flatMap(r => r.tempFiles || []);
+
+  if (allMessagesToAdd.length > 0) {
+    const threadId = thread.getConversationId();
+    logger.log(`[replyAssistantCore] Adding ${allMessagesToAdd.length} messages to thread in batch`);
+    await provider.addMessage({ threadId, messages: allMessagesToAdd });
   }
 
+  await Promise.all(processResults.map(r => updateMessageRecord(r.reply, thread)));
+  await cleanupFiles(allTempFiles);
+
   timings.processMessages = Date.now() - timings.processMessages;
 
   if (urls.length > 0) {
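A worked illustration of the aggregation step in replyAssistantCore, using fabricated sample results only to show what some/filter/flatMap produce before the single batched addMessage call; the URL and file path are placeholders, not package output.

// Illustrative data shaped like processThreadMessage's return value.
const processResults = [
  { isPatient: true, url: 'https://example.invalid/1.jpg', messages: [{ role: 'user', content: 'Hola' }], tempFiles: ['/tmp/1.jpg'] },
  { isPatient: false, url: null, messages: [{ role: 'assistant', content: 'Hello' }], tempFiles: [] },
];

const patientMsg = processResults.some(r => r.isPatient);                  // true
const urls = processResults.filter(r => r.url).map(r => ({ url: r.url })); // one entry
const allMessagesToAdd = processResults.flatMap(r => r.messages || []);    // two messages, one addMessage call
const allTempFiles = processResults.flatMap(r => r.tempFiles || []);       // ['/tmp/1.jpg']

console.log({ patientMsg, urls, count: allMessagesToAdd.length, allTempFiles });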