@5minds/node-red-dashboard-2-processcube-chat 0.1.2 → 0.1.3-develop-7a3c44-mff0zplx
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,7 @@
 <template>
   <div class="deep-chat-container">
     <deep-chat
+      ref="deepChat"
       :style="deepChatStyle"
       :textInput="textInputConfig"
       :introMessage="introMessageConfig"
@@ -26,6 +27,7 @@ export default {
   data() {
     return {
       conversation: [],
+      signals: undefined,
     };
   },
 
@@ -59,12 +61,18 @@ export default {
       };
     },
   },
+  mounted() {
+    this.$socket.on('msg-input:' + this.id, (msg) => {
+      this.handleNodeREDResponse(msg);
+    });
+  },
   beforeUnmount() {
     this.$socket.off('msg-input:' + this.id);
   },
 
   methods: {
     async handleConnection(body, signals) {
+      this.signals = signals;
       try {
         // Extract messages and files from FormData
         let newMessages = [];
@@ -85,9 +93,22 @@ export default {
           }
 
           // Process files if present
-          if (files.length > 0
-          const
-
+          if (files.length > 0) {
+            const processedFiles = await this.processFiles(files);
+
+            if (newMessages.length > 0) {
+              // Add files to existing message
+              const lastMessage = newMessages[newMessages.length - 1];
+              lastMessage.files = processedFiles;
+            } else {
+              // Create new message for files only
+              newMessages.push({
+                role: 'user',
+                text: '', // Empty text when only files are sent
+                files: processedFiles,
+              });
+              console.log('lus777', newMessages);
+            }
           }
         } else if (body.messages) {
           newMessages = body.messages;
@@ -101,7 +122,7 @@
         this.sendToNodeRED(payload, signals);
       } catch (error) {
         console.error('Error in handleConnection:', error);
-        this.sendErrorResponse(
+        this.sendErrorResponse('Sorry, there was an error processing your message.');
       }
     },
 
@@ -306,13 +327,9 @@
 
     sendToNodeRED(payload, signals, fallbackMessage = null) {
       this.$socket.emit('widget-action', this.id, { payload });
-
-      this.$socket.once('msg-input:' + this.id, (msg) => {
-        this.handleNodeREDResponse(msg, signals);
-      });
     },
 
-    handleNodeREDResponse(msg
+    handleNodeREDResponse(msg) {
       try {
         // Store the complete ChatGPT response in conversation
         const fullResponse = msg.payload;
@@ -336,30 +353,34 @@
         // For the chat display, show appropriate response
         if (fullResponse.message?.tool_calls) {
           // If ChatGPT wants to call tools, show a message
-
+          this.$refs.deepChat.addMessage({
            text: 'Function call requested...',
            role: 'ai',
          });
         } else if (fullResponse.message?.content || fullResponse.content) {
-
-          signals.onResponse({
+          const message = {
            text: fullResponse.message?.content || fullResponse.content,
            role: 'ai',
-          }
+          };
+
+          // Normal text response
+          if (msg.payload.intermediateMessage) {
+            this.$refs.deepChat.addMessage(message);
+          } else {
+            this.signals.onResponse(message);
+          }
         }
       } catch (error) {
         console.error('Error handling response:', error);
-        this.sendErrorResponse(
+        this.sendErrorResponse('Error processing response');
      }
     },
 
-    sendErrorResponse(
-
-
-
-
-      });
-    }
+    sendErrorResponse(message) {
+      this.$refs.deepChat.addMessage({
+        text: message,
+        role: 'ai',
+      }, true);
     },
   },
 };
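Taken together, the hunks change how responses from Node-RED reach the chat widget: the one-shot $socket.once listener that sendToNodeRED used to register is replaced by a persistent msg-input listener set up in mounted(), the deep-chat signals object is kept in component state, and handleNodeREDResponse either appends intermediate messages through the new deepChat template ref or resolves the pending request via signals.onResponse. Below is a minimal sketch of that wiring only; the component scaffolding, props declaration, and payload construction are assumptions for illustration and not taken from the package source.

// Sketch of the new response flow. Identifiers visible in the diff: deepChat ref,
// signals, intermediateMessage, handleNodeREDResponse. Everything else is assumed.
export default {
  props: ['id'], // assumption: the widget receives its node id as a prop (the diff uses this.id)
  data() {
    return {
      conversation: [],
      signals: undefined, // deep-chat request handler, stored by handleConnection
    };
  },
  mounted() {
    // One persistent listener instead of a $socket.once per request
    this.$socket.on('msg-input:' + this.id, (msg) => this.handleNodeREDResponse(msg));
  },
  beforeUnmount() {
    this.$socket.off('msg-input:' + this.id);
  },
  methods: {
    async handleConnection(body, signals) {
      this.signals = signals; // remember how to answer this request later
      this.$socket.emit('widget-action', this.id, { payload: body }); // simplified payload
    },
    handleNodeREDResponse(msg) {
      const message = {
        text: msg.payload.message?.content || msg.payload.content,
        role: 'ai',
      };
      if (msg.payload.intermediateMessage) {
        // Intermediate updates are appended directly to the chat history
        this.$refs.deepChat.addMessage(message);
      } else {
        // The final answer resolves the request deep-chat is still awaiting
        this.signals.onResponse(message);
      }
    },
  },
};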