@5minds/node-red-dashboard-2-processcube-chat 0.1.1-develop-e94793-mcvyrm7g → 0.1.1-develop-3537a6-mf54lair
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/nodes/ui-deepchat.html +87 -102
- package/nodes/ui-deepchat.js +13 -126
- package/package.json +1 -1
- package/resources/ui-deepchat.umd.js +61 -61
- package/resources/ui-deepchat.umd.js.map +1 -1
- package/ui/components/UIDeepChat.vue +335 -290
@@ -1,30 +1,23 @@
 <template>
-    <div
-        <deep-chat
-            ref="deepChat"
+    <div class="deep-chat-container">
+        <deep-chat
             :style="deepChatStyle"
-            :
-            :
-            :speech-to-text="speechToTextConfig"
-            :camera="cameraConfig"
-            :microphone="microphoneConfig"
-            :mixed-files="attachmentsConfig"
-            :avatars="config.avatars"
-            :names="config.names"
-            :timestamps="config.timestamps"
-            :stream="config.stream"
+            :textInput="textInputConfig"
+            :introMessage="introMessageConfig"
             :connect="connectConfig"
-
-
-
-
-
+            :speechToText="props.speechToText"
+            :camera="props.camera"
+            :mixedFiles="props.attachments"
+            :avatars="props.avatars"
+            :names="props.names"
+            :timestamps="props.timestamps"
+            :stream="props.stream"
         ></deep-chat>
     </div>
 </template>

 <script>
-import 'deep-chat'
+import 'deep-chat';

 export default {
     name: 'UIDeepChat',
@@ -32,311 +25,363 @@ export default {
     inject: ['$socket'],
     data() {
         return {
-
-
-                placeholder: 'Type a message...',
-                apiUrl: '',
-                apiKey: '',
-                model: 'gpt-3.5-turbo',
-                textInput: true,
-                speechToText: false,
-                camera: false,
-                microphone: false,
-                attachments: false,
-                avatars: true,
-                names: true,
-                timestamps: false,
-                stream: false
-            },
-            messages: []
-        }
+            conversation: [],
+        };
     },
-
+
     computed: {
-
+        deepChatStyle() {
             return {
                 width: '100%',
-
-
-
+                maxWidth: '600px',
+                height: '80vh',
+                borderRadius: '8px',
+            };
         },

-        deepChatStyle() {
-            return {
-                width: '300px',
-                height: '300px',
-                flex: '1',
-                border: 'none',
-                borderRadius: '8px'
-            }
-        },
-
-        speechToTextConfig() {
-            return this.config.speechToText ? {
-                button: true,
-                displayInterimResults: true
-            } : false
-        },
-
-        cameraConfig() {
-            return this.config.camera ? {
-                button: true
-            } : false
-        },
-
-        microphoneConfig() {
-            return this.config.microphone ? {
-                button: true,
-                audio: true
-            } : false
-        },
-
-        attachmentsConfig() {
-            return this.config.attachments ? {
-                button: true,
-                acceptedFormats: ".jpeg,.jpg,.png,.gif,.pdf,.txt,.doc,.docx"
-            } : false
-        },
-
         textInputConfig() {
-            if (!this.config.textInput) return false
-
             return {
                 placeholder: {
-                    text: this.
-                }
-            }
-        },
-
-        speechToTextConfig() {
-            return this.config.speechToText ? {
-                button: true,
-                displayInterimResults: true
-            } : false
+                    text: this.props.placeholder || 'Type a message...',
+                },
+            };
         },
-
-
-            return
-
-            }
-        },
-
-        microphoneConfig() {
-            return this.config.microphone ? {
-                button: true,
-                audio: true
-            } : false
-        },
-
-        attachmentsConfig() {
-            return this.config.attachments ? {
-                button: true,
-                acceptedFormats: ".jpeg,.jpg,.png,.gif,.pdf,.txt,.doc,.docx"
-            } : false
+
+        introMessageConfig() {
+            return {
+                text: this.props.introMessage || 'Hello! How can I help you today?',
+            };
         },
-
+
         connectConfig() {
-            if (this.config.apiUrl && this.config.apiKey) {
-                return {
-                    url: this.config.apiUrl,
-                    method: 'POST',
-                    headers: {
-                        'Authorization': `Bearer ${this.config.apiKey}`,
-                        'Content-Type': 'application/json'
-                    },
-                    body: {
-                        model: this.config.model,
-                        max_tokens: 2000,
-                        temperature: 0.7
-                    },
-                    stream: this.config.stream
-                }
-            }
-
-            // Custom Handler für Node-RED Integration
             return {
-                handler: this.
-
-
-            }
-        },
-
-    mounted() {
-        this.setupSocketListeners()
-        this.applyConfiguration()
+                handler: this.handleConnection,
+            };
+        },
     },
-
     beforeUnmount() {
-        this.
+        this.$socket.off('msg-input:' + this.id);
     },
-
+
     methods: {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            this.$socket.on('msg-input:' + this.id, (msg) => {
-                if (msg.payload) {
-                    this.addMessage(msg.payload, msg.role || 'ai', msg.html, msg.files)
-                }
-                if (msg.config) {
-                    this.config = { ...this.config, ...msg.config }
-                    this.updateConfiguration()
-                }
-                if (msg.clear) {
-                    this.clearMessages()
-                }
-            })
-        },
-
-        removeSocketListeners() {
-            this.$socket.off('deepchat-newMessage:' + this.id)
-            this.$socket.off('deepchat-updateConfig:' + this.id)
-            this.$socket.off('deepchat-clearMessages:' + this.id)
-            this.$socket.off('msg-input:' + this.id)
-        },
-
-        applyConfiguration() {
-            // Konfiguration aus props übernehmen
-            if (this.props) {
-                Object.keys(this.props).forEach(key => {
-                    if (this.config.hasOwnProperty(key)) {
-                        this.config[key] = this.props[key]
+        async handleConnection(body, signals) {
+            try {
+                // Extract messages and files from FormData
+                let newMessages = [];
+                let files = [];
+
+                if (body instanceof FormData) {
+                    for (let [key, value] of body.entries()) {
+                        if (key.startsWith('message')) {
+                            try {
+                                const messageContent = JSON.parse(value);
+                                newMessages.push(messageContent);
+                            } catch (e) {
+                                console.error('Error parsing message:', e);
+                            }
+                        } else if (key === 'files') {
+                            files.push(value);
+                        }
                     }
-
+
+                    // Process files if present
+                    if (files.length > 0) {
+                        const processedFiles = await this.processFiles(files);
+
+                        if (newMessages.length > 0) {
+                            // Add files to existing message
+                            const lastMessage = newMessages[newMessages.length - 1];
+                            lastMessage.files = processedFiles;
+                        } else {
+                            // Create new message for files only
+                            newMessages.push({
+                                role: 'user',
+                                text: '', // Empty text when only files are sent
+                                files: processedFiles,
+                            });
+                            console.log('lus777', newMessages);
+                        }
+                    }
+                } else if (body.messages) {
+                    newMessages = body.messages;
+                }
+
+                // Add to conversation history
+                this.conversation.push(...newMessages);
+
+                // Send to Node-RED
+                const payload = this.formatForChatGPT(this.conversation);
+                this.sendToNodeRED(payload, signals);
+            } catch (error) {
+                console.error('Error in handleConnection:', error);
+                this.sendErrorResponse(signals, 'Sorry, there was an error processing your message.');
             }
         },
-
-
+
+        async processFiles(files) {
             try {
-
-
-
-
+                return await Promise.all(
+                    files.map(async (file) => {
+                        if (!(file instanceof File)) return file;
+
+                        if (file.type.startsWith('image/')) {
+                            return await this.processImageFile(file);
+                        } else if (file.type.startsWith('audio/')) {
+                            return await this.processAudioFile(file);
+                        } else {
+                            // Handle other file types (PDFs, documents, etc.)
+                            return await this.processDocumentFile(file);
+                        }
+                    })
+                );
             } catch (error) {
-                console.error('Error
+                console.error('Error processing files:', error);
+                return [];
             }
         },
-
-
-
-
-
-
+
+        processImageFile(file) {
+            return new Promise((resolve, reject) => {
+                const reader = new FileReader();
+                reader.onload = (e) =>
+                    resolve({
+                        name: file.name,
+                        type: file.type,
+                        size: file.size,
+                        src: e.target.result,
+                    });
+                reader.onerror = () => reject(new Error('Failed to read image'));
+                reader.readAsDataURL(file);
+            });
         },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        processAudioFile(file) {
+            return new Promise((resolve, reject) => {
+                const reader = new FileReader();
+                reader.onload = (e) => {
+                    try {
+                        const result = e.target.result;
+                        const commaIndex = result.indexOf(',');
+                        if (commaIndex === -1) {
+                            throw new Error('Invalid data URL format: missing comma separator');
+                        }
+                        const base64Data = result.split(',')[1]; // Remove data URL prefix
+
+                        resolve({
+                            name: file.name,
+                            type: file.type,
+                            size: file.size,
+                            base64Data: base64Data,
+                        });
+                    } catch (error) {
+                        reject(error);
+                    }
+                };
+                reader.onerror = () => reject(new Error('Failed to read audio'));
+                reader.readAsDataURL(file);
+            });
         },
-
-
-
-
-
-
-
-
+
+        processDocumentFile(file) {
+            return new Promise((resolve, reject) => {
+                const reader = new FileReader();
+                reader.onload = (e) => {
+                    try {
+                        const base64Data = e.target.result.split(',')[1]; // Remove data URL prefix
+
+                        resolve({
+                            name: file.name,
+                            type: file.type,
+                            size: file.size,
+                            fileData: base64Data, // Use fileData for documents
+                        });
+                    } catch (error) {
+                        reject(error);
+                    }
+                };
+                reader.onerror = () => reject(new Error('Failed to read document'));
+                reader.readAsDataURL(file);
+            });
         },
-
-
-
-
-
-        }
+
+        formatForChatGPT(conversation, textOnly = false) {
+            const payload = {
+                messages: conversation.map((msg) => this.formatMessage(msg, textOnly)),
+                model: this.props.model,
+            };
+
+            // Add ChatGPT API extensions
+            if (this.props.tools) {
+                payload.tools = this.props.tools;
+            }
+
+            if (this.props.tool_choice) {
+                payload.tool_choice = this.props.tool_choice;
+            }
+
+            if (this.props.temperature !== undefined) {
+                payload.temperature = this.props.temperature;
+            }
+
+            if (this.props.max_tokens) {
+                payload.max_tokens = this.props.max_tokens;
+            }
+
+            if (this.props.top_p !== undefined) {
+                payload.top_p = this.props.top_p;
+            }
+
+            if (this.props.frequency_penalty !== undefined) {
+                payload.frequency_penalty = this.props.frequency_penalty;
+            }
+
+            if (this.props.presence_penalty !== undefined) {
+                payload.presence_penalty = this.props.presence_penalty;
+            }
+
+            if (this.props.response_format) {
+                payload.response_format = this.props.response_format;
+            }
+
+            return payload;
+        },
+
+        formatMessage(msg, textOnly = false) {
+            const message = {
+                role: msg.role === 'ai' ? 'assistant' : 'user',
+            };
+
+            // Handle tool calls (for assistant messages)
+            if (msg.tool_calls) {
+                message.tool_calls = msg.tool_calls;
+                message.content = msg.content || null;
+                return message;
+            }
+
+            // Handle tool responses (for tool messages)
+            if (msg.role === 'tool') {
+                message.role = 'tool';
+                message.content = msg.content;
+                message.tool_call_id = msg.tool_call_id;
+                return message;
+            }
+
+            // Handle messages with files
+            if (msg.files && msg.files.length > 0) {
+                message.content = this.buildContentArray(msg, textOnly);
+                return message;
+            }
+
+            // Handle text-only messages
+            message.content = msg.text || msg.content || '';
+            return message;
+        },
+
+        buildContentArray(msg, textOnly) {
+            const content = [{ type: 'text', text: msg.text || '' }];
+
+            msg.files.forEach((file) => {
+                if (file.type && file.type.startsWith('image/') && file.src) {
+                    content.push({
+                        type: 'image_url',
+                        image_url: { url: file.src },
+                    });
+                } else if (!textOnly && file.type && file.type.startsWith('audio/') && file.base64Data) {
+                    const format = this.getAudioFormat(file.type);
+                    content.push({
+                        type: 'input_audio',
+                        input_audio: {
+                            data: file.base64Data,
+                            format: format,
+                        },
+                    });
+                } else if (!textOnly && file.fileData) {
+                    content.push({
+                        type: 'file',
+                        file: {
+                            filename: file.name,
+                            file_data: file.fileData,
+                        },
+                    });
+                }
+            });
+
+            return content;
         },
-
-
-
-
-
-
-
+
+        getAudioFormat(mimeType) {
+            if (mimeType.includes('mp3')) return 'mp3';
+            if (mimeType.includes('wav')) return 'wav';
+            if (mimeType.includes('webm')) return 'webm';
+            if (mimeType.includes('m4a')) return 'm4a';
+            return 'wav';
         },
-
-
-
+
+        sendToNodeRED(payload, signals, fallbackMessage = null) {
+            this.$socket.emit('widget-action', this.id, { payload });
+
+            this.$socket.once('msg-input:' + this.id, (msg) => {
+                this.handleNodeREDResponse(msg, signals);
+            });
         },
-
-        handleError(error) {
-            console.error('Deep Chat Error:', error)
-
-            // Fehler an Node-RED melden
-            this.$socket.emit('chat-message', this.id, {
-                error: error.message || error,
-                timestamp: new Date().toISOString()
-            })
-        }
-    }
-}
-</script>

-
-
-
-
-    overflow: hidden;
-    background: var(--v-theme-surface);
-}
+        handleNodeREDResponse(msg, signals) {
+            try {
+                // Store the complete ChatGPT response in conversation
+                const fullResponse = msg.payload;

-
-
-
-
-
-
-
-}
+                // Create AI message with complete response data
+                const aiMessage = {
+                    role: 'ai',
+                    content: fullResponse.message?.content || fullResponse.content,
+                    text: fullResponse.message?.content || fullResponse.content,
+                    fullResponse: fullResponse,
+                };

-
-
-
-
-
-    --border-color: var(--v-theme-outline);
-    --text-color: var(--v-theme-on-surface);
-    --placeholder-color: var(--v-theme-on-surface-variant);
-}
+                // Handle tool calls if present
+                if (fullResponse.message?.tool_calls) {
+                    aiMessage.tool_calls = fullResponse.message.tool_calls;
+                    aiMessage.content = fullResponse.message.content || null;
+                }

-
-.ui-deepchat-container :deep(.text-input-container) {
-    border-top: 1px solid var(--v-theme-outline) !important;
-    background: var(--v-theme-surface) !important;
-}
+                this.conversation.push(aiMessage);

-
-
-
-
+                // For the chat display, show appropriate response
+                if (fullResponse.message?.tool_calls) {
+                    // If ChatGPT wants to call tools, show a message
+                    signals.onResponse({
+                        text: 'Function call requested...',
+                        role: 'ai',
+                    });
+                } else if (fullResponse.message?.content || fullResponse.content) {
+                    // Normal text response
+                    signals.onResponse({
+                        text: fullResponse.message?.content || fullResponse.content,
+                        role: 'ai',
+                    });
+                }
+            } catch (error) {
+                console.error('Error handling response:', error);
+                this.sendErrorResponse(signals, 'Error processing response');
+            }
+        },

-
-
-
-
-
+        sendErrorResponse(signals, message) {
+            if (signals && signals.onResponse) {
+                signals.onResponse({
+                    text: message,
+                    role: 'ai',
+                });
+            }
+        },
+    },
+};
+</script>
+
+<style scoped>
+.deep-chat-container {
+    display: flex;
+    justify-content: center;
+    width: 100%;
 }
-</style>
+</style>