@5minds/node-red-dashboard-2-processcube-chat 0.1.1-develop-3537a6-mf54lair → 0.1.1-develop-2edaee-mfeeq6ff

@@ -1,6 +1,7 @@
 <template>
     <div class="deep-chat-container">
         <deep-chat
+            ref="deepChat"
             :style="deepChatStyle"
             :textInput="textInputConfig"
             :introMessage="introMessageConfig"
@@ -59,6 +60,13 @@ export default {
             };
         },
     },
+    mounted() {
+        console.log('mounted');
+        this.$socket.on('msg-input:' + this.id, (msg) => {
+            console.log(JSON.stringify(msg));
+            this.handleNodeREDResponse(msg);
+        });
+    },
     beforeUnmount() {
         this.$socket.off('msg-input:' + this.id);
     },
@@ -114,7 +122,7 @@ export default {
                 this.sendToNodeRED(payload, signals);
             } catch (error) {
                 console.error('Error in handleConnection:', error);
-                this.sendErrorResponse(signals, 'Sorry, there was an error processing your message.');
+                this.sendErrorResponse('Sorry, there was an error processing your message.');
             }
         },
 
@@ -319,13 +327,9 @@ export default {
 
         sendToNodeRED(payload, signals, fallbackMessage = null) {
             this.$socket.emit('widget-action', this.id, { payload });
-
-            this.$socket.once('msg-input:' + this.id, (msg) => {
-                this.handleNodeREDResponse(msg, signals);
-            });
         },
 
-        handleNodeREDResponse(msg, signals) {
+        handleNodeREDResponse(msg) {
             try {
                 // Store the complete ChatGPT response in conversation
                 const fullResponse = msg.payload;
@@ -349,30 +353,28 @@ export default {
                 // For the chat display, show appropriate response
                 if (fullResponse.message?.tool_calls) {
                     // If ChatGPT wants to call tools, show a message
-                    signals.onResponse({
+                    this.$refs.deepChat.addMessage({
                         text: 'Function call requested...',
                         role: 'ai',
                     });
                 } else if (fullResponse.message?.content || fullResponse.content) {
                     // Normal text response
-                    signals.onResponse({
+                    this.$refs.deepChat.addMessage({
                         text: fullResponse.message?.content || fullResponse.content,
                         role: 'ai',
                     });
                 }
             } catch (error) {
                 console.error('Error handling response:', error);
-                this.sendErrorResponse(signals, 'Error processing response');
+                this.sendErrorResponse('Error processing response');
             }
         },
 
-        sendErrorResponse(signals, message) {
-            if (signals && signals.onResponse) {
-                signals.onResponse({
-                    text: message,
-                    role: 'ai',
-                });
-            }
+        sendErrorResponse(message) {
+            this.$refs.deepChat.addMessage({
+                text: message,
+                role: 'ai',
+            });
         },
     },
 };
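
Net effect of the hunks above: the one-shot $socket.once(...) callback and the signals parameter are dropped in favor of a persistent 'msg-input:' + this.id listener registered in mounted(), and replies are written to the chat through the new deepChat template ref. The following is a minimal sketch of the resulting round trip, not the full component: signatures are simplified, the tool_calls and error branches are omitted, and it assumes only the Dashboard 2.0 widget context (this.$socket, this.id) plus the deep-chat addMessage call exactly as used in the diff.

    export default {
        mounted() {
            // Persistent listener: every msg-input event for this widget id is handled here.
            this.$socket.on('msg-input:' + this.id, (msg) => this.handleNodeREDResponse(msg));
        },
        beforeUnmount() {
            // Remove the listener registered in mounted().
            this.$socket.off('msg-input:' + this.id);
        },
        methods: {
            // Outgoing: forward the user's message to the Node-RED flow.
            sendToNodeRED(payload) {
                this.$socket.emit('widget-action', this.id, { payload });
            },
            // Incoming: pick the reply text and render it via the deep-chat ref.
            handleNodeREDResponse(msg) {
                const text = msg.payload?.message?.content || msg.payload?.content;
                if (text) {
                    this.$refs.deepChat.addMessage({ text, role: 'ai' });
                }
            },
        },
    };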