@clikvn/agent-widget-embedded 0.0.3-dev → 0.0.4-dev

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@clikvn/agent-widget-embedded",
    "description": "This is agent widget",
-   "version": "0.0.3-dev",
+   "version": "0.0.4-dev",
    "author": "Clik JSC",
    "license": "ISC",
    "type": "module",
@@ -32,6 +32,7 @@
    "@types/react-dom": "^18.3.1",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
+   "device-detector-js": "^3.0.3",
    "framer-motion": "^11.18.0",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
@@ -477,14 +477,14 @@ export const MoreIcon = ({ size = 16 }: { size?: number }) => {
    );
  };

- export const TrashIcon = ({ size = 16 }: { size?: number }) => {
+ export const TrashIcon = ({ size = 16, color }: { size?: number, color?: string }) => {
    return (
      <svg
        height={size}
        strokeLinejoin="round"
        viewBox="0 0 16 16"
        width={size}
-       style={{ color: 'currentcolor' }}
+       style={{ color: color || 'currentcolor' }}
      >
        <path
          fillRule="evenodd"
@@ -859,9 +859,9 @@ export const CheckCirclFillIcon = ({ size = 16 }: { size?: number }) => {
  export const MicrophoneIcon = ({ size = 16 }: { size?: number }) => {
    return (
      <svg
-       height={size}
        strokeLinejoin="round"
        viewBox="0 0 490.9 490.9"
+       height={size}
        width={size}
        style={{ color: 'currentcolor' }}
      >
@@ -881,3 +881,20 @@ export const MicrophoneIcon = ({ size = 16 }: { size?: number }) => {
      </svg>
    );
  };
+
+ export const CircleDotIcon = ({ size = 16, color = 'red' }: { size?: number, color?: string }) => (
+   <svg
+     xmlns="http://www.w3.org/2000/svg"
+     height={size}
+     width={size}
+     viewBox="0 0 24 24"
+     fill="none"
+     stroke={color}
+     stroke-width="2"
+     stroke-linecap="round"
+     stroke-linejoin="round"
+   >
+     <circle cx="12" cy="12" r="10" />
+     <circle cx="12" cy="12" r="1" />
+   </svg>
+ );
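The icon changes are additive: `TrashIcon` gains an optional `color` prop that falls back to `currentcolor`, and `CircleDotIcon` is new (note it uses kebab-case SVG attributes such as `stroke-width`, unlike the camelCase `strokeLinejoin` used elsewhere in the same file). A minimal sketch of both icons in use, mirroring the recording UI added later in this diff; `RecordingIndicatorSketch` is an illustrative name, not part of the package:

```tsx
import React from 'react';
import { CircleDotIcon, TrashIcon } from './Icons';

// Sketch only: red "recording" dot plus a red discard control.
export const RecordingIndicatorSketch = () => (
  <div className="flex items-center gap-3">
    <CircleDotIcon size={16} color="red" />
    <TrashIcon size={14} color="red" />
  </div>
);
```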
@@ -243,7 +243,18 @@ const NonMemoizedMarkdown: FC<{
            content.indexOf('.jpeg') >= 0 ||
            content.indexOf('.png') >= 0 ||
            content.indexOf('.gif') >= 0);
-       if (isImageUrl) {
+       const isAudioUrl =
+         content &&
+         (content.indexOf('.mp3') >= 0 ||
+           content.indexOf('.wav') >= 0 ||
+           content.indexOf('.ogg') >= 0);
+       if (isAudioUrl) {
+         return (
+           <audio controls>
+             <source src={content} />
+           </audio>
+         );
+       } else if (isImageUrl) {
          return (
            <a href={content} target="_blank" rel="noopener noreferrer">
              <img
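The new branch mirrors the existing image handling: any rendered string that merely contains `.mp3`, `.wav`, or `.ogg` is treated as an audio URL and rendered as an inline `<audio>` player instead of a link. A stricter variant would test only the URL path's extension; a sketch of that idea (not what the package does):

```ts
// Sketch only: stricter alternative to the indexOf() substring checks above.
const AUDIO_EXTENSIONS = ['.mp3', '.wav', '.ogg'];

const isAudioUrlStrict = (content: string): boolean => {
  try {
    const pathname = new URL(content).pathname.toLowerCase();
    return AUDIO_EXTENSIONS.some((ext) => pathname.endsWith(ext));
  } catch {
    return false; // not a parseable URL
  }
};
```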
@@ -12,15 +12,25 @@ import {
    cn,
    generateExtendedFileName,
    generateUUID,
+   sleep,
  } from '../../utils/commonUtils';
  import { PreviewAttachment } from './PreviewAttachment';
- import { ArrowUpIcon, PaperclipIcon, StopIcon } from './Icons';
+ import {
+   ArrowUpIcon,
+   CircleDotIcon,
+   MicrophoneIcon,
+   PaperclipIcon,
+   PlusIcon,
+   StopIcon,
+   TrashIcon,
+ } from './Icons';
  import { ChatMessageType, IFileUpload } from '../../types/flowise.type';
  import { BotType } from '../../types/bot.type';

  import { createAttachments } from '../../services/chat.service';
  import { Button } from './ui/Button';
  import { Textarea } from './ui/Textarea';
+ import { useAudioRecording } from '../../hooks/useAudioRecording';

  const suggestedActions = [
    {
@@ -81,6 +91,24 @@ export const MultimodalInput: FC<PropsType> = ({
    bot,
    apiHost,
  }) => {
+   const {
+     isRecording,
+     setIsRecording,
+     onRecordingCancelled,
+     onRecordingStopped,
+     elapsedTime,
+     isLoadingRecording,
+   } = useAudioRecording({
+     addRecordingToPreviews: async (blob: Blob) => {
+       try {
+         const audioFile = await toAudioBase64(blob);
+         handleSubmit(undefined, [audioFile]);
+         setIsRecording(false);
+       } catch (error) {
+         console.error('Error uploading files!', error);
+       }
+     },
+   });
    const textareaRef = useRef<HTMLTextAreaElement | null>(null);
    const { width } = useWindowSize();
    useEffect(() => {
@@ -130,7 +158,7 @@ export const MultimodalInput: FC<PropsType> = ({
      handleSubmit(undefined, attachments);
      setLocalStorageInput('');
      if (setAttachments) {
-       setAttachments((currentAttachments: IFileUpload[]) => []);
+       setAttachments((_) => []);
        if (fileInputRef.current) {
          (fileInputRef.current as HTMLInputElement).value = '';
        }
@@ -157,6 +185,33 @@ export const MultimodalInput: FC<PropsType> = ({
      [chatId]
    );

+   const toAudioBase64 = (blob: Blob) => {
+     return new Promise<IFileUpload>((resolve) => {
+       let mimeType = '';
+       const pos = blob.type.indexOf(';');
+       if (pos === -1) {
+         mimeType = blob.type;
+       } else {
+         mimeType = blob.type.substring(0, pos);
+       }
+
+       // read blob and add to previews
+       const reader = new FileReader();
+       reader.readAsDataURL(blob);
+       reader.onloadend = () => {
+         const base64data = reader.result as string;
+         const upload: IFileUpload = {
+           tempId: generateUUID(),
+           data: base64data,
+           type: 'audio',
+           name: `audio_${Date.now()}.wav`,
+           mime: mimeType,
+         };
+         resolve(upload);
+       };
+     });
+   };
+
    const toBase64 = async (
      file: File,
      type = 'file'
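`toAudioBase64` packages the recorded `Blob` in the same `IFileUpload` shape the file picker produces, with the audio inlined as a data URL. Roughly what comes out (values are illustrative; the `.wav` name is hard-coded even though the blob's real MIME type, assumed here to be a typical `MediaRecorder` output, is preserved in `mime`):

```ts
// Illustrative result of toAudioBase64(blob); tempId and the payload are runtime values.
const upload = {
  tempId: '3f6d...',                        // generateUUID()
  data: 'data:audio/webm;base64,GkXf...',   // FileReader.readAsDataURL(blob)
  type: 'audio',
  name: 'audio_1736900000000.wav',          // fixed .wav suffix
  mime: 'audio/webm',                       // blob.type with any ';codecs=...' part stripped
};
```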
@@ -213,7 +268,6 @@ export const MultimodalInput: FC<PropsType> = ({
    const handleFileChange = useCallback(
      async (event: ChangeEvent<HTMLInputElement>) => {
        const files = Array.from(event.target.files || []);
-
        try {
          const uploadPromises = files.map((file) => checkUploadFile(file));
          const uploadedAttachments = await Promise.all(uploadPromises);
@@ -347,24 +401,67 @@ export const MultimodalInput: FC<PropsType> = ({
            className="rounded-full p-1.5 h-fit absolute bottom-2 right-2 m-0.5 border dark:border-zinc-600"
            onClick={(event) => {
              event.preventDefault();
-             submitForm();
+             if (isRecording) {
+               onRecordingStopped();
+             } else {
+               submitForm();
+             }
            }}
-           disabled={input.length === 0 || !!uploadQueue.length}
+           disabled={
+             !isRecording && (input.length === 0 || !!uploadQueue.length)
+           } // zero input or uploading
          >
            <ArrowUpIcon size={14} />
          </Button>
        )}
-
+       {isRecording ? (
+         <>
+           <div
+             className="rounded-full bg-background flex absolute p-1 bottom-2 right-[80px]"
+             data-testid="input"
+           >
+             <div className="flex items-center gap-3">
+               <span>
+                 <CircleDotIcon color="red" />
+               </span>
+               <span>{elapsedTime || '00:00'}</span>
+               {isLoadingRecording && <span className="ml-1.5">Sending...</span>}
+             </div>
+           </div>
+           <Button
+             className="rounded-full p-1.5 h-fit absolute bottom-2 right-11 m-0.5 dark:border-zinc-700"
+             variant="outline"
+             onClick={(event) => {
+               event.preventDefault();
+               onRecordingCancelled();
+             }}
+           >
+             <TrashIcon size={14} color="red" />
+           </Button>
+         </>
+       ) : (
+         <Button
+           className="rounded-full p-1.5 h-fit absolute bottom-2 right-11 m-0.5 dark:border-zinc-700"
+           onClick={(event) => {
+             event.preventDefault();
+             setIsRecording(true);
+           }}
+           variant="outline"
+           disabled={isLoading}
+         >
+           <MicrophoneIcon size={14} />
+         </Button>
+       )}
        <Button
-         className="rounded-full p-1.5 h-fit absolute bottom-2 right-11 m-0.5 dark:border-zinc-700"
+         className="rounded-full p-1.5 h-fit absolute bottom-2 left-2 m-0.5 dark:border-zinc-700"
          onClick={(event) => {
            event.preventDefault();
            fileInputRef.current?.click();
          }}
          variant="outline"
-         disabled={isLoading}
+         disabled={isLoading || isRecording}
        >
-         <PaperclipIcon size={14} />
+         <PlusIcon size={14} />
        </Button>
      </div>
    );
@@ -1,5 +1,6 @@
  import { IFileUpload } from '../../types/flowise.type';
  import { LoaderIcon } from './Icons';
+ import React from 'react';

  export const PreviewAttachment = ({
    attachment,
@@ -11,9 +12,13 @@ export const PreviewAttachment = ({
    const { name, data, mime, tempId } = attachment;
    return (
      <div className="flex flex-col gap-2">
-       <div className="w-30 p-2 max-w-[200px] bg-muted rounded-md relative flex flex-col items-center justify-center">
+       <div className="w-30 p-0 max-w-[400px] bg-muted rounded-md relative flex flex-col items-center justify-center">
          {data ? (
-           mime.startsWith('image') ? (
+           mime.startsWith('audio') ? (
+             <audio controls>
+               <source src={data} type={mime} />
+             </audio>
+           ) : mime.startsWith('image') ? (
              <img
                key={tempId}
                src={data}
@@ -33,9 +38,9 @@ export const PreviewAttachment = ({
            </div>
          )}
        </div>
-       <div className="text-xs text-zinc-500 max-w-[200px] truncate ">
-         {name}
-       </div>
+       {/*<div className="text-xs text-zinc-500 max-w-[200px] truncate ">*/}
+       {/* {name}*/}
+       {/*</div>*/}
      </div>
    );
  };
@@ -0,0 +1,54 @@
+ import { useEffect, useState } from 'react';
+ import {
+   cancelAudioRecording,
+   startAudioRecording,
+   stopAudioRecording,
+ } from '../utils/audioRecording';
+
+ type Props = {
+   addRecordingToPreviews: (blob: Blob) => void;
+ };
+
+ export const useAudioRecording = (props: Props) => {
+   const { addRecordingToPreviews } = props;
+   const [elapsedTime, setElapsedTime] = useState('00:00');
+   const [recordingNotSupported, setRecordingNotSupported] = useState(false);
+   const [isLoadingRecording, setIsLoadingRecording] = useState(false);
+   const [isRecording, setIsRecording] = useState(false);
+
+   useEffect(() => {
+     if (isRecording) {
+       onRecordingStarted();
+     }
+   }, [isRecording]);
+   const onRecordingStarted = () => {
+     setIsRecording(true);
+     startAudioRecording(
+       setIsRecording,
+       setRecordingNotSupported,
+       setElapsedTime
+     );
+   };
+
+   const onRecordingCancelled = () => {
+     if (!recordingNotSupported) cancelAudioRecording();
+     setIsRecording(false);
+     setRecordingNotSupported(false);
+   };
+
+   const onRecordingStopped = () => {
+     setIsLoadingRecording(true);
+     stopAudioRecording(addRecordingToPreviews);
+   };
+
+   return {
+     elapsedTime,
+     recordingNotSupported,
+     isLoadingRecording,
+     isRecording,
+     setIsRecording,
+     onRecordingCancelled,
+     onRecordingStopped,
+     onRecordingStarted,
+   };
+ };
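The hook keeps all recording state internal; a consumer only flips `isRecording` and receives the finished `Blob` through `addRecordingToPreviews`, which is exactly how MultimodalInput wires it up above. A minimal stand-alone sketch of a consumer (the component name, import path, and `console.log` handling are illustrative):

```tsx
import React from 'react';
import { useAudioRecording } from '../hooks/useAudioRecording';

// Sketch only: toggle a recording and receive the finished Blob.
export const RecorderButtonSketch = () => {
  const { isRecording, setIsRecording, onRecordingStopped, elapsedTime } =
    useAudioRecording({
      addRecordingToPreviews: (blob) => {
        // MultimodalInput converts this with toAudioBase64() and submits it.
        console.log('recorded', blob.size, 'bytes');
      },
    });

  return (
    <button
      onClick={() =>
        isRecording ? onRecordingStopped() : setIsRecording(true)
      }
    >
      {isRecording ? `Stop (${elapsedTime})` : 'Record'}
    </button>
  );
};
```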
@@ -3,6 +3,7 @@ import { useCallback, useEffect, useRef, useState } from 'react';
  import { StreamResponse } from '../models/FlowiseClient';
  import { predict } from '../services/chat.service';
  import {
+   ChatMessageMetadataType,
    ChatMessageType,
    IFileUpload,
    PredictionData,
@@ -83,6 +84,13 @@ export const useChat = (props: PropsType): ReturnType => {
      } else if (chunk.event == 'sourceDocuments') {
        newMessage.sourceDocuments = chunk.data as SourceDocument[];
        mutateMessages([...msgs, { ...newMessage }]);
+     } else if (chunk.event == 'metadata') {
+       newMessage.metaData = chunk.data as ChatMessageMetadataType;
+       const lastMsg = msgs[msgs.length - 1];
+       if (lastMsg) {
+         lastMsg.content = newMessage.metaData.question;
+       }
+       mutateMessages([...msgs, { ...newMessage }]);
      }
    };

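Besides storing `metaData`, the new branch rewrites the content of the last message in `msgs` from `metaData.question`, so the server can replace what the submitted user message displays (for example, the effective question behind an audio-only upload; the server side is not part of this diff, so that reading is an assumption). A sketch of the stream chunk this branch expects, matching the `ChatMessageMetadataType` interface added at the end of this diff (values are illustrative):

```ts
// Illustrative 'metadata' stream chunk; data matches ChatMessageMetadataType.
const metadataChunk = {
  event: 'metadata',
  data: {
    chatId: 'c6f1...',
    chatMessageId: 'a912...',
    question: 'What are your opening hours?',
  },
};
```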
@@ -189,20 +197,20 @@ export const useChat = (props: PropsType): ReturnType => {
    async (event?: { preventDefault?: () => void }, files?: IFileUpload[]) => {
      event?.preventDefault?.();

-     if (!input) return;
+     if (!input && !files) return;

      const msgs = messagesRef.current.concat({
        id: generateUUID(),
        chatId: chatIdRef.current,
        role: 'userMessage',
-       content: input,
+       content: input || '',
        createdDate: new Date().toISOString(),
        fileUploads: files || [],
      });

      triggerRequest(msgs, {
        chatId: chatIdRef.current,
-       question: input,
+       question: input || '',
        uploads: files || [],
        chatflowId: bot?.id,
        overrideConfig: overrideConfig?.overrideConfig,
@@ -31,6 +31,13 @@ export interface ChatMessageType {
    fileUploads?: IFileUpload[];
    fileAnnotations?: FileAnnotation[];
    agentReasoning?: AgentReasoning[];
+   metaData?: ChatMessageMetadataType;
+ }
+
+ export interface ChatMessageMetadataType {
+   chatId: string;
+   chatMessageId?: string;
+   question?: string;
  }

  export interface IAction {