@rimori/client 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/components/CRUDModal.js +0 -1
- package/dist/components/ai/Assistant.d.ts +9 -0
- package/dist/components/ai/Assistant.js +59 -0
- package/dist/components/ai/Avatar.d.ts +11 -0
- package/dist/components/ai/Avatar.js +39 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.js +86 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/ai/utils.d.ts +6 -0
- package/dist/components/ai/utils.js +14 -0
- package/dist/components/audio/Playbutton.js +4 -5
- package/dist/components/avatar/Assistant.d.ts +9 -0
- package/dist/components/avatar/Assistant.js +59 -0
- package/dist/components/avatar/Avatar.d.ts +12 -0
- package/dist/components/avatar/Avatar.js +42 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.js +84 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/avatar/utils.d.ts +6 -0
- package/dist/components/avatar/utils.js +14 -0
- package/dist/components.d.ts +9 -0
- package/dist/components.js +10 -0
- package/dist/controller/AIController.d.ts +4 -3
- package/dist/controller/AIController.js +32 -8
- package/dist/controller/ObjectController.d.ts +2 -2
- package/dist/controller/ObjectController.js +4 -5
- package/dist/controller/SettingsController.d.ts +3 -1
- package/dist/controller/SettingsController.js +9 -0
- package/dist/controller/SharedContentController.js +6 -6
- package/dist/controller/SidePluginController.d.ts +14 -0
- package/dist/{plugin/VoiceController.js → controller/SidePluginController.js} +18 -15
- package/dist/controller/VoiceController.js +1 -1
- package/dist/core.d.ts +9 -0
- package/dist/core.js +10 -0
- package/dist/hooks/UseChatHook.js +2 -2
- package/dist/index.d.ts +3 -2
- package/dist/index.js +4 -2
- package/dist/plugin/PluginController.d.ts +4 -12
- package/dist/plugin/PluginController.js +43 -70
- package/dist/plugin/RimoriClient.d.ts +87 -27
- package/dist/plugin/RimoriClient.js +101 -67
- package/dist/plugin/fromRimori/EventBus.d.ts +98 -0
- package/dist/plugin/fromRimori/EventBus.js +240 -0
- package/dist/providers/PluginProvider.d.ts +1 -0
- package/dist/providers/PluginProvider.js +64 -12
- package/dist/worker/WorkerSetup.d.ts +6 -0
- package/dist/worker/WorkerSetup.js +79 -0
- package/package.json +16 -3
- package/src/components/CRUDModal.tsx +1 -3
- package/src/components/ai/Assistant.tsx +96 -0
- package/src/components/ai/Avatar.tsx +61 -0
- package/src/components/ai/EmbeddedAssistent/AudioInputField.tsx +64 -0
- package/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.tsx +75 -0
- package/src/components/ai/EmbeddedAssistent/TTS/MessageSender.ts +91 -0
- package/src/components/ai/EmbeddedAssistent/TTS/Player.ts +192 -0
- package/src/components/ai/EmbeddedAssistent/VoiceRecoder.tsx +56 -0
- package/src/components/ai/utils.ts +23 -0
- package/src/components/audio/Playbutton.tsx +4 -5
- package/src/components.ts +10 -0
- package/src/controller/AIController.ts +84 -60
- package/src/controller/ObjectController.ts +4 -6
- package/src/controller/SettingsController.ts +10 -1
- package/src/controller/SharedContentController.ts +6 -6
- package/src/controller/SidePluginController.ts +36 -0
- package/src/controller/VoiceController.ts +1 -1
- package/src/core.ts +10 -0
- package/src/hooks/UseChatHook.ts +2 -2
- package/src/index.ts +4 -2
- package/src/plugin/PluginController.ts +46 -76
- package/src/plugin/RimoriClient.ts +151 -76
- package/src/plugin/fromRimori/EventBus.ts +301 -0
- package/src/plugin/fromRimori/readme.md +2 -0
- package/src/providers/PluginProvider.tsx +70 -14
- package/src/worker/WorkerSetup.ts +80 -0
- package/dist/CRUDModal.d.ts +0 -16
- package/dist/CRUDModal.js +0 -31
- package/dist/MarkdownEditor.d.ts +0 -8
- package/dist/MarkdownEditor.js +0 -46
- package/dist/audio/Playbutton.d.ts +0 -14
- package/dist/audio/Playbutton.js +0 -73
- package/dist/components/hooks/UseChatHook.d.ts +0 -15
- package/dist/components/hooks/UseChatHook.js +0 -21
- package/dist/plugin/AIController copy.d.ts +0 -22
- package/dist/plugin/AIController copy.js +0 -68
- package/dist/plugin/AIController.d.ts +0 -22
- package/dist/plugin/AIController.js +0 -68
- package/dist/plugin/ObjectController.d.ts +0 -34
- package/dist/plugin/ObjectController.js +0 -77
- package/dist/plugin/SettingController.d.ts +0 -13
- package/dist/plugin/SettingController.js +0 -55
- package/dist/plugin/VoiceController.d.ts +0 -2
- package/dist/providers/EventEmitter.d.ts +0 -11
- package/dist/providers/EventEmitter.js +0 -41
- package/dist/providers/EventEmitterContext.d.ts +0 -6
- package/dist/providers/EventEmitterContext.js +0 -19
- package/dist/utils/DifficultyConverter.d.ts +0 -3
- package/dist/utils/DifficultyConverter.js +0 -7
- package/dist/utils/constants.d.ts +0 -4
- package/dist/utils/constants.js +0 -12
- package/dist/utils/plugin/Client.d.ts +0 -72
- package/dist/utils/plugin/Client.js +0 -118
- package/dist/utils/plugin/PluginController.d.ts +0 -36
- package/dist/utils/plugin/PluginController.js +0 -119
- package/dist/utils/plugin/PluginUtils.d.ts +0 -2
- package/dist/utils/plugin/PluginUtils.js +0 -23
- package/dist/utils/plugin/RimoriClient.d.ts +0 -72
- package/dist/utils/plugin/RimoriClient.js +0 -118
- package/dist/utils/plugin/ThemeSetter.d.ts +0 -1
- package/dist/utils/plugin/ThemeSetter.js +0 -13
- package/dist/utils/plugin/WhereClauseBuilder.d.ts +0 -24
- package/dist/utils/plugin/WhereClauseBuilder.js +0 -79
- package/dist/utils/plugin/providers/EventEmitter.d.ts +0 -11
- package/dist/utils/plugin/providers/EventEmitter.js +0 -41
- package/dist/utils/plugin/providers/EventEmitterContext.d.ts +0 -6
- package/dist/utils/plugin/providers/EventEmitterContext.js +0 -19
- package/dist/utils/plugin/providers/PluginProvider.d.ts +0 -8
- package/dist/utils/plugin/providers/PluginProvider.js +0 -49
- package/src/providers/EventEmitter.ts +0 -48
- package/src/providers/EventEmitterContext.tsx +0 -27
- package/src/utils/constants.ts +0 -18
package/README.md
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# Rimori Client Package
|
|
2
|
+
|
|
3
|
+
This is the React connection package required by plugins to be able to
|
|
4
|
+
communicate with the Rimori platform.
|
|
5
|
+
|
|
6
|
+
## Usage
|
|
7
|
+
|
|
8
|
+
In order to use the package first install the package with
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
npm i @rimori/client
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
Then wrap your app the following way to get started:
|
|
15
|
+
|
|
16
|
+
```typescript
|
|
17
|
+
import { lazy } from "react";
|
|
18
|
+
import { PluginProvider } from "@rimori/client";
|
|
19
|
+
import { HashRouter, Route, Routes } from "react-router-dom";
|
|
20
|
+
|
|
21
|
+
// adding the theme setter
|
|
22
|
+
|
|
23
|
+
const queryClient = new QueryClient();
|
|
24
|
+
|
|
25
|
+
// load all pages lazy for fast loading speed
|
|
26
|
+
const SettingsPage = lazy(() => import("./pages/settings/SettingsPage"));
|
|
27
|
+
const DiscussionsPage = lazy(() => import("./pages/discussions/page"));
|
|
28
|
+
|
|
29
|
+
const App = () => (
|
|
30
|
+
// this provides connectivity to Rimori
|
|
31
|
+
<PluginProvider pluginId="my-rimori-plugin-id">
|
|
32
|
+
//allows using the routes set the plugin settings
|
|
33
|
+
<HashRouter future={{ v7_startTransition: true, v7_relativeSplatPath: true }}>
|
|
34
|
+
<Routes>
|
|
35
|
+
// the plugins pages
|
|
36
|
+
<Route path="/discussions" element={<DiscussionsPage />} />
|
|
37
|
+
// the settings page
|
|
38
|
+
<Route path="/settings" element={<SettingsPage />} />
|
|
39
|
+
</Routes>
|
|
40
|
+
</HashRouter>
|
|
41
|
+
</PluginProvider>
|
|
42
|
+
);
|
|
43
|
+
|
|
44
|
+
export default App;
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
Inside the pages simply use the `usePlugin` hook.
|
|
48
|
+
|
|
49
|
+
```typescript
|
|
50
|
+
const { getSettings, ... } = usePlugin();
|
|
51
|
+
```
|
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
"use client";
|
|
2
1
|
import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
|
|
3
2
|
import { useEffect, useRef } from "react";
|
|
4
3
|
export function CRUDModal({ actionbuttons, children, title, buttonText, className, closeAble = true, show = false, onClose }) {
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { FirstMessages } from './utils';
|
|
2
|
+
interface Props {
|
|
3
|
+
voiceId: any;
|
|
4
|
+
avatarImageUrl: string;
|
|
5
|
+
onComplete: (result: any) => void;
|
|
6
|
+
autoStartConversation?: FirstMessages;
|
|
7
|
+
}
|
|
8
|
+
export declare function AssistantChat({ avatarImageUrl, voiceId, onComplete, autoStartConversation }: Props): import("react/jsx-runtime").JSX.Element;
|
|
9
|
+
export {};
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
+
import React, { useEffect, useMemo } from 'react';
|
|
3
|
+
import { CircleAudioAvatar } from './EmbeddedAssistent/CircleAudioAvatar';
|
|
4
|
+
import { AudioInputField } from './EmbeddedAssistent/AudioInputField';
|
|
5
|
+
import { MessageSender } from './EmbeddedAssistent/TTS/MessageSender';
|
|
6
|
+
import Markdown from 'react-markdown';
|
|
7
|
+
import { useChat } from '../../hooks/UseChatHook';
|
|
8
|
+
import { usePlugin } from '../../components';
|
|
9
|
+
import { getFirstMessages } from './utils';
|
|
10
|
+
export function AssistantChat({ avatarImageUrl, voiceId, onComplete, autoStartConversation }) {
|
|
11
|
+
var _a;
|
|
12
|
+
const [oralCommunication, setOralCommunication] = React.useState(true);
|
|
13
|
+
const { llm, event } = usePlugin();
|
|
14
|
+
const sender = useMemo(() => new MessageSender(llm.getVoice, voiceId), []);
|
|
15
|
+
const { messages, append, isLoading, setMessages } = useChat();
|
|
16
|
+
const lastAssistantMessage = (_a = [...messages].filter((m) => m.role === 'assistant').pop()) === null || _a === void 0 ? void 0 : _a.content;
|
|
17
|
+
useEffect(() => {
|
|
18
|
+
sender.setOnLoudnessChange((value) => event.emit('self.avatar.triggerLoudness', value));
|
|
19
|
+
if (!autoStartConversation) {
|
|
20
|
+
return;
|
|
21
|
+
}
|
|
22
|
+
setMessages(getFirstMessages(autoStartConversation));
|
|
23
|
+
// append([{ role: 'user', content: autoStartConversation.userMessage }]);
|
|
24
|
+
if (autoStartConversation.assistantMessage) {
|
|
25
|
+
// console.log("autostartmessages", { autoStartConversation, isLoading });
|
|
26
|
+
sender.handleNewText(autoStartConversation.assistantMessage, isLoading);
|
|
27
|
+
}
|
|
28
|
+
}, []);
|
|
29
|
+
useEffect(() => {
|
|
30
|
+
var _a;
|
|
31
|
+
let message = lastAssistantMessage;
|
|
32
|
+
if (message !== ((_a = messages[messages.length - 1]) === null || _a === void 0 ? void 0 : _a.content)) {
|
|
33
|
+
message = undefined;
|
|
34
|
+
}
|
|
35
|
+
sender.handleNewText(message, isLoading);
|
|
36
|
+
}, [messages, isLoading]);
|
|
37
|
+
const lastMessage = messages[messages.length - 1];
|
|
38
|
+
useEffect(() => {
|
|
39
|
+
console.log("lastMessage", lastMessage);
|
|
40
|
+
const toolInvocations = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.toolInvocations;
|
|
41
|
+
if (toolInvocations && toolInvocations.length > 0) {
|
|
42
|
+
console.log("toolInvocations", toolInvocations);
|
|
43
|
+
onComplete(toolInvocations[0].args);
|
|
44
|
+
}
|
|
45
|
+
}, [lastMessage]);
|
|
46
|
+
if ((lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.toolInvocations) && lastMessage.toolInvocations.length > 0) {
|
|
47
|
+
console.log("lastMessage test2", lastMessage);
|
|
48
|
+
const args = lastMessage.toolInvocations[0].args;
|
|
49
|
+
const success = args.explanationUnderstood === "TRUE" || args.studentKnowsTopic === "TRUE";
|
|
50
|
+
return _jsxs("div", { className: "px-5 pt-5 overflow-y-auto text-center", style: { height: "478px" }, children: [_jsx("h1", { className: 'text-center mt-5 mb-5', children: success ? "Great job!" : "You failed" }), _jsx("p", { children: args.improvementHints })] });
|
|
51
|
+
}
|
|
52
|
+
return (_jsxs("div", { children: [oralCommunication && _jsx(CircleAudioAvatar, { imageUrl: avatarImageUrl, className: 'mx-auto my-10' }), _jsx("div", { className: "w-full", children: lastAssistantMessage && _jsx("div", { className: "px-5 pt-5 overflow-y-auto remirror-theme", style: { height: "4k78px" }, children: _jsx(Markdown, { children: lastAssistantMessage }) }) }), _jsx(AudioInputField, { blockSubmission: isLoading, onSubmit: message => {
|
|
53
|
+
append([{ role: 'user', content: message, id: messages.length.toString() }]);
|
|
54
|
+
}, onAudioControl: voice => {
|
|
55
|
+
setOralCommunication(voice);
|
|
56
|
+
sender.setVolume(voice ? 1 : 0);
|
|
57
|
+
} })] }));
|
|
58
|
+
}
|
|
59
|
+
;
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { Tool } from '../../core';
|
|
2
|
+
import { FirstMessages } from './utils';
|
|
3
|
+
interface Props {
|
|
4
|
+
title?: string;
|
|
5
|
+
voiceId: any;
|
|
6
|
+
avatarImageUrl: string;
|
|
7
|
+
agentTools: Tool[];
|
|
8
|
+
autoStartConversation?: FirstMessages;
|
|
9
|
+
}
|
|
10
|
+
export declare function Avatar({ avatarImageUrl, voiceId, title, agentTools, autoStartConversation }: Props): import("react/jsx-runtime").JSX.Element;
|
|
11
|
+
export {};
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect, useMemo } from 'react';
|
|
3
|
+
import { VoiceRecorder } from './EmbeddedAssistent/VoiceRecoder';
|
|
4
|
+
import { MessageSender } from './EmbeddedAssistent/TTS/MessageSender';
|
|
5
|
+
import { CircleAudioAvatar } from './EmbeddedAssistent/CircleAudioAvatar';
|
|
6
|
+
import { useChat } from '../../hooks/UseChatHook';
|
|
7
|
+
import { usePlugin } from '../../components';
|
|
8
|
+
import { getFirstMessages } from './utils';
|
|
9
|
+
export function Avatar({ avatarImageUrl, voiceId, title, agentTools, autoStartConversation }) {
|
|
10
|
+
const { llm, event } = usePlugin();
|
|
11
|
+
const sender = useMemo(() => new MessageSender(llm.getVoice, voiceId), []);
|
|
12
|
+
const { messages, append, isLoading, lastMessage, setMessages } = useChat(agentTools);
|
|
13
|
+
useEffect(() => {
|
|
14
|
+
console.log("messages", messages);
|
|
15
|
+
}, [messages]);
|
|
16
|
+
useEffect(() => {
|
|
17
|
+
sender.setOnLoudnessChange((value) => event.emit('self.avatar.triggerLoudness', value));
|
|
18
|
+
if (!autoStartConversation)
|
|
19
|
+
return;
|
|
20
|
+
setMessages(getFirstMessages(autoStartConversation));
|
|
21
|
+
// append([{ role: 'user', content: autoStartConversation.userMessage }]);
|
|
22
|
+
if (autoStartConversation.assistantMessage) {
|
|
23
|
+
// console.log("autostartmessages", { autoStartConversation, isLoading });
|
|
24
|
+
sender.handleNewText(autoStartConversation.assistantMessage, isLoading);
|
|
25
|
+
}
|
|
26
|
+
else if (autoStartConversation.userMessage) {
|
|
27
|
+
append([{ role: 'user', content: autoStartConversation.userMessage, id: messages.length.toString() }]);
|
|
28
|
+
}
|
|
29
|
+
}, []);
|
|
30
|
+
useEffect(() => {
|
|
31
|
+
if ((lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.role) === 'assistant') {
|
|
32
|
+
sender.handleNewText(lastMessage.content, isLoading);
|
|
33
|
+
}
|
|
34
|
+
}, [lastMessage, isLoading]);
|
|
35
|
+
return (_jsxs("div", { className: 'pb-8', children: [title && _jsx("p", { className: "text-center mt-5 w-3/4 mx-auto rounded-lg dark:text-gray-100", children: title }), _jsx(CircleAudioAvatar, { imageUrl: avatarImageUrl, width: "250px", className: 'mx-auto' }), _jsx("div", { className: 'w-16 h-16 flex text-4xl shadow-lg flex-row justify-center items-center rounded-full mx-auto bg-gray-400 dark:bg-gray-800', children: _jsx(VoiceRecorder, { className: 'w-7', iconSize: '300', onVoiceRecorded: (message) => {
|
|
36
|
+
append([{ role: 'user', content: "Message(" + Math.floor((messages.length + 1) / 2) + "): " + message, id: messages.length.toString() }]);
|
|
37
|
+
} }) })] }));
|
|
38
|
+
}
|
|
39
|
+
;
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
interface AudioInputFieldProps {
|
|
2
|
+
onSubmit: (text: string) => void;
|
|
3
|
+
onAudioControl?: (voice: boolean) => void;
|
|
4
|
+
blockSubmission?: boolean;
|
|
5
|
+
}
|
|
6
|
+
export declare function AudioInputField({ onSubmit, onAudioControl, blockSubmission }: AudioInputFieldProps): import("react/jsx-runtime").JSX.Element;
|
|
7
|
+
export {};
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
+
import { useState } from 'react';
|
|
3
|
+
import { VoiceRecorder } from './VoiceRecoder';
|
|
4
|
+
import { BiSolidRightArrow } from "react-icons/bi";
|
|
5
|
+
import { HiMiniSpeakerXMark, HiMiniSpeakerWave } from "react-icons/hi2";
|
|
6
|
+
export function AudioInputField({ onSubmit, onAudioControl, blockSubmission = false }) {
|
|
7
|
+
const [text, setText] = useState('');
|
|
8
|
+
const [audioEnabled, setAudioEnabled] = useState(true);
|
|
9
|
+
const handleSubmit = (manualText) => {
|
|
10
|
+
if (blockSubmission)
|
|
11
|
+
return;
|
|
12
|
+
const sendableText = manualText || text;
|
|
13
|
+
if (sendableText.trim()) {
|
|
14
|
+
onSubmit(sendableText);
|
|
15
|
+
setTimeout(() => {
|
|
16
|
+
setText('');
|
|
17
|
+
}, 100);
|
|
18
|
+
}
|
|
19
|
+
};
|
|
20
|
+
const handleKeyDown = (e) => {
|
|
21
|
+
if (blockSubmission)
|
|
22
|
+
return;
|
|
23
|
+
if (e.key === 'Enter' && e.ctrlKey) {
|
|
24
|
+
setText(text + '\n');
|
|
25
|
+
}
|
|
26
|
+
else if (e.key === 'Enter') {
|
|
27
|
+
handleSubmit();
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
return (_jsxs("div", { className: "flex items-center bg-gray-600 pt-2 pb-2 p-2", children: [onAudioControl && _jsx("button", { onClick: () => {
|
|
31
|
+
onAudioControl(!audioEnabled);
|
|
32
|
+
setAudioEnabled(!audioEnabled);
|
|
33
|
+
}, className: "cursor-default", children: audioEnabled ? _jsx(HiMiniSpeakerWave, { className: 'w-9 h-9 cursor-pointer' }) : _jsx(HiMiniSpeakerXMark, { className: 'w-9 h-9 cursor-pointer' }) }), _jsx(VoiceRecorder, { onVoiceRecorded: (m) => {
|
|
34
|
+
console.log('onVoiceRecorded', m);
|
|
35
|
+
handleSubmit(m);
|
|
36
|
+
} }), _jsx("textarea", { value: text, onChange: (e) => setText(e.target.value), onKeyDown: handleKeyDown, className: "flex-1 border-none rounded-lg p-2 text-gray-800 focus::outline-none", placeholder: 'Type a message...', disabled: blockSubmission }), _jsx("button", { onClick: () => handleSubmit(), className: "cursor-default", disabled: blockSubmission, children: _jsx(BiSolidRightArrow, { className: 'w-9 h-10 cursor-pointer' }) })] }));
|
|
37
|
+
}
|
|
38
|
+
;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useEffect, useRef } from 'react';
|
|
3
|
+
import { EventBus } from '../../../core';
|
|
4
|
+
export function CircleAudioAvatar({ imageUrl, className, width = "150px" }) {
|
|
5
|
+
const canvasRef = useRef(null);
|
|
6
|
+
useEffect(() => {
|
|
7
|
+
const canvas = canvasRef.current;
|
|
8
|
+
if (canvas) {
|
|
9
|
+
const ctx = canvas.getContext('2d');
|
|
10
|
+
if (ctx) {
|
|
11
|
+
const image = new Image();
|
|
12
|
+
image.src = imageUrl;
|
|
13
|
+
image.onload = () => {
|
|
14
|
+
draw(ctx, canvas, image, 0);
|
|
15
|
+
};
|
|
16
|
+
const handleLoudness = (event) => {
|
|
17
|
+
draw(ctx, canvas, image, event.data.loudness);
|
|
18
|
+
};
|
|
19
|
+
// Subscribe to loudness changes
|
|
20
|
+
const listenerId = EventBus.on('self.avatar.triggerLoudness', handleLoudness);
|
|
21
|
+
return () => {
|
|
22
|
+
EventBus.off(listenerId);
|
|
23
|
+
};
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
}, [imageUrl]);
|
|
27
|
+
// Function to draw on the canvas
|
|
28
|
+
const draw = (ctx, canvas, image, loudness) => {
|
|
29
|
+
if (canvas && ctx) {
|
|
30
|
+
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
|
31
|
+
// Draw pulsing circle
|
|
32
|
+
const radius = Math.min(canvas.width, canvas.height) / 3;
|
|
33
|
+
const centerX = canvas.width / 2;
|
|
34
|
+
const centerY = canvas.height / 2;
|
|
35
|
+
const pulseRadius = radius + loudness / 2.5; // Adjust the divisor for sensitivity
|
|
36
|
+
ctx.beginPath();
|
|
37
|
+
ctx.arc(centerX, centerY, pulseRadius, 0, Math.PI * 2, true);
|
|
38
|
+
ctx.strokeStyle = 'rgba(0, 0, 0, 0.5)';
|
|
39
|
+
ctx.lineWidth = 5;
|
|
40
|
+
ctx.stroke();
|
|
41
|
+
// Draw image circle
|
|
42
|
+
ctx.save();
|
|
43
|
+
ctx.beginPath();
|
|
44
|
+
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
|
|
45
|
+
ctx.closePath();
|
|
46
|
+
ctx.clip();
|
|
47
|
+
ctx.drawImage(image, centerX - radius, centerY - radius, radius * 2, radius * 2);
|
|
48
|
+
ctx.restore();
|
|
49
|
+
// Draw circular frame around the image
|
|
50
|
+
ctx.beginPath();
|
|
51
|
+
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
|
|
52
|
+
ctx.strokeStyle = 'rgba(20,20, 20, 0.9)';
|
|
53
|
+
ctx.lineWidth = 5; // Adjust the width of the frame as needed
|
|
54
|
+
ctx.stroke();
|
|
55
|
+
}
|
|
56
|
+
};
|
|
57
|
+
return _jsx("canvas", { ref: canvasRef, className: className, width: 500, height: 500, style: { width } });
|
|
58
|
+
}
|
|
59
|
+
;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
type VoiceBackend = (text: string, voice?: string, speed?: number) => Promise<Blob>;
|
|
2
|
+
export declare class MessageSender {
|
|
3
|
+
private player;
|
|
4
|
+
private fetchedSentences;
|
|
5
|
+
private lastLoading;
|
|
6
|
+
private voice;
|
|
7
|
+
private model;
|
|
8
|
+
private voiceBackend;
|
|
9
|
+
constructor(voiceBackend: VoiceBackend, voice?: string, model?: string);
|
|
10
|
+
private getCompletedSentences;
|
|
11
|
+
handleNewText(currentText: string | undefined, isLoading: boolean): Promise<void>;
|
|
12
|
+
private generateSpeech;
|
|
13
|
+
play(): void;
|
|
14
|
+
stop(): void;
|
|
15
|
+
private reset;
|
|
16
|
+
setVolume(volume: number): void;
|
|
17
|
+
setOnLoudnessChange(callback: (value: number) => void): void;
|
|
18
|
+
}
|
|
19
|
+
export {};
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
+
});
|
|
9
|
+
};
|
|
10
|
+
import { ChunkedAudioPlayer } from './Player';
|
|
11
|
+
export class MessageSender {
|
|
12
|
+
constructor(voiceBackend, voice = 'alloy', model = 'openai') {
|
|
13
|
+
this.player = new ChunkedAudioPlayer();
|
|
14
|
+
this.fetchedSentences = new Set();
|
|
15
|
+
this.lastLoading = false;
|
|
16
|
+
this.voiceBackend = voiceBackend;
|
|
17
|
+
this.voice = voice;
|
|
18
|
+
this.model = model;
|
|
19
|
+
}
|
|
20
|
+
getCompletedSentences(currentText, isLoading) {
|
|
21
|
+
// Split the text based on the following characters: .,?!
|
|
22
|
+
// Only split on : when followed by a space
|
|
23
|
+
const pattern = /(.+?[,.?!]|.+?:\s+|.+?\n+)/g;
|
|
24
|
+
const result = [];
|
|
25
|
+
let match;
|
|
26
|
+
while ((match = pattern.exec(currentText)) !== null) {
|
|
27
|
+
const sentence = match[0].trim();
|
|
28
|
+
if (sentence.length > 0) {
|
|
29
|
+
result.push(sentence);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
if (!isLoading) {
|
|
33
|
+
const lastFullSentence = result[result.length - 1];
|
|
34
|
+
const leftoverIndex = currentText.lastIndexOf(lastFullSentence) + lastFullSentence.length;
|
|
35
|
+
if (leftoverIndex < currentText.length) {
|
|
36
|
+
result.push(currentText.slice(leftoverIndex).trim());
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
return result;
|
|
40
|
+
}
|
|
41
|
+
handleNewText(currentText, isLoading) {
|
|
42
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
43
|
+
if (!this.lastLoading && isLoading) {
|
|
44
|
+
this.reset();
|
|
45
|
+
}
|
|
46
|
+
this.lastLoading = isLoading;
|
|
47
|
+
if (!currentText) {
|
|
48
|
+
return;
|
|
49
|
+
}
|
|
50
|
+
const sentences = this.getCompletedSentences(currentText, isLoading);
|
|
51
|
+
for (let i = 0; i < sentences.length; i++) {
|
|
52
|
+
const sentence = sentences[i];
|
|
53
|
+
if (!this.fetchedSentences.has(sentence)) {
|
|
54
|
+
this.fetchedSentences.add(sentence);
|
|
55
|
+
const audioData = yield this.generateSpeech(sentence);
|
|
56
|
+
yield this.player.addChunk(audioData, i);
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
generateSpeech(sentence) {
|
|
62
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
63
|
+
const blob = yield this.voiceBackend(sentence, this.voice, 1.0);
|
|
64
|
+
return yield blob.arrayBuffer();
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
play() {
|
|
68
|
+
this.player.playAgain();
|
|
69
|
+
}
|
|
70
|
+
stop() {
|
|
71
|
+
this.player.stopPlayback();
|
|
72
|
+
}
|
|
73
|
+
reset() {
|
|
74
|
+
this.stop();
|
|
75
|
+
this.fetchedSentences.clear();
|
|
76
|
+
this.player.reset();
|
|
77
|
+
}
|
|
78
|
+
setVolume(volume) {
|
|
79
|
+
this.player.setVolume(volume);
|
|
80
|
+
}
|
|
81
|
+
setOnLoudnessChange(callback) {
|
|
82
|
+
this.player.setOnLoudnessChange((loudness) => {
|
|
83
|
+
callback(loudness);
|
|
84
|
+
});
|
|
85
|
+
}
|
|
86
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
export declare class ChunkedAudioPlayer {
|
|
2
|
+
private audioContext;
|
|
3
|
+
private chunkQueue;
|
|
4
|
+
private isPlaying;
|
|
5
|
+
private analyser;
|
|
6
|
+
private dataArray;
|
|
7
|
+
private shouldMonitorLoudness;
|
|
8
|
+
private isMonitoring;
|
|
9
|
+
private handle;
|
|
10
|
+
private volume;
|
|
11
|
+
private loudnessCallback;
|
|
12
|
+
private currentIndex;
|
|
13
|
+
private startedPlaying;
|
|
14
|
+
constructor();
|
|
15
|
+
private init;
|
|
16
|
+
setOnLoudnessChange(callback: (value: number) => void): void;
|
|
17
|
+
setVolume(volume: number): void;
|
|
18
|
+
addChunk(chunk: ArrayBuffer, position: number): Promise<void>;
|
|
19
|
+
private playChunks;
|
|
20
|
+
stopPlayback(): void;
|
|
21
|
+
private playChunk;
|
|
22
|
+
playAgain(): Promise<void>;
|
|
23
|
+
private monitorLoudness;
|
|
24
|
+
reset(): void;
|
|
25
|
+
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
+
});
|
|
9
|
+
};
|
|
10
|
+
export class ChunkedAudioPlayer {
|
|
11
|
+
constructor() {
|
|
12
|
+
this.chunkQueue = [];
|
|
13
|
+
this.isPlaying = false;
|
|
14
|
+
this.shouldMonitorLoudness = true;
|
|
15
|
+
this.isMonitoring = false;
|
|
16
|
+
this.handle = 0;
|
|
17
|
+
this.volume = 1.0;
|
|
18
|
+
this.loudnessCallback = () => { };
|
|
19
|
+
this.currentIndex = 0;
|
|
20
|
+
this.startedPlaying = false;
|
|
21
|
+
this.init();
|
|
22
|
+
}
|
|
23
|
+
init() {
|
|
24
|
+
this.audioContext = new AudioContext();
|
|
25
|
+
this.analyser = this.audioContext.createAnalyser();
|
|
26
|
+
this.analyser.fftSize = 256; // Set the FFT size (smaller values provide faster updates, larger ones give better resolution)
|
|
27
|
+
const bufferLength = this.analyser.frequencyBinCount;
|
|
28
|
+
this.dataArray = new Uint8Array(bufferLength); // Array to hold frequency data
|
|
29
|
+
}
|
|
30
|
+
setOnLoudnessChange(callback) {
|
|
31
|
+
this.loudnessCallback = callback;
|
|
32
|
+
}
|
|
33
|
+
setVolume(volume) {
|
|
34
|
+
this.volume = volume;
|
|
35
|
+
}
|
|
36
|
+
addChunk(chunk, position) {
|
|
37
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
38
|
+
console.log('Adding chunk', position, chunk);
|
|
39
|
+
this.chunkQueue[position] = chunk;
|
|
40
|
+
// console.log("received chunk", {
|
|
41
|
+
// chunkQueue: this.chunkQueue.length,
|
|
42
|
+
// isPlaying: this.isPlaying,
|
|
43
|
+
// })
|
|
44
|
+
if (position === 0 && !this.startedPlaying) {
|
|
45
|
+
this.startedPlaying = true;
|
|
46
|
+
this.playChunks();
|
|
47
|
+
}
|
|
48
|
+
});
|
|
49
|
+
}
|
|
50
|
+
playChunks() {
|
|
51
|
+
// console.log({ isPlaying: this.isPlaying });
|
|
52
|
+
if (this.isPlaying)
|
|
53
|
+
return;
|
|
54
|
+
if (!this.chunkQueue[this.currentIndex]) {
|
|
55
|
+
// wait until the correct chunk arrives
|
|
56
|
+
setTimeout(() => this.playChunks(), 10);
|
|
57
|
+
}
|
|
58
|
+
this.isPlaying = true;
|
|
59
|
+
this.playChunk(this.chunkQueue[this.currentIndex]).then(() => {
|
|
60
|
+
this.isPlaying = false;
|
|
61
|
+
this.currentIndex++;
|
|
62
|
+
if (this.chunkQueue[this.currentIndex]) {
|
|
63
|
+
this.shouldMonitorLoudness = true;
|
|
64
|
+
this.playChunks();
|
|
65
|
+
}
|
|
66
|
+
else {
|
|
67
|
+
// console.log('Playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
|
|
68
|
+
setTimeout(() => {
|
|
69
|
+
// console.log('Check again if really playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
|
|
70
|
+
if (this.chunkQueue.length > this.currentIndex) {
|
|
71
|
+
this.playChunks();
|
|
72
|
+
}
|
|
73
|
+
else {
|
|
74
|
+
this.startedPlaying = false;
|
|
75
|
+
this.shouldMonitorLoudness = false;
|
|
76
|
+
}
|
|
77
|
+
}, 1000);
|
|
78
|
+
}
|
|
79
|
+
});
|
|
80
|
+
}
|
|
81
|
+
stopPlayback() {
|
|
82
|
+
// console.log('Stopping playback');
|
|
83
|
+
// Implement logic to stop the current playback
|
|
84
|
+
this.isPlaying = false;
|
|
85
|
+
this.chunkQueue = [];
|
|
86
|
+
this.startedPlaying = false;
|
|
87
|
+
this.shouldMonitorLoudness = false;
|
|
88
|
+
cancelAnimationFrame(this.handle);
|
|
89
|
+
}
|
|
90
|
+
playChunk(chunk) {
|
|
91
|
+
console.log({ queue: this.chunkQueue });
|
|
92
|
+
if (!chunk) {
|
|
93
|
+
return Promise.resolve();
|
|
94
|
+
}
|
|
95
|
+
// console.log('Playing chunk', chunk);
|
|
96
|
+
return new Promise((resolve) => {
|
|
97
|
+
const source = this.audioContext.createBufferSource();
|
|
98
|
+
this.audioContext.decodeAudioData(chunk.slice(0)).then((audioBuffer) => {
|
|
99
|
+
source.buffer = audioBuffer;
|
|
100
|
+
// Create a GainNode for volume control
|
|
101
|
+
const gainNode = this.audioContext.createGain();
|
|
102
|
+
gainNode.gain.value = this.volume;
|
|
103
|
+
// Connect the source to the GainNode, then to the analyser node, then to the destination (speakers)
|
|
104
|
+
source.connect(gainNode);
|
|
105
|
+
gainNode.connect(this.analyser);
|
|
106
|
+
this.analyser.connect(this.audioContext.destination);
|
|
107
|
+
source.start(0);
|
|
108
|
+
// console.log('Playing chunk', this.currentIndex);
|
|
109
|
+
gainNode.gain.value = this.volume;
|
|
110
|
+
source.onended = () => {
|
|
111
|
+
// console.log('Chunk ended');
|
|
112
|
+
resolve();
|
|
113
|
+
};
|
|
114
|
+
// Start monitoring loudness only once
|
|
115
|
+
if (!this.isMonitoring) {
|
|
116
|
+
this.isMonitoring = true;
|
|
117
|
+
this.shouldMonitorLoudness = true;
|
|
118
|
+
this.monitorLoudness();
|
|
119
|
+
}
|
|
120
|
+
});
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
playAgain() {
|
|
124
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
125
|
+
console.log('Playing again');
|
|
126
|
+
if (this.chunkQueue.length > 0 && !this.isPlaying) {
|
|
127
|
+
this.playChunks();
|
|
128
|
+
}
|
|
129
|
+
});
|
|
130
|
+
}
|
|
131
|
+
monitorLoudness() {
|
|
132
|
+
// Stop monitoring when the flag is false
|
|
133
|
+
if (!this.shouldMonitorLoudness) {
|
|
134
|
+
// console.log('Loudness monitoring stopped.');
|
|
135
|
+
cancelAnimationFrame(this.handle);
|
|
136
|
+
this.loudnessCallback(0);
|
|
137
|
+
return;
|
|
138
|
+
}
|
|
139
|
+
// Get the time domain data from the analyser (this is a snapshot of the waveform)
|
|
140
|
+
this.analyser.getByteTimeDomainData(this.dataArray);
|
|
141
|
+
// Calculate the RMS (root mean square) of the waveform values to get the perceived loudness
|
|
142
|
+
let sum = 0;
|
|
143
|
+
for (let i = 0; i < this.dataArray.length; i++) {
|
|
144
|
+
const value = this.dataArray[i] / 128.0 - 1.0; // Normalize between -1 and 1
|
|
145
|
+
sum += value * value;
|
|
146
|
+
}
|
|
147
|
+
const rms = Math.sqrt(sum / this.dataArray.length);
|
|
148
|
+
// Handle the case where RMS is 0 to avoid log10(0)
|
|
149
|
+
if (rms === 0) {
|
|
150
|
+
// console.log('Current loudness: Silent');
|
|
151
|
+
}
|
|
152
|
+
else {
|
|
153
|
+
let loudnessInDb = 20 * Math.log10(rms); // Convert to dB
|
|
154
|
+
// console.log('Current loudness:' + loudnessInDb);
|
|
155
|
+
const minDb = -57;
|
|
156
|
+
const maxDb = -15;
|
|
157
|
+
if (loudnessInDb < minDb) {
|
|
158
|
+
loudnessInDb = minDb;
|
|
159
|
+
}
|
|
160
|
+
if (loudnessInDb > maxDb) {
|
|
161
|
+
loudnessInDb = maxDb;
|
|
162
|
+
}
|
|
163
|
+
const loudnessScale = ((loudnessInDb - minDb) / (maxDb - minDb)) * 100;
|
|
164
|
+
// console.log("root:corrent loudness", loudnessScale);
|
|
165
|
+
this.loudnessCallback(loudnessScale);
|
|
166
|
+
}
|
|
167
|
+
// Call this method again at regular intervals if you want continuous loudness monitoring
|
|
168
|
+
this.handle = requestAnimationFrame(() => this.monitorLoudness());
|
|
169
|
+
}
|
|
170
|
+
reset() {
|
|
171
|
+
// console.log('Resetting player');
|
|
172
|
+
this.stopPlayback();
|
|
173
|
+
this.currentIndex = 0;
|
|
174
|
+
this.shouldMonitorLoudness = true;
|
|
175
|
+
//reset to the beginning when the class gets initialized
|
|
176
|
+
this.isMonitoring = false;
|
|
177
|
+
this.isPlaying = false;
|
|
178
|
+
this.init();
|
|
179
|
+
}
|
|
180
|
+
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
// TypeScript down-level helper for async/await (emitted by tsc).
// Drives a generator function as if it were an async function and resolves
// the returned promise with the generator's final value. Machine-generated;
// do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in the promise implementation P unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the awaited value, or throw the rejection into it;
        // any error escaping the generator rejects the outer promise.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either finish with the generator's return value or await the next yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
10
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
11
|
+
import { useState, useRef, forwardRef, useImperativeHandle } from 'react';
|
|
12
|
+
import { FaMicrophone } from 'react-icons/fa6';
|
|
13
|
+
import { usePlugin } from '../../../components';
|
|
14
|
+
export const VoiceRecorder = forwardRef(({ onVoiceRecorded, iconSize, className }, ref) => {
    // Microphone recorder button: captures audio with MediaRecorder,
    // transcribes it via the plugin's LLM, and passes the resulting text to
    // onVoiceRecorded. Exposes startRecording/stopRecording on the
    // forwarded ref for imperative control by parents.
    const [isRecording, setIsRecording] = useState(false);
    const mediaRecorderRef = useRef(null);
    const audioChunksRef = useRef([]);
    const { llm } = usePlugin();
    const startRecording = () => __awaiter(void 0, void 0, void 0, function* () {
        const stream = yield navigator.mediaDevices.getUserMedia({ audio: true });
        const mediaRecorder = new MediaRecorder(stream);
        mediaRecorderRef.current = mediaRecorder;
        mediaRecorder.ondataavailable = (event) => {
            audioChunksRef.current.push(event.data);
        };
        mediaRecorder.onstop = () => __awaiter(void 0, void 0, void 0, function* () {
            // BUGFIX: release the microphone. Without stopping the tracks the
            // browser keeps the mic (and its recording indicator) active
            // after the recording has finished.
            stream.getTracks().forEach((track) => track.stop());
            const audioBlob = new Blob(audioChunksRef.current);
            audioChunksRef.current = [];
            onVoiceRecorded(yield llm.getTextFromVoice(audioBlob));
        });
        mediaRecorder.start();
        setIsRecording(true);
    });
    const stopRecording = () => {
        // Guard: calling stop() with no active recorder would throw.
        if (mediaRecorderRef.current) {
            mediaRecorderRef.current.stop();
            setIsRecording(false);
        }
    };
    useImperativeHandle(ref, () => ({
        startRecording,
        stopRecording,
    }));
    return (_jsx("div", { className: className, children: _jsx("button", { onClick: isRecording ? stopRecording : startRecording, children: _jsx(FaMicrophone, { size: iconSize, className: "h-7 w-7 mr-2 " + (isRecording ? "text-red-600" : "") }) }) }));
});
|