@rimori/client 1.0.3 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/components/CRUDModal.js +0 -1
- package/dist/components/ai/Assistant.d.ts +9 -0
- package/dist/components/ai/Assistant.js +59 -0
- package/dist/components/ai/Avatar.d.ts +11 -0
- package/dist/components/ai/Avatar.js +39 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.js +86 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/ai/utils.d.ts +6 -0
- package/dist/components/ai/utils.js +14 -0
- package/dist/components/audio/Playbutton.js +4 -5
- package/dist/components/avatar/Assistant.d.ts +9 -0
- package/dist/components/avatar/Assistant.js +59 -0
- package/dist/components/avatar/Avatar.d.ts +12 -0
- package/dist/components/avatar/Avatar.js +42 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.js +84 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/avatar/utils.d.ts +6 -0
- package/dist/components/avatar/utils.js +14 -0
- package/dist/components.d.ts +9 -0
- package/dist/components.js +10 -0
- package/dist/controller/AIController.d.ts +4 -3
- package/dist/controller/AIController.js +32 -8
- package/dist/controller/ObjectController.d.ts +2 -2
- package/dist/controller/ObjectController.js +4 -5
- package/dist/controller/SettingsController.d.ts +2 -1
- package/dist/controller/SettingsController.js +9 -0
- package/dist/controller/SharedContentController.js +6 -6
- package/dist/core.d.ts +9 -0
- package/dist/core.js +10 -0
- package/dist/hooks/UseChatHook.js +2 -2
- package/dist/index.d.ts +3 -2
- package/dist/index.js +4 -2
- package/dist/plugin/PluginController.d.ts +4 -12
- package/dist/plugin/PluginController.js +43 -70
- package/dist/plugin/RimoriClient.d.ts +85 -32
- package/dist/plugin/RimoriClient.js +98 -77
- package/dist/plugin/fromRimori/EventBus.d.ts +98 -0
- package/dist/plugin/fromRimori/EventBus.js +240 -0
- package/dist/providers/PluginProvider.d.ts +1 -0
- package/dist/providers/PluginProvider.js +10 -12
- package/dist/worker/WorkerSetup.d.ts +6 -0
- package/dist/worker/WorkerSetup.js +79 -0
- package/package.json +16 -3
- package/src/components/CRUDModal.tsx +1 -3
- package/src/components/ai/Assistant.tsx +96 -0
- package/src/components/ai/Avatar.tsx +61 -0
- package/src/components/ai/EmbeddedAssistent/AudioInputField.tsx +64 -0
- package/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.tsx +75 -0
- package/src/components/ai/EmbeddedAssistent/TTS/MessageSender.ts +91 -0
- package/src/components/ai/EmbeddedAssistent/TTS/Player.ts +192 -0
- package/src/components/ai/EmbeddedAssistent/VoiceRecoder.tsx +56 -0
- package/src/components/ai/utils.ts +23 -0
- package/src/components/audio/Playbutton.tsx +4 -5
- package/src/components.ts +10 -0
- package/src/controller/AIController.ts +84 -60
- package/src/controller/ObjectController.ts +4 -6
- package/src/controller/SettingsController.ts +9 -1
- package/src/controller/SharedContentController.ts +6 -6
- package/src/core.ts +10 -0
- package/src/hooks/UseChatHook.ts +2 -2
- package/src/index.ts +4 -2
- package/src/plugin/PluginController.ts +46 -76
- package/src/plugin/RimoriClient.ts +147 -85
- package/src/plugin/fromRimori/EventBus.ts +301 -0
- package/src/plugin/fromRimori/readme.md +2 -0
- package/src/providers/PluginProvider.tsx +12 -14
- package/src/worker/WorkerSetup.ts +80 -0
- package/dist/CRUDModal.d.ts +0 -16
- package/dist/CRUDModal.js +0 -31
- package/dist/MarkdownEditor.d.ts +0 -8
- package/dist/MarkdownEditor.js +0 -46
- package/dist/audio/Playbutton.d.ts +0 -14
- package/dist/audio/Playbutton.js +0 -73
- package/dist/components/hooks/UseChatHook.d.ts +0 -15
- package/dist/components/hooks/UseChatHook.js +0 -21
- package/dist/controller/PluginController.d.ts +0 -14
- package/dist/controller/PluginController.js +0 -30
- package/dist/plugin/AIController copy.d.ts +0 -22
- package/dist/plugin/AIController copy.js +0 -68
- package/dist/plugin/AIController.d.ts +0 -22
- package/dist/plugin/AIController.js +0 -68
- package/dist/plugin/ObjectController.d.ts +0 -34
- package/dist/plugin/ObjectController.js +0 -77
- package/dist/plugin/SettingController.d.ts +0 -13
- package/dist/plugin/SettingController.js +0 -55
- package/dist/plugin/VoiceController.d.ts +0 -2
- package/dist/plugin/VoiceController.js +0 -27
- package/dist/providers/EventEmitter.d.ts +0 -11
- package/dist/providers/EventEmitter.js +0 -41
- package/dist/providers/EventEmitterContext.d.ts +0 -6
- package/dist/providers/EventEmitterContext.js +0 -19
- package/dist/utils/DifficultyConverter.d.ts +0 -3
- package/dist/utils/DifficultyConverter.js +0 -7
- package/dist/utils/constants.d.ts +0 -4
- package/dist/utils/constants.js +0 -12
- package/dist/utils/plugin/Client.d.ts +0 -72
- package/dist/utils/plugin/Client.js +0 -118
- package/dist/utils/plugin/PluginController.d.ts +0 -36
- package/dist/utils/plugin/PluginController.js +0 -119
- package/dist/utils/plugin/PluginUtils.d.ts +0 -2
- package/dist/utils/plugin/PluginUtils.js +0 -23
- package/dist/utils/plugin/RimoriClient.d.ts +0 -72
- package/dist/utils/plugin/RimoriClient.js +0 -118
- package/dist/utils/plugin/ThemeSetter.d.ts +0 -1
- package/dist/utils/plugin/ThemeSetter.js +0 -13
- package/dist/utils/plugin/WhereClauseBuilder.d.ts +0 -24
- package/dist/utils/plugin/WhereClauseBuilder.js +0 -79
- package/dist/utils/plugin/providers/EventEmitter.d.ts +0 -11
- package/dist/utils/plugin/providers/EventEmitter.js +0 -41
- package/dist/utils/plugin/providers/EventEmitterContext.d.ts +0 -6
- package/dist/utils/plugin/providers/EventEmitterContext.js +0 -19
- package/dist/utils/plugin/providers/PluginProvider.d.ts +0 -8
- package/dist/utils/plugin/providers/PluginProvider.js +0 -49
- package/src/providers/EventEmitter.ts +0 -48
- package/src/providers/EventEmitterContext.tsx +0 -27
- package/src/utils/constants.ts +0 -18
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * Builds the initial chat message list from a set of conversation-start
 * instructions. Messages are appended in fixed order with fixed ids:
 * system instructions ('1'), optional user message ('2'), optional
 * assistant message ('3'). Empty/absent fields are skipped.
 *
 * @param {{instructions?: string, userMessage?: string, assistantMessage?: string}} instructions
 * @returns {{id: string, role: string, content: string}[]} seed messages for the chat
 */
export function getFirstMessages(instructions) {
    const messages = [];
    if (instructions.instructions) {
        messages.push({ id: '1', role: 'system', content: instructions.instructions });
    }
    if (instructions.userMessage) {
        messages.push({ id: '2', role: 'user', content: instructions.userMessage });
    }
    if (instructions.assistantMessage) {
        messages.push({ id: '3', role: 'assistant', content: instructions.assistantMessage });
    }
    // Fix: removed leftover debug console.log from published library code.
    return messages;
}
|
|
@@ -12,7 +12,7 @@ import { useState, useEffect } from 'react';
|
|
|
12
12
|
import { FaPlayCircle, FaStopCircle } from "react-icons/fa";
|
|
13
13
|
import { usePlugin } from "../../providers/PluginProvider";
|
|
14
14
|
import { Spinner } from '../Spinner';
|
|
15
|
-
import {
|
|
15
|
+
import { EventBus } from '../../plugin/fromRimori/EventBus';
|
|
16
16
|
export const AudioPlayOptions = [0.8, 0.9, 1.0, 1.1, 1.2, 1.5];
|
|
17
17
|
let isFetchingAudio = false;
|
|
18
18
|
export const AudioPlayer = ({ text, voice, language, hide, playListenerEvent, initialSpeed = 1.0, playOnMount = false, enableSpeedAdjustment = false, }) => {
|
|
@@ -20,12 +20,11 @@ export const AudioPlayer = ({ text, voice, language, hide, playListenerEvent, in
|
|
|
20
20
|
const [speed, setSpeed] = useState(initialSpeed);
|
|
21
21
|
const [isPlaying, setIsPlaying] = useState(false);
|
|
22
22
|
const [isLoading, setIsLoading] = useState(false);
|
|
23
|
-
const {
|
|
24
|
-
const emitter = EmitterSingleton;
|
|
23
|
+
const { llm } = usePlugin();
|
|
25
24
|
useEffect(() => {
|
|
26
25
|
if (!playListenerEvent)
|
|
27
26
|
return;
|
|
28
|
-
|
|
27
|
+
EventBus.on(playListenerEvent, () => togglePlayback());
|
|
29
28
|
}, [playListenerEvent]);
|
|
30
29
|
useEffect(() => {
|
|
31
30
|
audioUrl && setAudioUrl(null);
|
|
@@ -36,7 +35,7 @@ export const AudioPlayer = ({ text, voice, language, hide, playListenerEvent, in
|
|
|
36
35
|
// Function to generate audio from text using API
|
|
37
36
|
const generateAudio = () => __awaiter(void 0, void 0, void 0, function* () {
|
|
38
37
|
setIsLoading(true);
|
|
39
|
-
const blob = yield
|
|
38
|
+
const blob = yield llm.getVoice(text, voice || (language ? "aws_default" : "openai_alloy"), 1, language);
|
|
40
39
|
setAudioUrl(URL.createObjectURL(blob));
|
|
41
40
|
setIsLoading(false);
|
|
42
41
|
});
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { FirstMessages } from './utils';
/** Props for the voice-driven assistant chat component. */
interface Props {
    /** Voice identifier forwarded to the TTS backend. Declared as `any` — TODO confirm intended type. */
    voiceId: any;
    /** Image URL rendered as the pulsing speaking avatar. */
    avatarImageUrl: string;
    /** Called with the first tool invocation's arguments when the agent invokes a tool. */
    onComplete: (result: any) => void;
    /** Optional seed messages used to start the conversation automatically on mount. */
    autoStartConversation?: FirstMessages;
}
export declare function AssistantChat({ avatarImageUrl, voiceId, onComplete, autoStartConversation }: Props): import("react/jsx-runtime").JSX.Element;
export {};
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
import React, { useEffect, useMemo } from 'react';
import { CircleAudioAvatar } from './EmbeddedAssistent/CircleAudioAvatar';
import { AudioInputField } from './EmbeddedAssistent/AudioInputField';
import { MessageSender } from './EmbeddedAssistent/TTS/MessageSender';
import Markdown from 'react-markdown';
import { useChat } from '../../hooks/UseChatHook';
import { usePlugin } from '../../components';
import { getFirstMessages } from './utils';
/**
 * Voice-enabled assistant chat: renders a pulsing avatar, streams assistant
 * replies through TTS (via MessageSender), and shows a result screen once the
 * agent performs a tool invocation, reporting its arguments via `onComplete`.
 */
export function AssistantChat({ avatarImageUrl, voiceId, onComplete, autoStartConversation }) {
    var _a;
    const [oralCommunication, setOralCommunication] = React.useState(true);
    const { llm, event } = usePlugin();
    // One sender per mount (empty dep array); it queues sentence-level TTS chunks.
    const sender = useMemo(() => new MessageSender(llm.getVoice, voiceId), []);
    const { messages, append, isLoading, setMessages } = useChat();
    const lastAssistantMessage = (_a = [...messages].filter((m) => m.role === 'assistant').pop()) === null || _a === void 0 ? void 0 : _a.content;
    useEffect(() => {
        // Forward TTS output loudness to the avatar pulse animation.
        sender.setOnLoudnessChange((value) => event.emit('self.avatar.triggerLoudness', value));
        if (!autoStartConversation) {
            return;
        }
        setMessages(getFirstMessages(autoStartConversation));
        if (autoStartConversation.assistantMessage) {
            // Speak the seeded assistant message right away.
            sender.handleNewText(autoStartConversation.assistantMessage, isLoading);
        }
    }, []);
    useEffect(() => {
        var _a;
        let message = lastAssistantMessage;
        // Only speak when the assistant message is the newest message overall.
        if (message !== ((_a = messages[messages.length - 1]) === null || _a === void 0 ? void 0 : _a.content)) {
            message = undefined;
        }
        sender.handleNewText(message, isLoading);
    }, [messages, isLoading]);
    const lastMessage = messages[messages.length - 1];
    useEffect(() => {
        // A tool invocation ends the conversation; report its arguments.
        const toolInvocations = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.toolInvocations;
        if (toolInvocations && toolInvocations.length > 0) {
            onComplete(toolInvocations[0].args);
        }
    }, [lastMessage]);
    if ((lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.toolInvocations) && lastMessage.toolInvocations.length > 0) {
        const args = lastMessage.toolInvocations[0].args;
        const success = args.explanationUnderstood === "TRUE" || args.studentKnowsTopic === "TRUE";
        return _jsxs("div", { className: "px-5 pt-5 overflow-y-auto text-center", style: { height: "478px" }, children: [_jsx("h1", { className: 'text-center mt-5 mb-5', children: success ? "Great job!" : "You failed" }), _jsx("p", { children: args.improvementHints })] });
    }
    // Fix: height was "4k78px" (invalid CSS value); corrected to "478px" to
    // match the result view above. Also removed leftover debug console.logs.
    return (_jsxs("div", { children: [oralCommunication && _jsx(CircleAudioAvatar, { imageUrl: avatarImageUrl, className: 'mx-auto my-10' }), _jsx("div", { className: "w-full", children: lastAssistantMessage && _jsx("div", { className: "px-5 pt-5 overflow-y-auto remirror-theme", style: { height: "478px" }, children: _jsx(Markdown, { children: lastAssistantMessage }) }) }), _jsx(AudioInputField, { blockSubmission: isLoading, onSubmit: message => {
                    append([{ role: 'user', content: message, id: messages.length.toString() }]);
                }, onAudioControl: voice => {
                    setOralCommunication(voice);
                    sender.setVolume(voice ? 1 : 0);
                } })] }));
}
;
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { Tool } from '../../core';
import { FirstMessages } from './utils';
/** Props for the voice-only avatar conversation component. */
interface Props {
    /** Optional prompt text rendered above the avatar. */
    title?: string;
    /** Voice identifier forwarded to the TTS backend. Declared as `any` — TODO confirm intended type. */
    voiceId: any;
    /** Image URL rendered inside the pulsing circular avatar. */
    avatarImageUrl: string;
    /** Tools the chat agent may invoke; the first invocation ends the conversation. */
    agentTools: Tool[];
    /** Called with the first tool invocation's arguments. */
    onComplete: (result: Record<string, string>) => void;
    /** Optional seed messages used to start the conversation automatically on mount. */
    autoStartConversation?: FirstMessages;
}
export declare function Avatar({ avatarImageUrl, voiceId, onComplete, title, agentTools, autoStartConversation }: Props): import("react/jsx-runtime").JSX.Element;
export {};
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
import { useEffect, useMemo } from 'react';
import { VoiceRecorder } from './EmbeddedAssistent/VoiceRecoder';
import { MessageSender } from './EmbeddedAssistent/TTS/MessageSender';
import { CircleAudioAvatar } from './EmbeddedAssistent/CircleAudioAvatar';
import { useChat } from '../../hooks/UseChatHook';
import { usePlugin } from '../../components';
import { getFirstMessages } from './utils';
// Voice-only avatar conversation: records the user's speech, speaks assistant
// replies via TTS, and calls `onComplete` with the first tool invocation's
// arguments once the agent invokes a tool.
export function Avatar({ avatarImageUrl, voiceId, onComplete, title, agentTools, autoStartConversation }) {
    var _a;
    const { llm, event } = usePlugin();
    // One sender per mount (empty dep array); it streams sentence-level TTS chunks.
    const sender = useMemo(() => new MessageSender(llm.getVoice, voiceId), []);
    const { messages, append, isLoading, lastMessage, setMessages } = useChat(agentTools);
    useEffect(() => {
        console.log("messages", messages);
    }, [messages]);
    useEffect(() => {
        // Drive the avatar pulse animation from TTS output loudness.
        sender.setOnLoudnessChange((value) => event.emit('self.avatar.triggerLoudness', value));
        if (!autoStartConversation)
            return;
        setMessages(getFirstMessages(autoStartConversation));
        // append([{ role: 'user', content: autoStartConversation.userMessage }]);
        if (autoStartConversation.assistantMessage) {
            // console.log("autostartmessages", { autoStartConversation, isLoading });
            // Speak the seeded assistant message immediately.
            sender.handleNewText(autoStartConversation.assistantMessage, isLoading);
        }
    }, []);
    useEffect(() => {
        // Stream any new assistant text to the TTS sender as it arrives.
        if ((lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.role) === 'assistant') {
            sender.handleNewText(lastMessage.content, isLoading);
        }
    }, [lastMessage, isLoading]);
    const invocation = (_a = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.toolInvocations) === null || _a === void 0 ? void 0 : _a[0];
    useEffect(() => {
        // The first tool invocation signals the end of the conversation.
        if (invocation)
            onComplete(invocation.args);
    }, [lastMessage]);
    // NOTE(review): user messages are prefixed with a running "Message(n)"
    // counter — presumably consumed by the agent prompt; confirm with the
    // backend prompt template.
    return (_jsxs("div", { className: 'pb-8', children: [title && _jsx("p", { className: "text-center mt-5 w-3/4 mx-auto rounded-lg dark:text-gray-100", children: title }), _jsx(CircleAudioAvatar, { imageUrl: avatarImageUrl, width: "250px", className: 'mx-auto' }), _jsx("div", { className: 'w-16 h-16 flex text-4xl shadow-lg flex-row justify-center items-center rounded-full mx-auto bg-gray-400 dark:bg-gray-800', children: _jsx(VoiceRecorder, { className: 'w-7', iconSize: '300', onVoiceRecorded: (message) => {
                    append([{ role: 'user', content: "Message(" + Math.floor((messages.length + 1) / 2) + "): " + message, id: messages.length.toString() }]);
                } }) })] }));
}
;
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/** Props for the combined text + voice input row. */
interface AudioInputFieldProps {
    /** Called with the final text when the user submits (Enter, arrow button, or voice). */
    onSubmit: (text: string) => void;
    /** Called when the user toggles the speaker (TTS output); omit to hide the toggle. */
    onAudioControl?: (voice: boolean) => void;
    /** When true, all submission paths are disabled (e.g. while the assistant is responding). */
    blockSubmission?: boolean;
}
export declare function AudioInputField({ onSubmit, onAudioControl, blockSubmission }: AudioInputFieldProps): import("react/jsx-runtime").JSX.Element;
export {};
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
import { useState } from 'react';
import { VoiceRecorder } from './VoiceRecoder';
import { BiSolidRightArrow } from "react-icons/bi";
import { HiMiniSpeakerXMark, HiMiniSpeakerWave } from "react-icons/hi2";
/**
 * Text input row with an optional speaker toggle, a voice recorder, and a
 * submit button. Enter submits, Ctrl+Enter inserts a newline, and completed
 * voice recordings are submitted directly.
 */
export function AudioInputField({ onSubmit, onAudioControl, blockSubmission = false }) {
    const [text, setText] = useState('');
    const [audioEnabled, setAudioEnabled] = useState(true);
    const handleSubmit = (manualText) => {
        if (blockSubmission)
            return;
        const sendableText = manualText || text;
        if (sendableText.trim()) {
            onSubmit(sendableText);
            // Clear shortly after submitting — presumably so the consumer reads
            // the value before the field resets; TODO confirm the delay is needed.
            setTimeout(() => {
                setText('');
            }, 100);
        }
    };
    const handleKeyDown = (e) => {
        if (blockSubmission)
            return;
        if (e.key === 'Enter' && e.ctrlKey) {
            setText(text + '\n');
        }
        else if (e.key === 'Enter') {
            handleSubmit();
        }
    };
    // Fix: textarea class used "focus::outline-none" (double colon — invalid
    // Tailwind variant); corrected to "focus:outline-none". Also removed a
    // leftover debug console.log from the voice-recorded callback.
    return (_jsxs("div", { className: "flex items-center bg-gray-600 pt-2 pb-2 p-2", children: [onAudioControl && _jsx("button", { onClick: () => {
                    onAudioControl(!audioEnabled);
                    setAudioEnabled(!audioEnabled);
                }, className: "cursor-default", children: audioEnabled ? _jsx(HiMiniSpeakerWave, { className: 'w-9 h-9 cursor-pointer' }) : _jsx(HiMiniSpeakerXMark, { className: 'w-9 h-9 cursor-pointer' }) }), _jsx(VoiceRecorder, { onVoiceRecorded: (m) => {
                    handleSubmit(m);
                } }), _jsx("textarea", { value: text, onChange: (e) => setText(e.target.value), onKeyDown: handleKeyDown, className: "flex-1 border-none rounded-lg p-2 text-gray-800 focus:outline-none", placeholder: 'Type a message...', disabled: blockSubmission }), _jsx("button", { onClick: () => handleSubmit(), className: "cursor-default", disabled: blockSubmission, children: _jsx(BiSolidRightArrow, { className: 'w-9 h-10 cursor-pointer' }) })] }));
}
;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
import { useEffect, useRef } from 'react';
import { EventBus } from '../../../core';
// Circular avatar image drawn on a canvas whose outline pulses with loudness
// events received on the 'self.avatar.triggerLoudness' EventBus topic.
export function CircleAudioAvatar({ imageUrl, className, width = "150px" }) {
    const canvasRef = useRef(null);
    useEffect(() => {
        const canvas = canvasRef.current;
        if (canvas) {
            const ctx = canvas.getContext('2d');
            if (ctx) {
                const image = new Image();
                image.src = imageUrl;
                image.onload = () => {
                    // Initial render with no pulse once the image is available.
                    draw(ctx, canvas, image, 0);
                };
                // NOTE(review): emitters in this package pass the raw loudness
                // number to event.emit, but this handler reads event.data.loudness —
                // presumably the EventBus wraps payloads; confirm the payload shape.
                const handleLoudness = (event) => {
                    draw(ctx, canvas, image, event.data.loudness);
                };
                // Subscribe to loudness changes
                const listenerId = EventBus.on('self.avatar.triggerLoudness', handleLoudness);
                return () => {
                    EventBus.off(listenerId);
                };
            }
        }
    }, [imageUrl]);
    // Function to draw on the canvas
    const draw = (ctx, canvas, image, loudness) => {
        if (canvas && ctx) {
            ctx.clearRect(0, 0, canvas.width, canvas.height);
            // Draw pulsing circle
            const radius = Math.min(canvas.width, canvas.height) / 3;
            const centerX = canvas.width / 2;
            const centerY = canvas.height / 2;
            const pulseRadius = radius + loudness / 2.5; // Adjust the divisor for sensitivity
            ctx.beginPath();
            ctx.arc(centerX, centerY, pulseRadius, 0, Math.PI * 2, true);
            ctx.strokeStyle = 'rgba(0, 0, 0, 0.5)';
            ctx.lineWidth = 5;
            ctx.stroke();
            // Draw image circle (clipped to the inner radius)
            ctx.save();
            ctx.beginPath();
            ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
            ctx.closePath();
            ctx.clip();
            ctx.drawImage(image, centerX - radius, centerY - radius, radius * 2, radius * 2);
            ctx.restore();
            // Draw circular frame around the image
            ctx.beginPath();
            ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
            ctx.strokeStyle = 'rgba(20,20, 20, 0.9)';
            ctx.lineWidth = 5; // Adjust the width of the frame as needed
            ctx.stroke();
        }
    };
    // Fixed 500x500 backing store scaled down via the CSS width prop.
    return _jsx("canvas", { ref: canvasRef, className: className, width: 500, height: 500, style: { width } });
}
;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/** Backend that converts a piece of text to speech audio, returned as a Blob. */
type VoiceBackend = (text: string, voice?: string, speed?: number) => Promise<Blob>;
/**
 * Splits streamed assistant text into sentences, fetches TTS audio for each
 * new sentence exactly once, and queues the chunks on a player so they play
 * back in order.
 */
export declare class MessageSender {
    private player;
    /** Sentences already sent to TTS, to avoid re-fetching on every stream update. */
    private fetchedSentences;
    /** Loading state of the previous handleNewText call; a rising edge resets the player. */
    private lastLoading;
    private voice;
    /** NOTE(review): stored but not read by this class — confirm it is still needed. */
    private model;
    private voiceBackend;
    constructor(voiceBackend: VoiceBackend, voice?: string, model?: string);
    private getCompletedSentences;
    /** Feed the latest streamed text; speaks any newly completed sentences. */
    handleNewText(currentText: string | undefined, isLoading: boolean): Promise<void>;
    private generateSpeech;
    /** Replay the already-queued audio from where playback stopped. */
    play(): void;
    /** Stop playback and clear the player queue. */
    stop(): void;
    private reset;
    /** Set output volume (0 mutes, 1 is full volume). */
    setVolume(volume: number): void;
    /** Register a callback receiving the playback loudness (0-100 scale). */
    setOnLoudnessChange(callback: (value: number) => void): void;
}
export {};
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
import { ChunkedAudioPlayer } from './Player';
/**
 * Splits streamed assistant text into sentences, fetches TTS audio for each
 * new sentence exactly once, and queues the chunks on a ChunkedAudioPlayer
 * so they play back in arrival order.
 */
export class MessageSender {
    /**
     * @param voiceBackend async (text, voice, speed) -> Blob TTS function
     * @param voice voice id passed to the backend (default 'alloy')
     * @param model stored for reference; not read by this class directly
     */
    constructor(voiceBackend, voice = 'alloy', model = 'openai') {
        this.player = new ChunkedAudioPlayer();
        this.fetchedSentences = new Set();
        this.lastLoading = false;
        this.voiceBackend = voiceBackend;
        this.voice = voice;
        this.model = model;
    }
    /**
     * Extracts completed sentences (ending in , . ? ! : or newline) from the
     * streamed text. Once streaming has finished (!isLoading), any trailing
     * text without a terminator is included as a final sentence.
     */
    getCompletedSentences(currentText, isLoading) {
        const pattern = /(.+?[,.?!:\n]+)/g;
        const result = [];
        let match;
        while ((match = pattern.exec(currentText)) !== null) {
            const sentence = match[0].trim();
            if (sentence.length > 0) {
                result.push(sentence);
            }
        }
        if (!isLoading) {
            if (result.length === 0) {
                // Fix: the original read result[result.length - 1] here and crashed
                // with a TypeError on text containing no sentence terminator at all;
                // treat the whole remaining text as the final sentence instead.
                const leftover = currentText.trim();
                if (leftover.length > 0) {
                    result.push(leftover);
                }
            }
            else {
                const lastFullSentence = result[result.length - 1];
                const leftoverIndex = currentText.lastIndexOf(lastFullSentence) + lastFullSentence.length;
                if (leftoverIndex < currentText.length) {
                    const trailing = currentText.slice(leftoverIndex).trim();
                    // Fix: skip whitespace-only leftovers instead of queueing an
                    // empty string for TTS generation.
                    if (trailing.length > 0) {
                        result.push(trailing);
                    }
                }
            }
        }
        return result;
    }
    /**
     * Feed the latest streamed text. On a new stream (loading edge) the player
     * is reset; each not-yet-seen sentence is converted to audio and queued at
     * its ordinal position so playback stays in order.
     */
    handleNewText(currentText, isLoading) {
        return __awaiter(this, void 0, void 0, function* () {
            if (!this.lastLoading && isLoading) {
                this.reset();
            }
            this.lastLoading = isLoading;
            if (!currentText) {
                return;
            }
            const sentences = this.getCompletedSentences(currentText, isLoading);
            for (let i = 0; i < sentences.length; i++) {
                const sentence = sentences[i];
                if (!this.fetchedSentences.has(sentence)) {
                    this.fetchedSentences.add(sentence);
                    const audioData = yield this.generateSpeech(sentence);
                    yield this.player.addChunk(audioData, i);
                }
            }
        });
    }
    // Fetches TTS audio for one sentence and returns it as an ArrayBuffer.
    generateSpeech(sentence) {
        return __awaiter(this, void 0, void 0, function* () {
            const blob = yield this.voiceBackend(sentence, this.voice, 1.0);
            return yield blob.arrayBuffer();
        });
    }
    /** Replay the already-queued audio from where playback stopped. */
    play() {
        this.player.playAgain();
    }
    /** Stop playback and clear the player queue. */
    stop() {
        this.player.stopPlayback();
    }
    // Clears sentence bookkeeping and reinitializes the player for a new stream.
    reset() {
        this.stop();
        this.fetchedSentences.clear();
        this.player.reset();
    }
    /** Set output volume (0 mutes, 1 is full volume). */
    setVolume(volume) {
        this.player.setVolume(volume);
    }
    /** Register a callback receiving the playback loudness (0-100 scale). */
    setOnLoudnessChange(callback) {
        this.player.setOnLoudnessChange((loudness) => {
            callback(loudness);
        });
    }
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Plays ordered audio chunks (ArrayBuffers) through the Web Audio API as they
 * arrive, and reports playback loudness on a 0-100 scale for avatar animation.
 */
export declare class ChunkedAudioPlayer {
    private audioContext;
    /** Sparse array of chunks indexed by their ordinal position. */
    private chunkQueue;
    private isPlaying;
    private analyser;
    /** Scratch buffer for the analyser's time-domain samples. */
    private dataArray;
    /** While true, the loudness monitor loop keeps running. */
    private shouldMonitorLoudness;
    /** Guards against starting more than one monitor loop. */
    private isMonitoring;
    /** requestAnimationFrame handle for the loudness loop. */
    private handle;
    private volume;
    private loudnessCallback;
    /** Index of the next chunk to play. */
    private currentIndex;
    private startedPlaying;
    constructor();
    private init;
    /** Register a callback receiving loudness on a 0-100 scale. */
    setOnLoudnessChange(callback: (value: number) => void): void;
    /** Set output gain (0 mutes, 1 is full volume). */
    setVolume(volume: number): void;
    /** Store a chunk at its ordinal position; playback starts on chunk 0. */
    addChunk(chunk: ArrayBuffer, position: number): Promise<void>;
    private playChunks;
    /** Stop playback, clear the queue and halt loudness monitoring. */
    stopPlayback(): void;
    private playChunk;
    /** Restart playback of the already-queued chunks if nothing is playing. */
    playAgain(): Promise<void>;
    private monitorLoudness;
    /** Stop and reinitialize the audio pipeline for a fresh stream. */
    reset(): void;
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
/**
 * Plays ordered audio chunks (ArrayBuffers) through the Web Audio API as they
 * arrive, and reports playback loudness on a 0-100 scale via a callback.
 */
export class ChunkedAudioPlayer {
    constructor() {
        this.chunkQueue = [];
        this.isPlaying = false;
        this.shouldMonitorLoudness = true;
        this.isMonitoring = false;
        this.handle = 0;
        this.volume = 1.0;
        this.loudnessCallback = () => { };
        this.currentIndex = 0;
        this.startedPlaying = false;
        this.init();
    }
    // (Re)creates the AudioContext and analyser used for playback and loudness.
    init() {
        this.audioContext = new AudioContext();
        this.analyser = this.audioContext.createAnalyser();
        this.analyser.fftSize = 256; // small FFT: fast updates, coarse resolution
        const bufferLength = this.analyser.frequencyBinCount;
        this.dataArray = new Uint8Array(bufferLength); // holds time-domain samples
    }
    /** Register a callback receiving loudness on a 0-100 scale. */
    setOnLoudnessChange(callback) {
        this.loudnessCallback = callback;
    }
    /** Set output gain (0 mutes, 1 is full volume). */
    setVolume(volume) {
        this.volume = volume;
    }
    /** Store a chunk at its ordinal position; playback starts on chunk 0. */
    addChunk(chunk, position) {
        return __awaiter(this, void 0, void 0, function* () {
            this.chunkQueue[position] = chunk;
            if (position === 0 && !this.startedPlaying) {
                this.startedPlaying = true;
                this.playChunks();
            }
        });
    }
    // Plays queued chunks strictly in order, waiting for out-of-order arrivals.
    playChunks() {
        if (this.isPlaying)
            return;
        if (!this.chunkQueue[this.currentIndex]) {
            // Chunk not yet arrived: retry shortly.
            setTimeout(() => this.playChunks(), 10);
            // Fix: the original fell through here, marked isPlaying, "played" the
            // missing (undefined) chunk and advanced currentIndex — skipping audio
            // and racing the scheduled retry. Bail out until the chunk arrives.
            return;
        }
        this.isPlaying = true;
        this.playChunk(this.chunkQueue[this.currentIndex]).then(() => {
            this.isPlaying = false;
            this.currentIndex++;
            if (this.chunkQueue[this.currentIndex]) {
                this.shouldMonitorLoudness = true;
                this.playChunks();
            }
            else {
                // Grace period: a later chunk may still be in flight.
                setTimeout(() => {
                    if (this.chunkQueue.length > this.currentIndex) {
                        this.playChunks();
                    }
                    else {
                        this.startedPlaying = false;
                        this.shouldMonitorLoudness = false;
                    }
                }, 1000);
            }
        });
    }
    /** Stop playback, clear the queue and halt loudness monitoring. */
    stopPlayback() {
        this.isPlaying = false;
        this.chunkQueue = [];
        this.startedPlaying = false;
        this.shouldMonitorLoudness = false;
        cancelAnimationFrame(this.handle);
    }
    // Decodes and plays one chunk; resolves when the chunk finishes playing.
    playChunk(chunk) {
        if (!chunk) {
            return Promise.resolve();
        }
        return new Promise((resolve) => {
            const source = this.audioContext.createBufferSource();
            // slice(0) copies the buffer: decodeAudioData detaches its input.
            this.audioContext.decodeAudioData(chunk.slice(0)).then((audioBuffer) => {
                source.buffer = audioBuffer;
                // Create a GainNode for volume control
                const gainNode = this.audioContext.createGain();
                gainNode.gain.value = this.volume;
                // source -> gain -> analyser -> destination (speakers)
                source.connect(gainNode);
                gainNode.connect(this.analyser);
                this.analyser.connect(this.audioContext.destination);
                source.start(0);
                gainNode.gain.value = this.volume;
                source.onended = () => {
                    resolve();
                };
                // Start monitoring loudness only once
                if (!this.isMonitoring) {
                    this.isMonitoring = true;
                    this.shouldMonitorLoudness = true;
                    this.monitorLoudness();
                }
            });
        });
    }
    /** Restart playback of the already-queued chunks if nothing is playing. */
    playAgain() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this.chunkQueue.length > 0 && !this.isPlaying) {
                this.playChunks();
            }
        });
    }
    // RMS-based loudness loop; reports a 0-100 value via loudnessCallback.
    monitorLoudness() {
        // Stop monitoring when the flag is false
        if (!this.shouldMonitorLoudness) {
            cancelAnimationFrame(this.handle);
            this.loudnessCallback(0);
            return;
        }
        // Snapshot of the waveform from the analyser
        this.analyser.getByteTimeDomainData(this.dataArray);
        // RMS (root mean square) of the samples approximates perceived loudness
        let sum = 0;
        for (let i = 0; i < this.dataArray.length; i++) {
            const value = this.dataArray[i] / 128.0 - 1.0; // normalize to [-1, 1]
            sum += value * value;
        }
        const rms = Math.sqrt(sum / this.dataArray.length);
        // rms === 0 means silence; skip to avoid log10(0)
        if (rms !== 0) {
            let loudnessInDb = 20 * Math.log10(rms); // convert to dB
            // Clamp to an empirical dB window, then map linearly to 0-100.
            const minDb = -57;
            const maxDb = -15;
            if (loudnessInDb < minDb) {
                loudnessInDb = minDb;
            }
            if (loudnessInDb > maxDb) {
                loudnessInDb = maxDb;
            }
            const loudnessScale = ((loudnessInDb - minDb) / (maxDb - minDb)) * 100;
            this.loudnessCallback(loudnessScale);
        }
        // Schedule the next sample for continuous monitoring.
        this.handle = requestAnimationFrame(() => this.monitorLoudness());
    }
    /** Stop and reinitialize the audio pipeline for a fresh stream. */
    reset() {
        this.stopPlayback();
        this.currentIndex = 0;
        this.shouldMonitorLoudness = true;
        this.isMonitoring = false;
        this.isPlaying = false;
        this.init();
    }
}
|