@rimori/react-client 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +142 -1
- package/dist/{react-client/src/components → components}/ContextMenu.js +1 -2
- package/dist/components/Spinner.d.ts +0 -7
- package/dist/components/Spinner.js +1 -4
- package/dist/components/ai/Assistant.js +1 -1
- package/dist/components/ai/Avatar.d.ts +2 -3
- package/dist/components/ai/Avatar.js +6 -4
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.js +1 -1
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.js +1 -1
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.js +2 -2
- package/dist/components/audio/Playbutton.js +13 -9
- package/dist/hooks/I18nHooks.d.ts +1 -1
- package/dist/{react-client/src/plugin → hooks}/ThemeSetter.d.ts +1 -1
- package/dist/hooks/ThemeSetter.js +31 -0
- package/dist/hooks/UseChatHook.d.ts +2 -2
- package/dist/index.d.ts +9 -0
- package/dist/index.js +9 -0
- package/dist/{react-client/plugin → plugin}/ThemeSetter.d.ts +1 -1
- package/dist/plugin/ThemeSetter.js +31 -0
- package/dist/providers/PluginProvider.d.ts +2 -1
- package/dist/providers/PluginProvider.js +10 -7
- package/dist/{react-client/src/utils → utils}/FullscreenUtils.js +2 -2
- package/package.json +4 -7
- package/src/components/ContextMenu.tsx +2 -2
- package/src/components/ai/Avatar.tsx +9 -4
- package/src/components/ai/EmbeddedAssistent/AudioInputField.tsx +1 -1
- package/src/components/audio/Playbutton.tsx +28 -1
- package/src/hooks/ThemeSetter.ts +40 -0
- package/src/index.ts +10 -0
- package/src/providers/PluginProvider.tsx +12 -8
- package/tsconfig.json +6 -12
- package/dist/components/components/ContextMenu.d.ts +0 -10
- package/dist/components/components/ContextMenu.js +0 -135
- package/dist/react-client/plugin/ThemeSetter.js +0 -19
- package/dist/react-client/src/components/MarkdownEditor.d.ts +0 -8
- package/dist/react-client/src/components/MarkdownEditor.js +0 -48
- package/dist/react-client/src/components/Spinner.d.ts +0 -8
- package/dist/react-client/src/components/Spinner.js +0 -4
- package/dist/react-client/src/components/ai/Assistant.d.ts +0 -9
- package/dist/react-client/src/components/ai/Assistant.js +0 -58
- package/dist/react-client/src/components/ai/Avatar.d.ts +0 -14
- package/dist/react-client/src/components/ai/Avatar.js +0 -59
- package/dist/react-client/src/components/ai/EmbeddedAssistent/AudioInputField.d.ts +0 -7
- package/dist/react-client/src/components/ai/EmbeddedAssistent/AudioInputField.js +0 -37
- package/dist/react-client/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.d.ts +0 -8
- package/dist/react-client/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.js +0 -79
- package/dist/react-client/src/components/ai/EmbeddedAssistent/TTS/MessageSender.d.ts +0 -19
- package/dist/react-client/src/components/ai/EmbeddedAssistent/TTS/MessageSender.js +0 -91
- package/dist/react-client/src/components/ai/EmbeddedAssistent/TTS/Player.d.ts +0 -27
- package/dist/react-client/src/components/ai/EmbeddedAssistent/TTS/Player.js +0 -185
- package/dist/react-client/src/components/ai/utils.d.ts +0 -6
- package/dist/react-client/src/components/ai/utils.js +0 -13
- package/dist/react-client/src/components/audio/Playbutton.d.ts +0 -15
- package/dist/react-client/src/components/audio/Playbutton.js +0 -82
- package/dist/react-client/src/components/components/ContextMenu.d.ts +0 -10
- package/dist/react-client/src/components/components/ContextMenu.js +0 -135
- package/dist/react-client/src/hooks/I18nHooks.d.ts +0 -11
- package/dist/react-client/src/hooks/I18nHooks.js +0 -25
- package/dist/react-client/src/hooks/UseChatHook.d.ts +0 -10
- package/dist/react-client/src/hooks/UseChatHook.js +0 -29
- package/dist/react-client/src/plugin/ThemeSetter.js +0 -19
- package/dist/react-client/src/providers/PluginProvider.d.ts +0 -12
- package/dist/react-client/src/providers/PluginProvider.js +0 -142
- package/dist/react-client/src/utils/PluginUtils.d.ts +0 -2
- package/dist/react-client/src/utils/PluginUtils.js +0 -23
- package/dist/rimori-client/src/cli/types/DatabaseTypes.d.ts +0 -103
- package/dist/rimori-client/src/cli/types/DatabaseTypes.js +0 -2
- package/dist/rimori-client/src/controller/AIController.d.ts +0 -15
- package/dist/rimori-client/src/controller/AIController.js +0 -255
- package/dist/rimori-client/src/controller/AccomplishmentController.d.ts +0 -38
- package/dist/rimori-client/src/controller/AccomplishmentController.js +0 -112
- package/dist/rimori-client/src/controller/AudioController.d.ts +0 -37
- package/dist/rimori-client/src/controller/AudioController.js +0 -68
- package/dist/rimori-client/src/controller/ExerciseController.d.ts +0 -54
- package/dist/rimori-client/src/controller/ExerciseController.js +0 -74
- package/dist/rimori-client/src/controller/ObjectController.d.ts +0 -42
- package/dist/rimori-client/src/controller/ObjectController.js +0 -76
- package/dist/rimori-client/src/controller/SettingsController.d.ts +0 -79
- package/dist/rimori-client/src/controller/SettingsController.js +0 -118
- package/dist/rimori-client/src/controller/SharedContentController.d.ts +0 -106
- package/dist/rimori-client/src/controller/SharedContentController.js +0 -285
- package/dist/rimori-client/src/controller/TranslationController.d.ts +0 -38
- package/dist/rimori-client/src/controller/TranslationController.js +0 -106
- package/dist/rimori-client/src/controller/VoiceController.d.ts +0 -9
- package/dist/rimori-client/src/controller/VoiceController.js +0 -37
- package/dist/rimori-client/src/fromRimori/EventBus.d.ts +0 -101
- package/dist/rimori-client/src/fromRimori/EventBus.js +0 -263
- package/dist/rimori-client/src/fromRimori/PluginTypes.d.ts +0 -174
- package/dist/rimori-client/src/fromRimori/PluginTypes.js +0 -1
- package/dist/rimori-client/src/index.d.ts +0 -11
- package/dist/rimori-client/src/index.js +0 -10
- package/dist/rimori-client/src/plugin/CommunicationHandler.d.ts +0 -48
- package/dist/rimori-client/src/plugin/CommunicationHandler.js +0 -234
- package/dist/rimori-client/src/plugin/Logger.d.ts +0 -73
- package/dist/rimori-client/src/plugin/Logger.js +0 -308
- package/dist/rimori-client/src/plugin/RimoriClient.d.ts +0 -258
- package/dist/rimori-client/src/plugin/RimoriClient.js +0 -375
- package/dist/rimori-client/src/plugin/StandaloneClient.d.ts +0 -17
- package/dist/rimori-client/src/plugin/StandaloneClient.js +0 -115
- package/dist/rimori-client/src/utils/difficultyConverter.d.ts +0 -4
- package/dist/rimori-client/src/utils/difficultyConverter.js +0 -10
- package/dist/rimori-client/src/utils/endpoint.d.ts +0 -2
- package/dist/rimori-client/src/utils/endpoint.js +0 -2
- package/dist/utils/PluginUtils.d.ts +0 -2
- package/dist/utils/PluginUtils.js +0 -23
- package/index.ts +0 -6
- package/src/components/MarkdownEditor.tsx +0 -144
- package/src/components/Spinner.tsx +0 -29
- package/src/plugin/ThemeSetter.ts +0 -23
- package/src/utils/FullscreenUtils.ts +0 -22
- /package/dist/{react-client/src/components → components}/ContextMenu.d.ts +0 -0
- /package/dist/{react-client/src/components/ai/EmbeddedAssistent/VoiceRecoder.d.ts → components/ai/EmbeddedAssistent/VoiceRecorder.d.ts} +0 -0
- /package/dist/{react-client/src/components/ai/EmbeddedAssistent/VoiceRecoder.js → components/ai/EmbeddedAssistent/VoiceRecorder.js} +0 -0
- /package/dist/{react-client/src/utils → utils}/FullscreenUtils.d.ts +0 -0
- /package/src/components/ai/EmbeddedAssistent/{VoiceRecoder.tsx → VoiceRecorder.tsx} +0 -0
|
@@ -1,79 +0,0 @@
|
|
|
1
|
-
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
-
import { useEffect, useRef } from 'react';
|
|
3
|
-
import { EventBus } from '@rimori/client';
|
|
4
|
-
export function CircleAudioAvatar({ imageUrl, className, isDarkTheme = false, width = '150px', }) {
|
|
5
|
-
const canvasRef = useRef(null);
|
|
6
|
-
const currentLoudnessRef = useRef(0);
|
|
7
|
-
const targetLoudnessRef = useRef(0);
|
|
8
|
-
const animationFrameRef = useRef(null);
|
|
9
|
-
useEffect(() => {
|
|
10
|
-
const canvas = canvasRef.current;
|
|
11
|
-
if (canvas) {
|
|
12
|
-
const ctx = canvas.getContext('2d');
|
|
13
|
-
if (ctx) {
|
|
14
|
-
const image = new Image();
|
|
15
|
-
image.src = imageUrl;
|
|
16
|
-
let isMounted = true;
|
|
17
|
-
image.onload = () => {
|
|
18
|
-
if (!isMounted)
|
|
19
|
-
return;
|
|
20
|
-
draw(ctx, canvas, image, 0);
|
|
21
|
-
const animate = () => {
|
|
22
|
-
const decayRate = 0.06;
|
|
23
|
-
if (currentLoudnessRef.current > targetLoudnessRef.current) {
|
|
24
|
-
currentLoudnessRef.current = Math.max(targetLoudnessRef.current, currentLoudnessRef.current - decayRate * currentLoudnessRef.current);
|
|
25
|
-
}
|
|
26
|
-
else {
|
|
27
|
-
currentLoudnessRef.current = targetLoudnessRef.current;
|
|
28
|
-
}
|
|
29
|
-
draw(ctx, canvas, image, currentLoudnessRef.current);
|
|
30
|
-
animationFrameRef.current = requestAnimationFrame(animate);
|
|
31
|
-
};
|
|
32
|
-
animationFrameRef.current = requestAnimationFrame(animate);
|
|
33
|
-
};
|
|
34
|
-
const handleLoudness = ({ data }) => {
|
|
35
|
-
const newLoudness = data.loudness;
|
|
36
|
-
if (newLoudness > currentLoudnessRef.current) {
|
|
37
|
-
currentLoudnessRef.current = newLoudness;
|
|
38
|
-
}
|
|
39
|
-
targetLoudnessRef.current = newLoudness;
|
|
40
|
-
};
|
|
41
|
-
const listener = EventBus.on('self.avatar.triggerLoudness', handleLoudness);
|
|
42
|
-
return () => {
|
|
43
|
-
isMounted = false;
|
|
44
|
-
listener.off();
|
|
45
|
-
if (animationFrameRef.current) {
|
|
46
|
-
cancelAnimationFrame(animationFrameRef.current);
|
|
47
|
-
}
|
|
48
|
-
};
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
}, [imageUrl]);
|
|
52
|
-
const draw = (ctx, canvas, image, loudness) => {
|
|
53
|
-
if (canvas && ctx) {
|
|
54
|
-
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
|
55
|
-
const radius = Math.min(canvas.width, canvas.height) / 3;
|
|
56
|
-
const centerX = canvas.width / 2;
|
|
57
|
-
const centerY = canvas.height / 2;
|
|
58
|
-
const pulseRadius = radius + loudness / 2.5;
|
|
59
|
-
ctx.beginPath();
|
|
60
|
-
ctx.arc(centerX, centerY, pulseRadius, 0, Math.PI * 2, true);
|
|
61
|
-
ctx.strokeStyle = isDarkTheme ? 'rgba(255, 255, 255, 0.5)' : 'rgba(0, 0, 0, 0.5)';
|
|
62
|
-
ctx.lineWidth = 5;
|
|
63
|
-
ctx.stroke();
|
|
64
|
-
ctx.save();
|
|
65
|
-
ctx.beginPath();
|
|
66
|
-
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
|
|
67
|
-
ctx.closePath();
|
|
68
|
-
ctx.clip();
|
|
69
|
-
ctx.drawImage(image, centerX - radius, centerY - radius, radius * 2, radius * 2);
|
|
70
|
-
ctx.restore();
|
|
71
|
-
ctx.beginPath();
|
|
72
|
-
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
|
|
73
|
-
ctx.strokeStyle = isDarkTheme ? 'rgba(255, 255, 255, 0.9)' : 'rgba(0, 0, 0, 0.9)';
|
|
74
|
-
ctx.lineWidth = 5;
|
|
75
|
-
ctx.stroke();
|
|
76
|
-
}
|
|
77
|
-
};
|
|
78
|
-
return _jsx("canvas", { ref: canvasRef, className: className, width: 500, height: 500, style: { width } });
|
|
79
|
-
}
|
|
@@ -1,19 +0,0 @@
|
|
|
1
|
-
type VoiceBackend = (text: string, voice?: string, speed?: number) => Promise<Blob>;
|
|
2
|
-
export declare class MessageSender {
|
|
3
|
-
private player;
|
|
4
|
-
private fetchedSentences;
|
|
5
|
-
private lastLoading;
|
|
6
|
-
private voice;
|
|
7
|
-
private voiceBackend;
|
|
8
|
-
constructor(voiceBackend: VoiceBackend, voice: string);
|
|
9
|
-
private getCompletedSentences;
|
|
10
|
-
handleNewText(currentText: string | undefined, isLoading: boolean): Promise<void>;
|
|
11
|
-
private generateSpeech;
|
|
12
|
-
play(): void;
|
|
13
|
-
stop(): void;
|
|
14
|
-
private reset;
|
|
15
|
-
setVolume(volume: number): void;
|
|
16
|
-
setOnLoudnessChange(callback: (value: number) => void): void;
|
|
17
|
-
setOnEndOfSpeech(callback: () => void): void;
|
|
18
|
-
}
|
|
19
|
-
export {};
|
|
@@ -1,91 +0,0 @@
|
|
|
1
|
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
-
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
-
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
-
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
-
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
-
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
-
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
-
});
|
|
9
|
-
};
|
|
10
|
-
import { ChunkedAudioPlayer } from './Player';
|
|
11
|
-
export class MessageSender {
|
|
12
|
-
constructor(voiceBackend, voice) {
|
|
13
|
-
this.player = new ChunkedAudioPlayer();
|
|
14
|
-
this.fetchedSentences = new Set();
|
|
15
|
-
this.lastLoading = false;
|
|
16
|
-
if ((voice === null || voice === void 0 ? void 0 : voice.split('_').length) !== 2) {
|
|
17
|
-
throw new Error("Invalid voice id format '" + voice + "'. Voice id needs to look like <provider>_<voice_id>");
|
|
18
|
-
}
|
|
19
|
-
this.voiceBackend = voiceBackend;
|
|
20
|
-
this.voice = voice;
|
|
21
|
-
}
|
|
22
|
-
getCompletedSentences(currentText, isLoading) {
|
|
23
|
-
// Split the text based on the following characters: .?!
|
|
24
|
-
// Only split on : when followed by a space
|
|
25
|
-
const pattern = /(.+?[.?!]|.+?:\s+|.+?\n+)/g;
|
|
26
|
-
const result = [];
|
|
27
|
-
let match;
|
|
28
|
-
while ((match = pattern.exec(currentText)) !== null) {
|
|
29
|
-
const sentence = match[0].trim();
|
|
30
|
-
if (sentence.length > 0) {
|
|
31
|
-
result.push(sentence);
|
|
32
|
-
}
|
|
33
|
-
}
|
|
34
|
-
if (!isLoading) {
|
|
35
|
-
const lastFullSentence = result[result.length - 1];
|
|
36
|
-
const leftoverIndex = currentText.lastIndexOf(lastFullSentence) + lastFullSentence.length;
|
|
37
|
-
if (leftoverIndex < currentText.length) {
|
|
38
|
-
result.push(currentText.slice(leftoverIndex).trim());
|
|
39
|
-
}
|
|
40
|
-
}
|
|
41
|
-
return result;
|
|
42
|
-
}
|
|
43
|
-
handleNewText(currentText, isLoading) {
|
|
44
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
45
|
-
if (!this.lastLoading && isLoading) {
|
|
46
|
-
this.reset();
|
|
47
|
-
}
|
|
48
|
-
this.lastLoading = isLoading;
|
|
49
|
-
if (!currentText) {
|
|
50
|
-
return;
|
|
51
|
-
}
|
|
52
|
-
const sentences = this.getCompletedSentences(currentText, isLoading);
|
|
53
|
-
for (let i = 0; i < sentences.length; i++) {
|
|
54
|
-
const sentence = sentences[i];
|
|
55
|
-
if (!this.fetchedSentences.has(sentence)) {
|
|
56
|
-
this.fetchedSentences.add(sentence);
|
|
57
|
-
const audioData = yield this.generateSpeech(sentence);
|
|
58
|
-
yield this.player.addChunk(audioData, i);
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
});
|
|
62
|
-
}
|
|
63
|
-
generateSpeech(sentence) {
|
|
64
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
65
|
-
const blob = yield this.voiceBackend(sentence, this.voice, 1.0);
|
|
66
|
-
return yield blob.arrayBuffer();
|
|
67
|
-
});
|
|
68
|
-
}
|
|
69
|
-
play() {
|
|
70
|
-
this.player.playAgain();
|
|
71
|
-
}
|
|
72
|
-
stop() {
|
|
73
|
-
this.player.stopPlayback();
|
|
74
|
-
}
|
|
75
|
-
reset() {
|
|
76
|
-
this.stop();
|
|
77
|
-
this.fetchedSentences.clear();
|
|
78
|
-
this.player.reset();
|
|
79
|
-
}
|
|
80
|
-
setVolume(volume) {
|
|
81
|
-
this.player.setVolume(volume);
|
|
82
|
-
}
|
|
83
|
-
setOnLoudnessChange(callback) {
|
|
84
|
-
this.player.setOnLoudnessChange((loudness) => {
|
|
85
|
-
callback(loudness);
|
|
86
|
-
});
|
|
87
|
-
}
|
|
88
|
-
setOnEndOfSpeech(callback) {
|
|
89
|
-
this.player.setOnEndOfSpeech(callback);
|
|
90
|
-
}
|
|
91
|
-
}
|
|
@@ -1,27 +0,0 @@
|
|
|
1
|
-
export declare class ChunkedAudioPlayer {
|
|
2
|
-
private audioContext;
|
|
3
|
-
private chunkQueue;
|
|
4
|
-
private isPlaying;
|
|
5
|
-
private analyser;
|
|
6
|
-
private dataArray;
|
|
7
|
-
private shouldMonitorLoudness;
|
|
8
|
-
private isMonitoring;
|
|
9
|
-
private handle;
|
|
10
|
-
private volume;
|
|
11
|
-
private loudnessCallback;
|
|
12
|
-
private currentIndex;
|
|
13
|
-
private startedPlaying;
|
|
14
|
-
private onEndOfSpeech;
|
|
15
|
-
constructor();
|
|
16
|
-
private init;
|
|
17
|
-
setOnLoudnessChange(callback: (value: number) => void): void;
|
|
18
|
-
setVolume(volume: number): void;
|
|
19
|
-
addChunk(chunk: ArrayBuffer, position: number): Promise<void>;
|
|
20
|
-
private playChunks;
|
|
21
|
-
stopPlayback(): void;
|
|
22
|
-
private playChunk;
|
|
23
|
-
playAgain(): Promise<void>;
|
|
24
|
-
private monitorLoudness;
|
|
25
|
-
reset(): void;
|
|
26
|
-
setOnEndOfSpeech(callback: () => void): void;
|
|
27
|
-
}
|
|
@@ -1,185 +0,0 @@
|
|
|
1
|
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
-
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
-
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
-
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
-
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
-
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
-
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
-
});
|
|
9
|
-
};
|
|
10
|
-
export class ChunkedAudioPlayer {
|
|
11
|
-
constructor() {
|
|
12
|
-
this.chunkQueue = [];
|
|
13
|
-
this.isPlaying = false;
|
|
14
|
-
this.shouldMonitorLoudness = true;
|
|
15
|
-
this.isMonitoring = false;
|
|
16
|
-
this.handle = 0;
|
|
17
|
-
this.volume = 1.0;
|
|
18
|
-
this.loudnessCallback = () => { };
|
|
19
|
-
this.currentIndex = 0;
|
|
20
|
-
this.startedPlaying = false;
|
|
21
|
-
this.onEndOfSpeech = () => { };
|
|
22
|
-
this.init();
|
|
23
|
-
}
|
|
24
|
-
init() {
|
|
25
|
-
this.audioContext = new AudioContext();
|
|
26
|
-
this.analyser = this.audioContext.createAnalyser();
|
|
27
|
-
this.analyser.fftSize = 256; // Set the FFT size (smaller values provide faster updates, larger ones give better resolution)
|
|
28
|
-
const bufferLength = this.analyser.frequencyBinCount;
|
|
29
|
-
this.dataArray = new Uint8Array(bufferLength); // Array to hold frequency data
|
|
30
|
-
}
|
|
31
|
-
setOnLoudnessChange(callback) {
|
|
32
|
-
this.loudnessCallback = callback;
|
|
33
|
-
}
|
|
34
|
-
setVolume(volume) {
|
|
35
|
-
this.volume = volume;
|
|
36
|
-
}
|
|
37
|
-
addChunk(chunk, position) {
|
|
38
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
39
|
-
console.log('Adding chunk', position, chunk);
|
|
40
|
-
this.chunkQueue[position] = chunk;
|
|
41
|
-
// console.log("received chunk", {
|
|
42
|
-
// chunkQueue: this.chunkQueue.length,
|
|
43
|
-
// isPlaying: this.isPlaying,
|
|
44
|
-
// })
|
|
45
|
-
if (position === 0 && !this.startedPlaying) {
|
|
46
|
-
this.startedPlaying = true;
|
|
47
|
-
this.playChunks();
|
|
48
|
-
}
|
|
49
|
-
});
|
|
50
|
-
}
|
|
51
|
-
playChunks() {
|
|
52
|
-
// console.log({ isPlaying: this.isPlaying });
|
|
53
|
-
if (this.isPlaying)
|
|
54
|
-
return;
|
|
55
|
-
if (!this.chunkQueue[this.currentIndex]) {
|
|
56
|
-
// wait until the correct chunk arrives
|
|
57
|
-
setTimeout(() => this.playChunks(), 10);
|
|
58
|
-
}
|
|
59
|
-
this.isPlaying = true;
|
|
60
|
-
this.playChunk(this.chunkQueue[this.currentIndex]).then(() => {
|
|
61
|
-
this.isPlaying = false;
|
|
62
|
-
this.currentIndex++;
|
|
63
|
-
if (this.chunkQueue[this.currentIndex]) {
|
|
64
|
-
this.shouldMonitorLoudness = true;
|
|
65
|
-
this.playChunks();
|
|
66
|
-
}
|
|
67
|
-
else {
|
|
68
|
-
// console.log('Playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
|
|
69
|
-
setTimeout(() => {
|
|
70
|
-
// console.log('Check again if really playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
|
|
71
|
-
if (this.chunkQueue.length > this.currentIndex) {
|
|
72
|
-
this.playChunks();
|
|
73
|
-
}
|
|
74
|
-
else {
|
|
75
|
-
this.startedPlaying = false;
|
|
76
|
-
this.shouldMonitorLoudness = false;
|
|
77
|
-
}
|
|
78
|
-
}, 1000);
|
|
79
|
-
}
|
|
80
|
-
});
|
|
81
|
-
}
|
|
82
|
-
stopPlayback() {
|
|
83
|
-
// console.log('Stopping playback');
|
|
84
|
-
// Implement logic to stop the current playback
|
|
85
|
-
this.isPlaying = false;
|
|
86
|
-
this.chunkQueue = [];
|
|
87
|
-
this.startedPlaying = false;
|
|
88
|
-
this.shouldMonitorLoudness = false;
|
|
89
|
-
cancelAnimationFrame(this.handle);
|
|
90
|
-
}
|
|
91
|
-
playChunk(chunk) {
|
|
92
|
-
// console.log({queue: this.chunkQueue})
|
|
93
|
-
if (!chunk) {
|
|
94
|
-
return Promise.resolve();
|
|
95
|
-
}
|
|
96
|
-
// console.log('Playing chunk', chunk);
|
|
97
|
-
return new Promise((resolve) => {
|
|
98
|
-
const source = this.audioContext.createBufferSource();
|
|
99
|
-
this.audioContext.decodeAudioData(chunk.slice(0)).then((audioBuffer) => {
|
|
100
|
-
source.buffer = audioBuffer;
|
|
101
|
-
// Create a GainNode for volume control
|
|
102
|
-
const gainNode = this.audioContext.createGain();
|
|
103
|
-
gainNode.gain.value = this.volume;
|
|
104
|
-
// Connect the source to the GainNode, then to the analyser node, then to the destination (speakers)
|
|
105
|
-
source.connect(gainNode);
|
|
106
|
-
gainNode.connect(this.analyser);
|
|
107
|
-
this.analyser.connect(this.audioContext.destination);
|
|
108
|
-
source.start(0);
|
|
109
|
-
// console.log('Playing chunk', this.currentIndex);
|
|
110
|
-
gainNode.gain.value = this.volume;
|
|
111
|
-
source.onended = () => {
|
|
112
|
-
// console.log('Chunk ended');
|
|
113
|
-
resolve();
|
|
114
|
-
};
|
|
115
|
-
// Start monitoring loudness only once
|
|
116
|
-
if (!this.isMonitoring) {
|
|
117
|
-
this.isMonitoring = true;
|
|
118
|
-
this.shouldMonitorLoudness = true;
|
|
119
|
-
this.monitorLoudness();
|
|
120
|
-
}
|
|
121
|
-
});
|
|
122
|
-
});
|
|
123
|
-
}
|
|
124
|
-
playAgain() {
|
|
125
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
126
|
-
console.log('Playing again');
|
|
127
|
-
if (this.chunkQueue.length > 0 && !this.isPlaying) {
|
|
128
|
-
this.playChunks();
|
|
129
|
-
}
|
|
130
|
-
});
|
|
131
|
-
}
|
|
132
|
-
monitorLoudness() {
|
|
133
|
-
// Stop monitoring when the flag is false
|
|
134
|
-
if (!this.shouldMonitorLoudness) {
|
|
135
|
-
// console.log('Loudness monitoring stopped.');
|
|
136
|
-
cancelAnimationFrame(this.handle);
|
|
137
|
-
this.loudnessCallback(0);
|
|
138
|
-
this.onEndOfSpeech();
|
|
139
|
-
return;
|
|
140
|
-
}
|
|
141
|
-
// Get the time domain data from the analyser (this is a snapshot of the waveform)
|
|
142
|
-
this.analyser.getByteTimeDomainData(this.dataArray);
|
|
143
|
-
// Calculate the RMS (root mean square) of the waveform values to get the perceived loudness
|
|
144
|
-
let sum = 0;
|
|
145
|
-
for (let i = 0; i < this.dataArray.length; i++) {
|
|
146
|
-
const value = this.dataArray[i] / 128.0 - 1.0; // Normalize between -1 and 1
|
|
147
|
-
sum += value * value;
|
|
148
|
-
}
|
|
149
|
-
const rms = Math.sqrt(sum / this.dataArray.length);
|
|
150
|
-
// Handle the case where RMS is 0 to avoid log10(0)
|
|
151
|
-
if (rms === 0) {
|
|
152
|
-
// console.log('Current loudness: Silent');
|
|
153
|
-
}
|
|
154
|
-
else {
|
|
155
|
-
let loudnessInDb = 20 * Math.log10(rms); // Convert to dB
|
|
156
|
-
// console.log('Current loudness:' + loudnessInDb);
|
|
157
|
-
const minDb = -57;
|
|
158
|
-
const maxDb = -15;
|
|
159
|
-
if (loudnessInDb < minDb) {
|
|
160
|
-
loudnessInDb = minDb;
|
|
161
|
-
}
|
|
162
|
-
if (loudnessInDb > maxDb) {
|
|
163
|
-
loudnessInDb = maxDb;
|
|
164
|
-
}
|
|
165
|
-
const loudnessScale = ((loudnessInDb - minDb) / (maxDb - minDb)) * 100;
|
|
166
|
-
// console.log("root:corrent loudness", loudnessScale);
|
|
167
|
-
this.loudnessCallback(loudnessScale);
|
|
168
|
-
}
|
|
169
|
-
// Call this method again at regular intervals if you want continuous loudness monitoring
|
|
170
|
-
this.handle = requestAnimationFrame(() => this.monitorLoudness());
|
|
171
|
-
}
|
|
172
|
-
reset() {
|
|
173
|
-
// console.log('Resetting player');
|
|
174
|
-
this.stopPlayback();
|
|
175
|
-
this.currentIndex = 0;
|
|
176
|
-
this.shouldMonitorLoudness = true;
|
|
177
|
-
//reset to the beginning when the class gets initialized
|
|
178
|
-
this.isMonitoring = false;
|
|
179
|
-
this.isPlaying = false;
|
|
180
|
-
this.init();
|
|
181
|
-
}
|
|
182
|
-
setOnEndOfSpeech(callback) {
|
|
183
|
-
this.onEndOfSpeech = callback;
|
|
184
|
-
}
|
|
185
|
-
}
|
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
export function getFirstMessages(instructions) {
|
|
2
|
-
const messages = [];
|
|
3
|
-
if (instructions.instructions) {
|
|
4
|
-
messages.push({ id: '1', role: 'system', content: instructions.instructions });
|
|
5
|
-
}
|
|
6
|
-
if (instructions.userMessage) {
|
|
7
|
-
messages.push({ id: '2', role: 'user', content: instructions.userMessage });
|
|
8
|
-
}
|
|
9
|
-
if (instructions.assistantMessage) {
|
|
10
|
-
messages.push({ id: '3', role: 'assistant', content: instructions.assistantMessage });
|
|
11
|
-
}
|
|
12
|
-
return messages;
|
|
13
|
-
}
|
|
@@ -1,15 +0,0 @@
|
|
|
1
|
-
import React from 'react';
|
|
2
|
-
type AudioPlayerProps = {
|
|
3
|
-
text: string;
|
|
4
|
-
voice?: string;
|
|
5
|
-
language?: string;
|
|
6
|
-
hide?: boolean;
|
|
7
|
-
playOnMount?: boolean;
|
|
8
|
-
initialSpeed?: number;
|
|
9
|
-
enableSpeedAdjustment?: boolean;
|
|
10
|
-
playListenerEvent?: string;
|
|
11
|
-
};
|
|
12
|
-
export declare const AudioPlayOptions: number[];
|
|
13
|
-
export type AudioPlayOptionType = 0.8 | 0.9 | 1.0 | 1.1 | 1.2 | 1.5;
|
|
14
|
-
export declare const AudioPlayer: React.FC<AudioPlayerProps>;
|
|
15
|
-
export {};
|
|
@@ -1,82 +0,0 @@
|
|
|
1
|
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
2
|
-
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
3
|
-
return new (P || (P = Promise))(function (resolve, reject) {
|
|
4
|
-
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
5
|
-
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
6
|
-
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
7
|
-
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
8
|
-
});
|
|
9
|
-
};
|
|
10
|
-
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
11
|
-
import { useState, useEffect } from 'react';
|
|
12
|
-
import { FaPlayCircle, FaStopCircle } from 'react-icons/fa';
|
|
13
|
-
import { useRimori } from '../../providers/PluginProvider';
|
|
14
|
-
import { Spinner } from '../Spinner';
|
|
15
|
-
import { EventBus } from '@rimori/client';
|
|
16
|
-
export const AudioPlayOptions = [0.8, 0.9, 1.0, 1.1, 1.2, 1.5];
|
|
17
|
-
let isFetchingAudio = false;
|
|
18
|
-
export const AudioPlayer = ({ text, voice, language, hide, playListenerEvent, initialSpeed = 1.0, playOnMount = false, enableSpeedAdjustment = false, }) => {
|
|
19
|
-
const [audioUrl, setAudioUrl] = useState(null);
|
|
20
|
-
const [speed, setSpeed] = useState(initialSpeed);
|
|
21
|
-
const [isPlaying, setIsPlaying] = useState(false);
|
|
22
|
-
const [isLoading, setIsLoading] = useState(false);
|
|
23
|
-
const { ai } = useRimori();
|
|
24
|
-
useEffect(() => {
|
|
25
|
-
if (audioUrl)
|
|
26
|
-
setAudioUrl(null);
|
|
27
|
-
return () => {
|
|
28
|
-
if (audioUrl)
|
|
29
|
-
URL.revokeObjectURL(audioUrl);
|
|
30
|
-
};
|
|
31
|
-
}, [text]);
|
|
32
|
-
// Function to generate audio from text using API
|
|
33
|
-
const generateAudio = () => __awaiter(void 0, void 0, void 0, function* () {
|
|
34
|
-
setIsLoading(true);
|
|
35
|
-
const blob = yield ai.getVoice(text, voice || (language ? 'aws_default' : 'openai_alloy'), 1, language);
|
|
36
|
-
setAudioUrl(URL.createObjectURL(blob));
|
|
37
|
-
setIsLoading(false);
|
|
38
|
-
});
|
|
39
|
-
// Effect to play audio when audioUrl changes and play state is true
|
|
40
|
-
useEffect(() => {
|
|
41
|
-
if (!audioUrl || !isPlaying)
|
|
42
|
-
return;
|
|
43
|
-
const audio = new Audio(audioUrl);
|
|
44
|
-
audio.playbackRate = speed;
|
|
45
|
-
audio
|
|
46
|
-
.play()
|
|
47
|
-
.then(() => {
|
|
48
|
-
audio.onended = () => {
|
|
49
|
-
setIsPlaying(false);
|
|
50
|
-
isFetchingAudio = false;
|
|
51
|
-
};
|
|
52
|
-
})
|
|
53
|
-
.catch((e) => {
|
|
54
|
-
console.warn('Error playing audio:', e);
|
|
55
|
-
setIsPlaying(false);
|
|
56
|
-
});
|
|
57
|
-
return () => {
|
|
58
|
-
audio.pause();
|
|
59
|
-
};
|
|
60
|
-
}, [audioUrl, isPlaying, speed]);
|
|
61
|
-
const togglePlayback = () => {
|
|
62
|
-
if (!isPlaying && !audioUrl) {
|
|
63
|
-
generateAudio().then(() => setIsPlaying(true));
|
|
64
|
-
}
|
|
65
|
-
else {
|
|
66
|
-
setIsPlaying((prev) => !prev);
|
|
67
|
-
}
|
|
68
|
-
};
|
|
69
|
-
useEffect(() => {
|
|
70
|
-
if (!playListenerEvent)
|
|
71
|
-
return;
|
|
72
|
-
EventBus.on(playListenerEvent, () => togglePlayback());
|
|
73
|
-
}, [playListenerEvent]);
|
|
74
|
-
useEffect(() => {
|
|
75
|
-
if (!playOnMount || isFetchingAudio)
|
|
76
|
-
return;
|
|
77
|
-
isFetchingAudio = true;
|
|
78
|
-
// console.log("playOnMount", playOnMount);
|
|
79
|
-
togglePlayback();
|
|
80
|
-
}, [playOnMount]);
|
|
81
|
-
return (_jsx("div", { className: "group relative", children: _jsxs("div", { className: "flex flex-row items-end", children: [!hide && (_jsx("button", { className: "text-gray-500", onClick: togglePlayback, disabled: isLoading, children: isLoading ? _jsx(Spinner, {}) : isPlaying ? _jsx(FaStopCircle, { size: '25px' }) : _jsx(FaPlayCircle, { size: '25px' }) })), enableSpeedAdjustment && (_jsxs("div", { className: "ml-1 opacity-0 group-hover:opacity-100 transition-opacity duration-300 flex flex-row text-sm text-gray-500", children: [_jsx("span", { className: "pr-1", children: "Speed: " }), _jsx("select", { value: speed, className: "appearance-none cursor-pointer pr-0 p-0 rounded shadow leading-tight focus:outline-none focus:bg-gray-800 focus:ring bg-transparent border-0", onChange: (e) => setSpeed(parseFloat(e.target.value)), disabled: isLoading, children: AudioPlayOptions.map((s) => (_jsx("option", { value: s, children: s }, s))) })] }))] }) }));
|
|
82
|
-
};
|
|
@@ -1,10 +0,0 @@
|
|
|
1
|
-
import { RimoriClient } from '@rimori/client';
|
|
2
|
-
export interface Position {
|
|
3
|
-
x: number;
|
|
4
|
-
y: number;
|
|
5
|
-
text?: string;
|
|
6
|
-
}
|
|
7
|
-
declare const ContextMenu: ({ client }: {
|
|
8
|
-
client: RimoriClient;
|
|
9
|
-
}) => import("react/jsx-runtime").JSX.Element | null;
|
|
10
|
-
export default ContextMenu;
|
|
@@ -1,135 +0,0 @@
|
|
|
1
|
-
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
-
import { useState, useEffect, useRef } from 'react';
|
|
3
|
-
import { EventBus } from '@rimori/client';
|
|
4
|
-
/**
 * Floating context menu that appears near the current text selection and lists
 * actions contributed by installed plugins (plus any actions pushed at runtime
 * via the 'global.contextMenu.createActions' event).
 *
 * Behavior visible in this block:
 * - Opens on right-click over a selection, or on any mouse release over a
 *   selection when the user setting `context_menu_on_select` is enabled.
 * - Closes on left-click outside the menu, when the selection is cleared, or
 *   when the selection happens inside a TEXTAREA / text INPUT.
 * - On narrow viewports (< 768px) the menu is centered under the selected
 *   text; otherwise it follows the mouse position.
 *
 * @param {{ client: RimoriClient }} props - plugin client used to read
 *   installed-plugin actions and user settings, and to emit sidebar actions.
 * @returns The menu element, or null while closed.
 */
const ContextMenu = ({ client }) => {
    const [isOpen, setIsOpen] = useState(false);
    const [actions, setActions] = useState([]);
    const [position, setPosition] = useState({ x: 0, y: 0 });
    const [openOnTextSelect, setOpenOnTextSelect] = useState(false);
    const [menuWidth, setMenuWidth] = useState(0);
    const menuRef = useRef(null);
    // NOTE(review): evaluated once per render, not on resize — a window resize
    // alone does not re-classify mobile vs. desktop until something re-renders.
    const isMobile = window.innerWidth < 768;
    /**
     * Calculates position for mobile context menu based on selected text bounds.
     * Centers the menu horizontally over the selected text and positions it
     * 12px below the text's end.
     * @param selectedText - The currently selected text
     * @param menuWidth - The width of the menu to center properly
     * @returns Position object with x and y coordinates
     */
    const calculateMobilePosition = (selectedText, menuWidth = 0) => {
        const selection = window.getSelection();
        if (!selection || !selectedText) {
            return { x: 0, y: 0, text: selectedText };
        }
        const range = selection.getRangeAt(0);
        const rect = range.getBoundingClientRect();
        // Center horizontally over the selected text, accounting for menu width
        const centerX = rect.left + rect.width / 2 - menuWidth / 2;
        // Position 12px below where the text ends vertically
        const textEndY = rect.bottom + 12;
        return { x: centerX, y: textEndY, text: selectedText };
    };
    // Load the plugin-declared action list and the user's "open on select"
    // preference once on mount.
    useEffect(() => {
        const actions = client.plugin
            .getPluginInfo()
            .installedPlugins.flatMap((p) => p.context_menu_actions)
            .filter(Boolean);
        setActions(actions);
        setOpenOnTextSelect(client.plugin.getUserInfo().context_menu_on_select);
        // Runtime-contributed actions are prepended to the plugin-declared ones.
        // NOTE(review): this listener is never unsubscribed — if EventBus exposes
        // an off/unsubscribe API, it should be called in an effect cleanup to
        // avoid duplicate handlers across remounts. TODO confirm EventBus API.
        EventBus.on('global.contextMenu.createActions', ({ data }) => {
            setActions([...data.actions, ...actions]);
        });
    }, []);
    // Update menu width when menu is rendered
    useEffect(() => {
        if (isOpen && menuRef.current) {
            setMenuWidth(menuRef.current.offsetWidth);
        }
    }, [isOpen, actions]);
    useEffect(() => {
        // Track mouse position globally
        const handleMouseMove = (e) => {
            var _a;
            const selectedText = (_a = window.getSelection()) === null || _a === void 0 ? void 0 : _a.toString().trim();
            // Skip the update when the menu is already open for this exact
            // selection — avoids re-render churn while the pointer moves.
            if (isOpen && selectedText === position.text)
                return;
            if (isMobile && selectedText) {
                setPosition(calculateMobilePosition(selectedText, menuWidth));
            }
            else {
                setPosition({ x: e.clientX, y: e.clientY, text: selectedText });
            }
        };
        // Decides on every mouse release / right-click whether to open, keep,
        // or close the menu.
        const handleMouseUp = (e) => {
            var _a, _b;
            const selectedText = (_a = window.getSelection()) === null || _a === void 0 ? void 0 : _a.toString().trim();
            // Check if click is inside the context menu
            if (menuRef.current && menuRef.current.contains(e.target)) {
                // Don't close the menu if clicking inside
                return;
            }
            // Prevent context menu on textarea or text input selection
            const target = e.target;
            const isTextInput = target &&
                (target.tagName === 'TEXTAREA' || (target.tagName === 'INPUT' && target.type === 'text'));
            if (isTextInput) {
                setIsOpen(false);
                return;
            }
            if (e.button === 0 && isOpen) {
                // Left-click outside an open menu: close it and drop the selection.
                setIsOpen(false);
                (_b = window.getSelection()) === null || _b === void 0 ? void 0 : _b.removeAllRanges();
            }
            else if (selectedText && (openOnTextSelect || e.button === 2)) {
                if (e.button === 2) {
                    // Suppress the browser's native context menu on right-click.
                    e.preventDefault();
                }
                if (isMobile) {
                    setPosition(calculateMobilePosition(selectedText, menuWidth));
                }
                else {
                    setPosition({ x: e.clientX, y: e.clientY, text: selectedText });
                }
                setIsOpen(true);
            }
            else {
                setIsOpen(false);
            }
        };
        // Add selectionchange listener to close menu if selection is cleared and update position for mobile
        const handleSelectionChange = () => {
            var _a;
            const selectedText = (_a = window.getSelection()) === null || _a === void 0 ? void 0 : _a.toString().trim();
            if (!selectedText && isOpen) {
                setIsOpen(false);
            }
            else if (selectedText && isOpen && isMobile) {
                // Update position in real-time as text selection changes on mobile
                setPosition(calculateMobilePosition(selectedText, menuWidth));
            }
        };
        document.addEventListener('mouseup', handleMouseUp);
        window.addEventListener('mousemove', handleMouseMove);
        document.addEventListener('contextmenu', handleMouseUp);
        document.addEventListener('selectionchange', handleSelectionChange);
        return () => {
            document.removeEventListener('mouseup', handleMouseUp);
            window.removeEventListener('mousemove', handleMouseMove);
            document.removeEventListener('contextmenu', handleMouseUp);
            document.removeEventListener('selectionchange', handleSelectionChange);
        };
        // NOTE(review): the handlers above also close over `menuWidth` and
        // `isMobile`, which are not listed in the deps — a stale menuWidth may
        // be used until one of the listed deps changes. Verify intent before
        // adding them.
    }, [openOnTextSelect, isOpen, position.text]);
    if (!isOpen) {
        return null;
    }
    // Fixed-position menu at the computed coordinates; each entry closes the
    // menu, clears the selection, and forwards the selected text to the owning
    // plugin as a sidebar action.
    return (_jsx("div", { ref: menuRef, className: "fixed bg-gray-400 dark:bg-gray-700 shadow-lg border border-gray-400 rounded-md overflow-hidden dark:text-white z-50", style: { top: position.y, left: position.x }, children: actions.map((action, index) => (_jsx(MenuEntryItem, { icon: action.icon, text: action.text, onClick: () => {
                var _a;
                setIsOpen(false);
                (_a = window.getSelection()) === null || _a === void 0 ? void 0 : _a.removeAllRanges();
                client.event.emitSidebarAction(action.plugin_id, action.action_key, position.text);
            } }, index))) }));
};
|
|
132
|
-
/**
 * Renders one context-menu row: an icon cell followed by a label cell inside a
 * full-width button. Invokes the supplied click handler when pressed.
 */
function MenuEntryItem({ icon, text, onClick }) {
    const iconCell = _jsx("span", { className: "flex-grow", children: icon });
    const labelCell = _jsx("span", { className: "flex-grow", children: text });
    return (_jsxs("button", {
        onClick,
        className: "px-4 py-2 text-left hover:bg-gray-500 dark:hover:bg-gray-600 w-full flex flex-row",
        children: [iconCell, labelCell],
    }));
}
|
|
135
|
-
export default ContextMenu;
|