@rimori/client 1.0.3 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/components/CRUDModal.js +0 -1
- package/dist/components/ai/Assistant.d.ts +9 -0
- package/dist/components/ai/Assistant.js +59 -0
- package/dist/components/ai/Avatar.d.ts +11 -0
- package/dist/components/ai/Avatar.js +39 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.js +86 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/ai/utils.d.ts +6 -0
- package/dist/components/ai/utils.js +14 -0
- package/dist/components/audio/Playbutton.js +4 -5
- package/dist/components/avatar/Assistant.d.ts +9 -0
- package/dist/components/avatar/Assistant.js +59 -0
- package/dist/components/avatar/Avatar.d.ts +12 -0
- package/dist/components/avatar/Avatar.js +42 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.js +84 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/avatar/utils.d.ts +6 -0
- package/dist/components/avatar/utils.js +14 -0
- package/dist/components.d.ts +9 -0
- package/dist/components.js +10 -0
- package/dist/controller/AIController.d.ts +4 -3
- package/dist/controller/AIController.js +32 -8
- package/dist/controller/ObjectController.d.ts +2 -2
- package/dist/controller/ObjectController.js +4 -5
- package/dist/controller/SettingsController.d.ts +2 -1
- package/dist/controller/SettingsController.js +9 -0
- package/dist/controller/SharedContentController.js +6 -6
- package/dist/core.d.ts +9 -0
- package/dist/core.js +10 -0
- package/dist/hooks/UseChatHook.js +2 -2
- package/dist/index.d.ts +3 -2
- package/dist/index.js +4 -2
- package/dist/plugin/PluginController.d.ts +4 -12
- package/dist/plugin/PluginController.js +43 -70
- package/dist/plugin/RimoriClient.d.ts +85 -32
- package/dist/plugin/RimoriClient.js +98 -77
- package/dist/plugin/fromRimori/EventBus.d.ts +98 -0
- package/dist/plugin/fromRimori/EventBus.js +240 -0
- package/dist/providers/PluginProvider.d.ts +1 -0
- package/dist/providers/PluginProvider.js +10 -12
- package/dist/worker/WorkerSetup.d.ts +6 -0
- package/dist/worker/WorkerSetup.js +79 -0
- package/package.json +16 -3
- package/src/components/CRUDModal.tsx +1 -3
- package/src/components/ai/Assistant.tsx +96 -0
- package/src/components/ai/Avatar.tsx +61 -0
- package/src/components/ai/EmbeddedAssistent/AudioInputField.tsx +64 -0
- package/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.tsx +75 -0
- package/src/components/ai/EmbeddedAssistent/TTS/MessageSender.ts +91 -0
- package/src/components/ai/EmbeddedAssistent/TTS/Player.ts +192 -0
- package/src/components/ai/EmbeddedAssistent/VoiceRecoder.tsx +56 -0
- package/src/components/ai/utils.ts +23 -0
- package/src/components/audio/Playbutton.tsx +4 -5
- package/src/components.ts +10 -0
- package/src/controller/AIController.ts +84 -60
- package/src/controller/ObjectController.ts +4 -6
- package/src/controller/SettingsController.ts +9 -1
- package/src/controller/SharedContentController.ts +6 -6
- package/src/core.ts +10 -0
- package/src/hooks/UseChatHook.ts +2 -2
- package/src/index.ts +4 -2
- package/src/plugin/PluginController.ts +46 -76
- package/src/plugin/RimoriClient.ts +147 -85
- package/src/plugin/fromRimori/EventBus.ts +301 -0
- package/src/plugin/fromRimori/readme.md +2 -0
- package/src/providers/PluginProvider.tsx +12 -14
- package/src/worker/WorkerSetup.ts +80 -0
- package/dist/CRUDModal.d.ts +0 -16
- package/dist/CRUDModal.js +0 -31
- package/dist/MarkdownEditor.d.ts +0 -8
- package/dist/MarkdownEditor.js +0 -46
- package/dist/audio/Playbutton.d.ts +0 -14
- package/dist/audio/Playbutton.js +0 -73
- package/dist/components/hooks/UseChatHook.d.ts +0 -15
- package/dist/components/hooks/UseChatHook.js +0 -21
- package/dist/controller/PluginController.d.ts +0 -14
- package/dist/controller/PluginController.js +0 -30
- package/dist/plugin/AIController copy.d.ts +0 -22
- package/dist/plugin/AIController copy.js +0 -68
- package/dist/plugin/AIController.d.ts +0 -22
- package/dist/plugin/AIController.js +0 -68
- package/dist/plugin/ObjectController.d.ts +0 -34
- package/dist/plugin/ObjectController.js +0 -77
- package/dist/plugin/SettingController.d.ts +0 -13
- package/dist/plugin/SettingController.js +0 -55
- package/dist/plugin/VoiceController.d.ts +0 -2
- package/dist/plugin/VoiceController.js +0 -27
- package/dist/providers/EventEmitter.d.ts +0 -11
- package/dist/providers/EventEmitter.js +0 -41
- package/dist/providers/EventEmitterContext.d.ts +0 -6
- package/dist/providers/EventEmitterContext.js +0 -19
- package/dist/utils/DifficultyConverter.d.ts +0 -3
- package/dist/utils/DifficultyConverter.js +0 -7
- package/dist/utils/constants.d.ts +0 -4
- package/dist/utils/constants.js +0 -12
- package/dist/utils/plugin/Client.d.ts +0 -72
- package/dist/utils/plugin/Client.js +0 -118
- package/dist/utils/plugin/PluginController.d.ts +0 -36
- package/dist/utils/plugin/PluginController.js +0 -119
- package/dist/utils/plugin/PluginUtils.d.ts +0 -2
- package/dist/utils/plugin/PluginUtils.js +0 -23
- package/dist/utils/plugin/RimoriClient.d.ts +0 -72
- package/dist/utils/plugin/RimoriClient.js +0 -118
- package/dist/utils/plugin/ThemeSetter.d.ts +0 -1
- package/dist/utils/plugin/ThemeSetter.js +0 -13
- package/dist/utils/plugin/WhereClauseBuilder.d.ts +0 -24
- package/dist/utils/plugin/WhereClauseBuilder.js +0 -79
- package/dist/utils/plugin/providers/EventEmitter.d.ts +0 -11
- package/dist/utils/plugin/providers/EventEmitter.js +0 -41
- package/dist/utils/plugin/providers/EventEmitterContext.d.ts +0 -6
- package/dist/utils/plugin/providers/EventEmitterContext.js +0 -19
- package/dist/utils/plugin/providers/PluginProvider.d.ts +0 -8
- package/dist/utils/plugin/providers/PluginProvider.js +0 -49
- package/src/providers/EventEmitter.ts +0 -48
- package/src/providers/EventEmitterContext.tsx +0 -27
- package/src/utils/constants.ts +0 -18
package/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.tsx
@@ -0,0 +1,75 @@
+import React, { useEffect, useRef } from 'react';
+import { EventBus, EventBusMessage } from '../../../core';
+
+interface CircleAudioAvatarProps {
+  width?: string;
+  imageUrl: string;
+  className?: string;
+}
+
+
+export function CircleAudioAvatar({ imageUrl, className, width = "150px" }: CircleAudioAvatarProps) {
+  const canvasRef = useRef<HTMLCanvasElement>(null);
+
+  useEffect(() => {
+    const canvas = canvasRef.current;
+    if (canvas) {
+      const ctx = canvas.getContext('2d');
+      if (ctx) {
+        const image = new Image();
+        image.src = imageUrl;
+        image.onload = () => {
+          draw(ctx, canvas, image, 0);
+        };
+
+        const handleLoudness = (event: EventBusMessage) => {
+          draw(ctx, canvas, image, event.data.loudness);
+        };
+
+        // Subscribe to loudness changes
+        const listenerId = EventBus.on('self.avatar.triggerLoudness', handleLoudness);
+
+        return () => {
+          EventBus.off(listenerId);
+        };
+      }
+    }
+  }, [imageUrl]);
+
+  // Function to draw on the canvas
+  const draw = (ctx: CanvasRenderingContext2D, canvas: HTMLCanvasElement, image: HTMLImageElement, loudness: number) => {
+    if (canvas && ctx) {
+      ctx.clearRect(0, 0, canvas.width, canvas.height);
+
+      // Draw pulsing circle
+      const radius = Math.min(canvas.width, canvas.height) / 3;
+      const centerX = canvas.width / 2;
+      const centerY = canvas.height / 2;
+      const pulseRadius = radius + loudness / 2.5; // Adjust the divisor for sensitivity
+      ctx.beginPath();
+      ctx.arc(centerX, centerY, pulseRadius, 0, Math.PI * 2, true);
+      ctx.strokeStyle = 'rgba(0, 0, 0, 0.5)';
+      ctx.lineWidth = 5;
+      ctx.stroke();
+
+      // Draw image circle
+      ctx.save();
+      ctx.beginPath();
+      ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
+      ctx.closePath();
+      ctx.clip();
+      ctx.drawImage(image, centerX - radius, centerY - radius, radius * 2, radius * 2);
+      ctx.restore();
+
+      // Draw circular frame around the image
+      ctx.beginPath();
+      ctx.arc(centerX, centerY, radius, 0, Math.PI * 2, true);
+      ctx.strokeStyle = 'rgba(20,20, 20, 0.9)';
+      ctx.lineWidth = 5; // Adjust the width of the frame as needed
+      ctx.stroke();
+    }
+  };
+
+  return <canvas ref={canvasRef} className={className} width={500} height={500} style={{ width }} />;
+};
+
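Usage sketch (not part of the diff): the new component only needs the props shown above. The relative import path and the image URL are placeholders; rendered on its own the avatar stays static, since the pulsing is driven by 'self.avatar.triggerLoudness' events that another part of the package is assumed to publish.

import React from 'react';
import { CircleAudioAvatar } from './CircleAudioAvatar'; // placeholder import path

export function AvatarDemo() {
  return (
    <CircleAudioAvatar
      imageUrl="https://example.com/avatar.png" // any square image; it is clipped to a circle
      width="120px"                             // rendered CSS width; the canvas itself stays 500x500
    />
  );
}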
package/src/components/ai/EmbeddedAssistent/TTS/MessageSender.ts
@@ -0,0 +1,91 @@
+import { ChunkedAudioPlayer } from './Player';
+
+type VoiceBackend = (text: string, voice?: string, speed?: number) => Promise<Blob>;
+
+export class MessageSender {
+  private player = new ChunkedAudioPlayer();
+  private fetchedSentences = new Set<string>();
+  private lastLoading = false;
+  private voice: string;
+  private model: string;
+  private voiceBackend: VoiceBackend;
+
+  constructor(voiceBackend: VoiceBackend, voice: string = 'alloy', model = 'openai') {
+    this.voiceBackend = voiceBackend;
+    this.voice = voice;
+    this.model = model;
+  }
+
+  private getCompletedSentences(currentText: string, isLoading: boolean): string[] {
+    // Split the text based on the following characters: .,?!
+    // Only split on : when followed by a space
+    const pattern = /(.+?[,.?!]|.+?:\s+|.+?\n+)/g;
+    const result: string[] = [];
+    let match;
+    while ((match = pattern.exec(currentText)) !== null) {
+      const sentence = match[0].trim();
+      if (sentence.length > 0) {
+        result.push(sentence);
+      }
+    }
+    if (!isLoading) {
+      const lastFullSentence = result[result.length - 1];
+      const leftoverIndex = currentText.lastIndexOf(lastFullSentence) + lastFullSentence.length;
+      if (leftoverIndex < currentText.length) {
+        result.push(currentText.slice(leftoverIndex).trim());
+      }
+    }
+    return result;
+  }
+
+  public async handleNewText(currentText: string | undefined, isLoading: boolean) {
+    if (!this.lastLoading && isLoading) {
+      this.reset();
+    }
+    this.lastLoading = isLoading;
+
+    if (!currentText) {
+      return;
+    }
+
+    const sentences = this.getCompletedSentences(currentText, isLoading);
+
+    for (let i = 0; i < sentences.length; i++) {
+      const sentence = sentences[i];
+      if (!this.fetchedSentences.has(sentence)) {
+        this.fetchedSentences.add(sentence);
+        const audioData = await this.generateSpeech(sentence);
+        await this.player.addChunk(audioData, i);
+      }
+    }
+  }
+
+  private async generateSpeech(sentence: string): Promise<ArrayBuffer> {
+    const blob = await this.voiceBackend(sentence, this.voice, 1.0);
+    return await blob.arrayBuffer();
+  }
+
+  public play() {
+    this.player.playAgain();
+  }
+
+  public stop() {
+    this.player.stopPlayback();
+  }
+
+  private reset() {
+    this.stop();
+    this.fetchedSentences.clear();
+    this.player.reset();
+  }
+
+  public setVolume(volume: number) {
+    this.player.setVolume(volume);
+  }
+
+  public setOnLoudnessChange(callback: (value: number) => void) {
+    this.player.setOnLoudnessChange((loudness) => {
+      callback(loudness);
+    });
+  }
+}
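Usage sketch (not part of the diff): how the sender might be fed from a streaming LLM reply. Only the constructor and method signatures shown above are used; requestSpeech and the /tts endpoint are hypothetical stand-ins for whatever VoiceBackend the host app supplies.

import { MessageSender } from './MessageSender'; // placeholder import path

// Hypothetical VoiceBackend: the sender only requires the
// (text, voice?, speed?) => Promise<Blob> shape declared above.
const requestSpeech = async (text: string, voice?: string, speed?: number): Promise<Blob> => {
  const res = await fetch('/tts', { method: 'POST', body: JSON.stringify({ text, voice, speed }) });
  return await res.blob();
};

async function speakStreamedReply() {
  const sender = new MessageSender(requestSpeech, 'alloy');
  sender.setVolume(0.8);
  sender.setOnLoudnessChange((loudness) => console.log('loudness 0-100:', loudness));

  // Pass the growing message while the LLM is still streaming (isLoading = true),
  // then once more with isLoading = false so the leftover tail is also spoken.
  await sender.handleNewText('Hello there.', true);
  await sender.handleNewText('Hello there. How can I help', true);
  await sender.handleNewText('Hello there. How can I help you today?', false);
}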
package/src/components/ai/EmbeddedAssistent/TTS/Player.ts
@@ -0,0 +1,192 @@
+export class ChunkedAudioPlayer {
+
+  private audioContext!: AudioContext;
+  private chunkQueue: ArrayBuffer[] = [];
+  private isPlaying = false;
+  private analyser!: AnalyserNode;
+  private dataArray!: Uint8Array;
+  private shouldMonitorLoudness = true;
+  private isMonitoring = false;
+  private handle = 0;
+  private volume = 1.0;
+  private loudnessCallback: (value: number) => void = () => { };
+  private currentIndex = 0;
+  private startedPlaying = false;
+
+  constructor() {
+    this.init();
+  }
+
+  private init(): void {
+    this.audioContext = new AudioContext();
+    this.analyser = this.audioContext.createAnalyser();
+    this.analyser.fftSize = 256; // Set the FFT size (smaller values provide faster updates, larger ones give better resolution)
+    const bufferLength = this.analyser.frequencyBinCount;
+    this.dataArray = new Uint8Array(bufferLength); // Array to hold frequency data
+  }
+
+  public setOnLoudnessChange(callback: (value: number) => void) {
+    this.loudnessCallback = callback;
+  }
+
+  public setVolume(volume: number) {
+    this.volume = volume;
+  }
+
+  public async addChunk(chunk: ArrayBuffer, position: number): Promise<void> {
+    console.log('Adding chunk', position, chunk);
+    this.chunkQueue[position] = chunk;
+    // console.log("received chunk", {
+    //   chunkQueue: this.chunkQueue.length,
+    //   isPlaying: this.isPlaying,
+    // })
+
+    if (position === 0 && !this.startedPlaying) {
+      this.startedPlaying = true;
+      this.playChunks();
+    }
+  }
+
+  private playChunks(): void {
+    // console.log({ isPlaying: this.isPlaying });
+    if (this.isPlaying) return;
+    if (!this.chunkQueue[this.currentIndex]) {
+      // wait until the correct chunk arrives
+      setTimeout(() => this.playChunks(), 10);
+    }
+    this.isPlaying = true;
+
+    this.playChunk(this.chunkQueue[this.currentIndex]).then(() => {
+      this.isPlaying = false;
+      this.currentIndex++;
+      if (this.chunkQueue[this.currentIndex]) {
+        this.shouldMonitorLoudness = true;
+        this.playChunks();
+      } else {
+        // console.log('Playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
+        setTimeout(() => {
+          // console.log('Check again if really playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
+          if (this.chunkQueue.length > this.currentIndex) {
+            this.playChunks();
+          } else {
+            this.startedPlaying = false;
+            this.shouldMonitorLoudness = false;
+          }
+        }, 1000);
+      }
+    });
+  }
+
+  public stopPlayback(): void {
+    // console.log('Stopping playback');
+    // Implement logic to stop the current playback
+    this.isPlaying = false;
+    this.chunkQueue = [];
+    this.startedPlaying = false;
+    this.shouldMonitorLoudness = false;
+    cancelAnimationFrame(this.handle);
+  }
+
+  private playChunk(chunk: ArrayBuffer): Promise<void> {
+    console.log({queue: this.chunkQueue})
+    if (!chunk) {
+      return Promise.resolve();
+    }
+
+    // console.log('Playing chunk', chunk);
+    return new Promise((resolve) => {
+      const source = this.audioContext.createBufferSource();
+      this.audioContext.decodeAudioData(chunk.slice(0)).then((audioBuffer) => {
+        source.buffer = audioBuffer;
+
+        // Create a GainNode for volume control
+        const gainNode = this.audioContext.createGain();
+        gainNode.gain.value = this.volume;
+
+        // Connect the source to the GainNode, then to the analyser node, then to the destination (speakers)
+        source.connect(gainNode);
+        gainNode.connect(this.analyser);
+        this.analyser.connect(this.audioContext.destination);
+
+        source.start(0);
+        // console.log('Playing chunk', this.currentIndex);
+        gainNode.gain.value = this.volume;
+        source.onended = () => {
+          // console.log('Chunk ended');
+          resolve();
+        };
+
+        // Start monitoring loudness only once
+        if (!this.isMonitoring) {
+          this.isMonitoring = true;
+          this.shouldMonitorLoudness = true;
+          this.monitorLoudness();
+        }
+      });
+    });
+  }
+
+  async playAgain(): Promise<void> {
+    console.log('Playing again');
+    if (this.chunkQueue.length > 0 && !this.isPlaying) {
+      this.playChunks();
+    }
+  }
+
+  private monitorLoudness(): void {
+    // Stop monitoring when the flag is false
+    if (!this.shouldMonitorLoudness) {
+      // console.log('Loudness monitoring stopped.');
+      cancelAnimationFrame(this.handle);
+      this.loudnessCallback(0);
+      return;
+    }
+
+    // Get the time domain data from the analyser (this is a snapshot of the waveform)
+    this.analyser.getByteTimeDomainData(this.dataArray);
+
+    // Calculate the RMS (root mean square) of the waveform values to get the perceived loudness
+    let sum = 0;
+    for (let i = 0; i < this.dataArray.length; i++) {
+      const value = this.dataArray[i] / 128.0 - 1.0; // Normalize between -1 and 1
+      sum += value * value;
+    }
+
+    const rms = Math.sqrt(sum / this.dataArray.length);
+
+    // Handle the case where RMS is 0 to avoid log10(0)
+    if (rms === 0) {
+      // console.log('Current loudness: Silent');
+    } else {
+      let loudnessInDb = 20 * Math.log10(rms); // Convert to dB
+      // console.log('Current loudness:' + loudnessInDb);
+      const minDb = -57;
+      const maxDb = -15;
+
+      if (loudnessInDb < minDb) {
+        loudnessInDb = minDb;
+      }
+      if (loudnessInDb > maxDb) {
+        loudnessInDb = maxDb;
+      }
+
+      const loudnessScale = ((loudnessInDb - minDb) / (maxDb - minDb)) * 100;
+      // console.log("root:corrent loudness", loudnessScale);
+
+      this.loudnessCallback(loudnessScale);
+    }
+
+    // Call this method again at regular intervals if you want continuous loudness monitoring
+    this.handle = requestAnimationFrame(() => this.monitorLoudness());
+  }
+  public reset() {
+    // console.log('Resetting player');
+    this.stopPlayback();
+    this.currentIndex = 0;
+    this.shouldMonitorLoudness = true;
+    //reset to the beginning when the class gets initialized
+    this.isMonitoring = false;
+    this.isPlaying = false;
+    this.init();
+  }
+}
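Usage sketch (not part of the diff): driving the player directly. The audio URLs are placeholders; the point is that addChunk's position argument keeps playback ordered even when fetches resolve out of order, and playback starts as soon as chunk 0 arrives.

import { ChunkedAudioPlayer } from './Player'; // placeholder import path

async function playInOrder() {
  const player = new ChunkedAudioPlayer();
  player.setVolume(0.5);
  player.setOnLoudnessChange((value) => console.log('loudness 0-100:', value));

  // Placeholder URLs; each chunk is slotted into its position in the queue.
  const urls = ['/audio/part-0.mp3', '/audio/part-1.mp3'];
  await Promise.all(urls.map(async (url, position) => {
    const chunk = await (await fetch(url)).arrayBuffer();
    await player.addChunk(chunk, position);
  }));
}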
package/src/components/ai/EmbeddedAssistent/VoiceRecoder.tsx
@@ -0,0 +1,56 @@
+import { useState, useRef, forwardRef, useImperativeHandle } from 'react';
+import { FaMicrophone } from 'react-icons/fa6';
+import { usePlugin } from '../../../components';
+
+interface Props {
+  iconSize?: string;
+  className?: string;
+  onVoiceRecorded: (message: string) => void;
+}
+
+export const VoiceRecorder = forwardRef(({ onVoiceRecorded, iconSize, className }: Props, ref) => {
+  const [isRecording, setIsRecording] = useState(false);
+  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
+  const audioChunksRef = useRef<Blob[]>([]);
+  const { llm } = usePlugin();
+
+  const startRecording = async () => {
+    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+    const mediaRecorder = new MediaRecorder(stream);
+    mediaRecorderRef.current = mediaRecorder;
+
+    mediaRecorder.ondataavailable = (event) => {
+      audioChunksRef.current.push(event.data);
+    };
+
+    mediaRecorder.onstop = async () => {
+      const audioBlob = new Blob(audioChunksRef.current);
+      audioChunksRef.current = [];
+
+      onVoiceRecorded(await llm.getTextFromVoice(audioBlob));
+    };
+
+    mediaRecorder.start();
+    setIsRecording(true);
+  };
+
+  const stopRecording = () => {
+    if (mediaRecorderRef.current) {
+      mediaRecorderRef.current.stop();
+      setIsRecording(false);
+    }
+  };
+
+  useImperativeHandle(ref, () => ({
+    startRecording,
+    stopRecording,
+  }));
+
+  return (
+    <div className={className}>
+      <button onClick={isRecording ? stopRecording : startRecording}>
+        <FaMicrophone size={iconSize} className={"h-7 w-7 mr-2 " + (isRecording ? "text-red-600" : "")} />
+      </button>
+    </div>
+  );
+});
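Usage sketch (not part of the diff): controlling the recorder through its imperative handle. The handle type is not exported in this hunk, so it is declared locally here for illustration; the component also toggles recording from its own microphone button.

import React, { useRef } from 'react';
import { VoiceRecorder } from './VoiceRecoder'; // placeholder import path

// Locally declared shape of the handle exposed via useImperativeHandle above.
type VoiceRecorderHandle = { startRecording: () => void; stopRecording: () => void };

export function DictationField() {
  const recorderRef = useRef<VoiceRecorderHandle>(null);

  return (
    <div>
      <VoiceRecorder
        ref={recorderRef}
        onVoiceRecorded={(text) => console.log('transcript:', text)}
      />
      {/* Programmatic control in addition to the component's own mic button */}
      <button onClick={() => recorderRef.current?.stopRecording()}>Done</button>
    </div>
  );
}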
package/src/components/ai/utils.ts
@@ -0,0 +1,23 @@
+export interface FirstMessages {
+  instructions?: string;
+  userMessage: string;
+  assistantMessage?: string;
+}
+
+export function getFirstMessages(instructions: FirstMessages): any[] {
+  const messages = [];
+
+  if (instructions.instructions) {
+    messages.push({ id: '1', role: 'system', content: instructions.instructions });
+  }
+  if (instructions.userMessage) {
+    messages.push({ id: '2', role: 'user', content: instructions.userMessage });
+  }
+  if (instructions.assistantMessage) {
+    messages.push({ id: '3', role: 'assistant', content: instructions.assistantMessage });
+  }
+
+  console.log("getFirstMessages", messages);
+
+  return messages;
+}
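Usage sketch (not part of the diff): the helper simply turns the provided fields into seed messages and skips the optional ones that are missing.

import { getFirstMessages } from './utils'; // placeholder import path

const seedMessages = getFirstMessages({
  instructions: 'You are a friendly language tutor.',      // becomes the system message
  userMessage: 'Start a short conversation about travel.', // becomes the first user message
});
// -> [{ id: '1', role: 'system', ... }, { id: '2', role: 'user', ... }]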
package/src/components/audio/Playbutton.tsx
@@ -2,7 +2,7 @@ import React, { useState, useEffect } from 'react';
 import { FaPlayCircle, FaStopCircle } from "react-icons/fa";
 import { usePlugin } from "../../providers/PluginProvider";
 import { Spinner } from '../Spinner';
-import {
+import { EventBus } from '../../plugin/fromRimori/EventBus';
 
 type AudioPlayerProps = {
   text: string;
@@ -34,12 +34,11 @@ export const AudioPlayer: React.FC<AudioPlayerProps> = ({
   const [speed, setSpeed] = useState(initialSpeed);
   const [isPlaying, setIsPlaying] = useState(false);
   const [isLoading, setIsLoading] = useState(false);
-  const {
-  const emitter = EmitterSingleton;
+  const { llm } = usePlugin();
 
   useEffect(() => {
     if (!playListenerEvent) return;
-
+    EventBus.on(playListenerEvent, () => togglePlayback());
   }, [playListenerEvent]);
 
   useEffect(() => {
@@ -53,7 +52,7 @@ export const AudioPlayer: React.FC<AudioPlayerProps> = ({
   const generateAudio = async () => {
     setIsLoading(true);
 
-    const blob = await
+    const blob = await llm.getVoice(text, voice || (language ? "aws_default" : "openai_alloy"), 1, language);
     setAudioUrl(URL.createObjectURL(blob));
     setIsLoading(false);
   };
package/src/components.ts
@@ -0,0 +1,10 @@
+// React components and hooks exports
+export * from "./components/MarkdownEditor";
+export * from "./components/CRUDModal";
+export * from "./components/Spinner";
+export * from "./components/audio/Playbutton";
+export * from "./hooks/UseChatHook";
+export * from "./plugin/ThemeSetter";
+export * from "./providers/PluginProvider";
+export * from "./components/ai/Avatar";
+export * from "./components/ai/Assistant";
package/src/controller/AIController.ts
@@ -1,87 +1,111 @@
-import { env } from "../utils/constants";
-
 export interface ToolInvocation {
-
-
+  toolName: string;
+  args: Record<string, string>;
 }
 
 export interface Tool {
+  name: string;
+  description: string;
+  parameters: {
     name: string;
     description: string;
-
-
-
-    description: string;
-  }[];
+    type: "string" | "number" | "boolean";
+  }[];
+  execute?: <T = Record<string, string | boolean | number>>(args: T) => Promise<any> | void;
 }
 
 export interface Message {
-
-
-
-
+  id: string;
+  role: string;
+  content: string;
+  toolInvocations?: ToolInvocation[];
 }
 
-export async function generateText(messages: Message[], tools: Tool[], token: string) {
-
-
-
-
-
+export async function generateText(supabaseUrl: string, messages: Message[], tools: Tool[], token: string) {
+  const response = await fetch(`${supabaseUrl}/functions/v1/llm`, {
+    method: 'POST',
+    body: JSON.stringify({ messages, tools }),
+    headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' }
+  });
 
-
+  return await response.json();
 }
 
 export type OnLLMResponse = (id: string, response: string, finished: boolean, toolInvocations?: ToolInvocation[]) => void;
 
-export async function streamChatGPT(messages: Message[], tools: Tool[], onResponse: OnLLMResponse, token: string) {
-
-
-
-
-
-
+export async function streamChatGPT(supabaseUrl: string, messages: Message[], tools: Tool[], onResponse: OnLLMResponse, token: string) {
+  const messageId = Math.random().toString(36).substring(3);
+  const response = await fetch(`${supabaseUrl}/functions/v1/llm`, {
+    method: 'POST',
+    body: JSON.stringify({ messages, tools, stream: true }),
+    headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' }
+  });
 
-
-
-
-
+  if (!response.body) {
+    console.error('No response body.');
+    return;
+  }
+
+  const reader = response.body.getReader();
+  const decoder = new TextDecoder('utf-8');
 
-
-
+  let content = "";
+  let done = false;
+  let toolInvocations: ToolInvocation[] = [];
+  while (!done) {
+    const { value } = await reader.read();
 
-
-
-
-  while (!done) {
-    const { value } = await reader.read();
+    if (value) {
+      const chunk = decoder.decode(value, { stream: true });
+      const lines = chunk.split('\n').filter(line => line.trim() !== '');
 
-
-
-
+      for (const line of lines) {
+        const data = line.substring(3, line.length - 1);
+        const command = line.substring(0, 1);
+        // console.log("data: ", { line, data, command });
 
-
-
-
-      // console.log("data: ", { line, data, command });
+        if (command === '0') {
+          content += data;
+          // console.log("AI response:", content);
 
-
-
-
+          //content \n\n should be real line break when message is displayed
+          onResponse(messageId, content.replace(/\\n/g, '\n'), false);
+        } else if (command === 'd') {
+          // console.log("AI usage:", JSON.parse(line.substring(2)));
+          done = true;
+          break;
+        } else if (command === '9') {
+          // console.log("tool call:", JSON.parse(line.substring(2)));
+          // console.log("tools", tools);
+          const toolInvocation = JSON.parse(line.substring(2));
+          toolInvocations.push(toolInvocation);
 
-
-
-
-
-
-
-
-
-
-
-
+          // Find the tool and execute it if it has an execute function
+          const tool = tools.find(t => t.name === toolInvocation.toolName);
+          // console.log("tool", tool);
+          if (tool && tool.execute) {
+            try {
+              const result = await tool.execute(toolInvocation.args);
+              const toolResult = { success: true, result: result }
+              // Add the tool result as a message
+              messages.push({
+                id: Math.random().toString(36).substring(3),
+                role: "function",
+                content: JSON.stringify(toolResult),
+                toolInvocations: [{
+                  args: toolInvocation.args,
+                  toolName: toolInvocation.toolName,
+                }]
+              });
+            } catch (error) {
+              console.error(`Error executing tool ${toolInvocation.toolName}:`, error);
             }
+          }
         }
+      }
     }
-
+    // Wait for 5ms to avoid blocking the main thread
+    await new Promise(resolve => setTimeout(resolve, 5));
+  }
+  onResponse(messageId, content.replace(/\\n/g, '\n'), true, toolInvocations);
 }