@rimori/client 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/components/CRUDModal.js +0 -1
- package/dist/components/ai/Assistant.d.ts +9 -0
- package/dist/components/ai/Assistant.js +59 -0
- package/dist/components/ai/Avatar.d.ts +11 -0
- package/dist/components/ai/Avatar.js +39 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/MessageSender.js +86 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/ai/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/ai/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/ai/utils.d.ts +6 -0
- package/dist/components/ai/utils.js +14 -0
- package/dist/components/audio/Playbutton.js +4 -5
- package/dist/components/avatar/Assistant.d.ts +9 -0
- package/dist/components/avatar/Assistant.js +59 -0
- package/dist/components/avatar/Avatar.d.ts +12 -0
- package/dist/components/avatar/Avatar.js +42 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/AudioInputField.js +38 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/CircleAudioAvatar.js +59 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.d.ts +19 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/MessageSender.js +84 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.d.ts +25 -0
- package/dist/components/avatar/EmbeddedAssistent/TTS/Player.js +180 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.d.ts +7 -0
- package/dist/components/avatar/EmbeddedAssistent/VoiceRecoder.js +45 -0
- package/dist/components/avatar/utils.d.ts +6 -0
- package/dist/components/avatar/utils.js +14 -0
- package/dist/components.d.ts +9 -0
- package/dist/components.js +10 -0
- package/dist/controller/AIController.d.ts +4 -3
- package/dist/controller/AIController.js +32 -8
- package/dist/controller/ObjectController.d.ts +2 -2
- package/dist/controller/ObjectController.js +4 -5
- package/dist/controller/SettingsController.d.ts +3 -1
- package/dist/controller/SettingsController.js +9 -0
- package/dist/controller/SharedContentController.js +6 -6
- package/dist/controller/SidePluginController.d.ts +14 -0
- package/dist/{plugin/VoiceController.js → controller/SidePluginController.js} +18 -15
- package/dist/controller/VoiceController.js +1 -1
- package/dist/core.d.ts +9 -0
- package/dist/core.js +10 -0
- package/dist/hooks/UseChatHook.js +2 -2
- package/dist/index.d.ts +3 -2
- package/dist/index.js +4 -2
- package/dist/plugin/PluginController.d.ts +4 -12
- package/dist/plugin/PluginController.js +43 -70
- package/dist/plugin/RimoriClient.d.ts +87 -27
- package/dist/plugin/RimoriClient.js +101 -67
- package/dist/plugin/fromRimori/EventBus.d.ts +98 -0
- package/dist/plugin/fromRimori/EventBus.js +240 -0
- package/dist/providers/PluginProvider.d.ts +1 -0
- package/dist/providers/PluginProvider.js +64 -12
- package/dist/worker/WorkerSetup.d.ts +6 -0
- package/dist/worker/WorkerSetup.js +79 -0
- package/package.json +16 -3
- package/src/components/CRUDModal.tsx +1 -3
- package/src/components/ai/Assistant.tsx +96 -0
- package/src/components/ai/Avatar.tsx +61 -0
- package/src/components/ai/EmbeddedAssistent/AudioInputField.tsx +64 -0
- package/src/components/ai/EmbeddedAssistent/CircleAudioAvatar.tsx +75 -0
- package/src/components/ai/EmbeddedAssistent/TTS/MessageSender.ts +91 -0
- package/src/components/ai/EmbeddedAssistent/TTS/Player.ts +192 -0
- package/src/components/ai/EmbeddedAssistent/VoiceRecoder.tsx +56 -0
- package/src/components/ai/utils.ts +23 -0
- package/src/components/audio/Playbutton.tsx +4 -5
- package/src/components.ts +10 -0
- package/src/controller/AIController.ts +84 -60
- package/src/controller/ObjectController.ts +4 -6
- package/src/controller/SettingsController.ts +10 -1
- package/src/controller/SharedContentController.ts +6 -6
- package/src/controller/SidePluginController.ts +36 -0
- package/src/controller/VoiceController.ts +1 -1
- package/src/core.ts +10 -0
- package/src/hooks/UseChatHook.ts +2 -2
- package/src/index.ts +4 -2
- package/src/plugin/PluginController.ts +46 -76
- package/src/plugin/RimoriClient.ts +151 -76
- package/src/plugin/fromRimori/EventBus.ts +301 -0
- package/src/plugin/fromRimori/readme.md +2 -0
- package/src/providers/PluginProvider.tsx +70 -14
- package/src/worker/WorkerSetup.ts +80 -0
- package/dist/CRUDModal.d.ts +0 -16
- package/dist/CRUDModal.js +0 -31
- package/dist/MarkdownEditor.d.ts +0 -8
- package/dist/MarkdownEditor.js +0 -46
- package/dist/audio/Playbutton.d.ts +0 -14
- package/dist/audio/Playbutton.js +0 -73
- package/dist/components/hooks/UseChatHook.d.ts +0 -15
- package/dist/components/hooks/UseChatHook.js +0 -21
- package/dist/plugin/AIController copy.d.ts +0 -22
- package/dist/plugin/AIController copy.js +0 -68
- package/dist/plugin/AIController.d.ts +0 -22
- package/dist/plugin/AIController.js +0 -68
- package/dist/plugin/ObjectController.d.ts +0 -34
- package/dist/plugin/ObjectController.js +0 -77
- package/dist/plugin/SettingController.d.ts +0 -13
- package/dist/plugin/SettingController.js +0 -55
- package/dist/plugin/VoiceController.d.ts +0 -2
- package/dist/providers/EventEmitter.d.ts +0 -11
- package/dist/providers/EventEmitter.js +0 -41
- package/dist/providers/EventEmitterContext.d.ts +0 -6
- package/dist/providers/EventEmitterContext.js +0 -19
- package/dist/utils/DifficultyConverter.d.ts +0 -3
- package/dist/utils/DifficultyConverter.js +0 -7
- package/dist/utils/constants.d.ts +0 -4
- package/dist/utils/constants.js +0 -12
- package/dist/utils/plugin/Client.d.ts +0 -72
- package/dist/utils/plugin/Client.js +0 -118
- package/dist/utils/plugin/PluginController.d.ts +0 -36
- package/dist/utils/plugin/PluginController.js +0 -119
- package/dist/utils/plugin/PluginUtils.d.ts +0 -2
- package/dist/utils/plugin/PluginUtils.js +0 -23
- package/dist/utils/plugin/RimoriClient.d.ts +0 -72
- package/dist/utils/plugin/RimoriClient.js +0 -118
- package/dist/utils/plugin/ThemeSetter.d.ts +0 -1
- package/dist/utils/plugin/ThemeSetter.js +0 -13
- package/dist/utils/plugin/WhereClauseBuilder.d.ts +0 -24
- package/dist/utils/plugin/WhereClauseBuilder.js +0 -79
- package/dist/utils/plugin/providers/EventEmitter.d.ts +0 -11
- package/dist/utils/plugin/providers/EventEmitter.js +0 -41
- package/dist/utils/plugin/providers/EventEmitterContext.d.ts +0 -6
- package/dist/utils/plugin/providers/EventEmitterContext.js +0 -19
- package/dist/utils/plugin/providers/PluginProvider.d.ts +0 -8
- package/dist/utils/plugin/providers/PluginProvider.js +0 -49
- package/src/providers/EventEmitter.ts +0 -48
- package/src/providers/EventEmitterContext.tsx +0 -27
- package/src/utils/constants.ts +0 -18
package/src/components/ai/EmbeddedAssistent/TTS/Player.ts
ADDED

@@ -0,0 +1,192 @@
+export class ChunkedAudioPlayer {
+
+  private audioContext!: AudioContext;
+  private chunkQueue: ArrayBuffer[] = [];
+  private isPlaying = false;
+  private analyser!: AnalyserNode;
+  private dataArray!: Uint8Array;
+  private shouldMonitorLoudness = true;
+  private isMonitoring = false;
+  private handle = 0;
+  private volume = 1.0;
+  private loudnessCallback: (value: number) => void = () => { };
+  private currentIndex = 0;
+  private startedPlaying = false;
+
+  constructor() {
+    this.init();
+  }
+
+  private init(): void {
+    this.audioContext = new AudioContext();
+    this.analyser = this.audioContext.createAnalyser();
+    this.analyser.fftSize = 256; // Set the FFT size (smaller values provide faster updates, larger ones give better resolution)
+    const bufferLength = this.analyser.frequencyBinCount;
+    this.dataArray = new Uint8Array(bufferLength); // Array to hold frequency data
+  }
+
+  public setOnLoudnessChange(callback: (value: number) => void) {
+    this.loudnessCallback = callback;
+  }
+
+  public setVolume(volume: number) {
+    this.volume = volume;
+  }
+
+  public async addChunk(chunk: ArrayBuffer, position: number): Promise<void> {
+    console.log('Adding chunk', position, chunk);
+    this.chunkQueue[position] = chunk;
+    // console.log("received chunk", {
+    //   chunkQueue: this.chunkQueue.length,
+    //   isPlaying: this.isPlaying,
+    // })
+
+    if (position === 0 && !this.startedPlaying) {
+      this.startedPlaying = true;
+      this.playChunks();
+    }
+  }
+
+  private playChunks(): void {
+    // console.log({ isPlaying: this.isPlaying });
+    if (this.isPlaying) return;
+    if (!this.chunkQueue[this.currentIndex]) {
+      // wait until the correct chunk arrives
+      setTimeout(() => this.playChunks(), 10);
+    }
+    this.isPlaying = true;
+
+    this.playChunk(this.chunkQueue[this.currentIndex]).then(() => {
+      this.isPlaying = false;
+      this.currentIndex++;
+      if (this.chunkQueue[this.currentIndex]) {
+        this.shouldMonitorLoudness = true;
+        this.playChunks();
+      } else {
+        // console.log('Playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
+        setTimeout(() => {
+          // console.log('Check again if really playback finished', { currentIndex: this.currentIndex, chunkQueue: this.chunkQueue });
+          if (this.chunkQueue.length > this.currentIndex) {
+            this.playChunks();
+          } else {
+            this.startedPlaying = false;
+            this.shouldMonitorLoudness = false;
+          }
+        }, 1000);
+      }
+    });
+  }
+
+  public stopPlayback(): void {
+    // console.log('Stopping playback');
+    // Implement logic to stop the current playback
+    this.isPlaying = false;
+    this.chunkQueue = [];
+    this.startedPlaying = false;
+    this.shouldMonitorLoudness = false;
+    cancelAnimationFrame(this.handle);
+  }
+
+  private playChunk(chunk: ArrayBuffer): Promise<void> {
+    console.log({queue: this.chunkQueue})
+    if (!chunk) {
+      return Promise.resolve();
+    }
+
+    // console.log('Playing chunk', chunk);
+    return new Promise((resolve) => {
+      const source = this.audioContext.createBufferSource();
+      this.audioContext.decodeAudioData(chunk.slice(0)).then((audioBuffer) => {
+        source.buffer = audioBuffer;
+
+        // Create a GainNode for volume control
+        const gainNode = this.audioContext.createGain();
+        gainNode.gain.value = this.volume;
+
+        // Connect the source to the GainNode, then to the analyser node, then to the destination (speakers)
+        source.connect(gainNode);
+        gainNode.connect(this.analyser);
+        this.analyser.connect(this.audioContext.destination);
+
+        source.start(0);
+        // console.log('Playing chunk', this.currentIndex);
+        gainNode.gain.value = this.volume;
+        source.onended = () => {
+          // console.log('Chunk ended');
+          resolve();
+        };
+
+        // Start monitoring loudness only once
+        if (!this.isMonitoring) {
+          this.isMonitoring = true;
+          this.shouldMonitorLoudness = true;
+          this.monitorLoudness();
+        }
+      });
+    });
+  }
+
+  async playAgain(): Promise<void> {
+    console.log('Playing again');
+    if (this.chunkQueue.length > 0 && !this.isPlaying) {
+      this.playChunks();
+    }
+  }
+
+  private monitorLoudness(): void {
+    // Stop monitoring when the flag is false
+    if (!this.shouldMonitorLoudness) {
+      // console.log('Loudness monitoring stopped.');
+      cancelAnimationFrame(this.handle);
+      this.loudnessCallback(0);
+      return;
+    }
+
+    // Get the time domain data from the analyser (this is a snapshot of the waveform)
+    this.analyser.getByteTimeDomainData(this.dataArray);
+
+    // Calculate the RMS (root mean square) of the waveform values to get the perceived loudness
+    let sum = 0;
+    for (let i = 0; i < this.dataArray.length; i++) {
+      const value = this.dataArray[i] / 128.0 - 1.0; // Normalize between -1 and 1
+      sum += value * value;
+    }
+
+    const rms = Math.sqrt(sum / this.dataArray.length);
+
+    // Handle the case where RMS is 0 to avoid log10(0)
+    if (rms === 0) {
+      // console.log('Current loudness: Silent');
+    } else {
+      let loudnessInDb = 20 * Math.log10(rms); // Convert to dB
+      // console.log('Current loudness:' + loudnessInDb);
+      const minDb = -57;
+      const maxDb = -15;
+
+      if (loudnessInDb < minDb) {
+        loudnessInDb = minDb;
+      }
+      if (loudnessInDb > maxDb) {
+        loudnessInDb = maxDb;
+      }
+
+      const loudnessScale = ((loudnessInDb - minDb) / (maxDb - minDb)) * 100;
+      // console.log("root:corrent loudness", loudnessScale);
+
+      this.loudnessCallback(loudnessScale);
+    }
+
+    // Call this method again at regular intervals if you want continuous loudness monitoring
+    this.handle = requestAnimationFrame(() => this.monitorLoudness());
+  }
+  public reset() {
+    // console.log('Resetting player');
+    this.stopPlayback();
+    this.currentIndex = 0;
+    this.shouldMonitorLoudness = true;
+    //reset to the beginning when the class gets initialized
+    this.isMonitoring = false;
+    this.isPlaying = false;
+    this.init();
+  }
+}
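For orientation, a minimal usage sketch of the new ChunkedAudioPlayer (not part of the published diff). It assumes chunks arrive from some streaming TTS source; `fetchTtsChunks` and `updateAvatarPulse` are hypothetical application helpers, and the import path mirrors the file location above.

// Illustrative sketch only; helper functions below are assumptions, not package APIs.
import { ChunkedAudioPlayer } from './Player';

declare function fetchTtsChunks(text: string): AsyncIterable<{ chunk: ArrayBuffer; position: number }>;
declare function updateAvatarPulse(loudness: number): void;

const player = new ChunkedAudioPlayer();
player.setVolume(0.8);
// The callback receives a 0-100 loudness value and 0 once monitoring stops.
player.setOnLoudnessChange((loudness) => updateAvatarPulse(loudness));

async function speak(text: string) {
  player.reset();
  for await (const { chunk, position } of fetchTtsChunks(text)) {
    // Chunks may arrive out of order; playback starts once position 0 is present.
    await player.addChunk(chunk, position);
  }
}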
package/src/components/ai/EmbeddedAssistent/VoiceRecoder.tsx
ADDED

@@ -0,0 +1,56 @@
+import { useState, useRef, forwardRef, useImperativeHandle } from 'react';
+import { FaMicrophone } from 'react-icons/fa6';
+import { usePlugin } from '../../../components';
+
+interface Props {
+  iconSize?: string;
+  className?: string;
+  onVoiceRecorded: (message: string) => void;
+}
+
+export const VoiceRecorder = forwardRef(({ onVoiceRecorded, iconSize, className }: Props, ref) => {
+  const [isRecording, setIsRecording] = useState(false);
+  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
+  const audioChunksRef = useRef<Blob[]>([]);
+  const { llm } = usePlugin();
+
+  const startRecording = async () => {
+    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+    const mediaRecorder = new MediaRecorder(stream);
+    mediaRecorderRef.current = mediaRecorder;
+
+    mediaRecorder.ondataavailable = (event) => {
+      audioChunksRef.current.push(event.data);
+    };
+
+    mediaRecorder.onstop = async () => {
+      const audioBlob = new Blob(audioChunksRef.current);
+      audioChunksRef.current = [];
+
+      onVoiceRecorded(await llm.getTextFromVoice(audioBlob));
+    };
+
+    mediaRecorder.start();
+    setIsRecording(true);
+  };
+
+  const stopRecording = () => {
+    if (mediaRecorderRef.current) {
+      mediaRecorderRef.current.stop();
+      setIsRecording(false);
+    }
+  };
+
+  useImperativeHandle(ref, () => ({
+    startRecording,
+    stopRecording,
+  }));
+
+  return (
+    <div className={className}>
+      <button onClick={isRecording ? stopRecording : startRecording}>
+        <FaMicrophone size={iconSize} className={"h-7 w-7 mr-2 " + (isRecording ? "text-red-600" : "")} />
+      </button>
+    </div>
+  );
+});
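A hypothetical call site for the new VoiceRecorder component (not in the diff): the ref exposes startRecording/stopRecording via useImperativeHandle, and the transcript arrives through onVoiceRecorded. The import path matches the file as published; the ref shape is inferred from the component above.

// Sketch only; not part of the package.
import { useRef } from 'react';
import { VoiceRecorder } from './VoiceRecoder';

export function DictationButton() {
  const recorder = useRef<{ startRecording: () => void; stopRecording: () => void } | null>(null);

  return (
    <VoiceRecorder
      ref={recorder}
      iconSize="1.5em"
      onVoiceRecorded={(text) => console.log('transcript:', text)}
    />
  );
}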
package/src/components/ai/utils.ts
ADDED

@@ -0,0 +1,23 @@
+export interface FirstMessages {
+  instructions?: string;
+  userMessage: string;
+  assistantMessage?: string;
+}
+
+export function getFirstMessages(instructions: FirstMessages): any[] {
+  const messages = [];
+
+  if (instructions.instructions) {
+    messages.push({ id: '1', role: 'system', content: instructions.instructions });
+  }
+  if (instructions.userMessage) {
+    messages.push({ id: '2', role: 'user', content: instructions.userMessage });
+  }
+  if (instructions.assistantMessage) {
+    messages.push({ id: '3', role: 'assistant', content: instructions.assistantMessage });
+  }
+
+  console.log("getFirstMessages", messages);
+
+  return messages;
+}
package/src/components/audio/Playbutton.tsx

@@ -2,7 +2,7 @@ import React, { useState, useEffect } from 'react';
 import { FaPlayCircle, FaStopCircle } from "react-icons/fa";
 import { usePlugin } from "../../providers/PluginProvider";
 import { Spinner } from '../Spinner';
-import {
+import { EventBus } from '../../plugin/fromRimori/EventBus';
 
 type AudioPlayerProps = {
   text: string;
@@ -34,12 +34,11 @@ export const AudioPlayer: React.FC<AudioPlayerProps> = ({
   const [speed, setSpeed] = useState(initialSpeed);
   const [isPlaying, setIsPlaying] = useState(false);
   const [isLoading, setIsLoading] = useState(false);
-  const {
-  const emitter = EmitterSingleton;
+  const { llm } = usePlugin();
 
   useEffect(() => {
     if (!playListenerEvent) return;
-
+    EventBus.on(playListenerEvent, () => togglePlayback());
   }, [playListenerEvent]);
 
   useEffect(() => {
@@ -53,7 +52,7 @@ export const AudioPlayer: React.FC<AudioPlayerProps> = ({
   const generateAudio = async () => {
     setIsLoading(true);
 
-    const blob = await
+    const blob = await llm.getVoice(text, voice || (language ? "aws_default" : "openai_alloy"), 1, language);
     setAudioUrl(URL.createObjectURL(blob));
     setIsLoading(false);
   };
package/src/components.ts
ADDED

@@ -0,0 +1,10 @@
+// React components and hooks exports
+export * from "./components/MarkdownEditor";
+export * from "./components/CRUDModal";
+export * from "./components/Spinner";
+export * from "./components/audio/Playbutton";
+export * from "./hooks/UseChatHook";
+export * from "./plugin/ThemeSetter";
+export * from "./providers/PluginProvider";
+export * from "./components/ai/Avatar";
+export * from "./components/ai/Assistant";
package/src/controller/AIController.ts

@@ -1,87 +1,111 @@
-import { env } from "../utils/constants";
-
 export interface ToolInvocation {
-
-
+  toolName: string;
+  args: Record<string, string>;
 }
 
 export interface Tool {
+  name: string;
+  description: string;
+  parameters: {
     name: string;
     description: string;
-
-
-
-  description: string;
-  }[];
+    type: "string" | "number" | "boolean";
+  }[];
+  execute?: <T = Record<string, string | boolean | number>>(args: T) => Promise<any> | void;
 }
 
 export interface Message {
-
-
-
-
+  id: string;
+  role: string;
+  content: string;
+  toolInvocations?: ToolInvocation[];
 }
 
-export async function generateText(messages: Message[], tools: Tool[], token: string) {
-
-
-
-
-
+export async function generateText(supabaseUrl: string, messages: Message[], tools: Tool[], token: string) {
+  const response = await fetch(`${supabaseUrl}/functions/v1/llm`, {
+    method: 'POST',
+    body: JSON.stringify({ messages, tools }),
+    headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' }
+  });
 
-
+  return await response.json();
 }
 
 export type OnLLMResponse = (id: string, response: string, finished: boolean, toolInvocations?: ToolInvocation[]) => void;
 
-export async function streamChatGPT(messages: Message[], tools: Tool[], onResponse: OnLLMResponse, token: string) {
-
-
-
-
-
-
+export async function streamChatGPT(supabaseUrl: string, messages: Message[], tools: Tool[], onResponse: OnLLMResponse, token: string) {
+  const messageId = Math.random().toString(36).substring(3);
+  const response = await fetch(`${supabaseUrl}/functions/v1/llm`, {
+    method: 'POST',
+    body: JSON.stringify({ messages, tools, stream: true }),
+    headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' }
+  });
 
-
-
-
-
+  if (!response.body) {
+    console.error('No response body.');
+    return;
+  }
+
+  const reader = response.body.getReader();
+  const decoder = new TextDecoder('utf-8');
 
-
-
+  let content = "";
+  let done = false;
+  let toolInvocations: ToolInvocation[] = [];
+  while (!done) {
+    const { value } = await reader.read();
 
-
-
-
-  while (!done) {
-    const { value } = await reader.read();
+    if (value) {
+      const chunk = decoder.decode(value, { stream: true });
+      const lines = chunk.split('\n').filter(line => line.trim() !== '');
 
-
-
-
+      for (const line of lines) {
+        const data = line.substring(3, line.length - 1);
+        const command = line.substring(0, 1);
+        // console.log("data: ", { line, data, command });
 
-
-
-
-      // console.log("data: ", { line, data, command });
+        if (command === '0') {
+          content += data;
+          // console.log("AI response:", content);
 
-
-
-
+          //content \n\n should be real line break when message is displayed
+          onResponse(messageId, content.replace(/\\n/g, '\n'), false);
+        } else if (command === 'd') {
+          // console.log("AI usage:", JSON.parse(line.substring(2)));
+          done = true;
+          break;
+        } else if (command === '9') {
+          // console.log("tool call:", JSON.parse(line.substring(2)));
+          // console.log("tools", tools);
+          const toolInvocation = JSON.parse(line.substring(2));
+          toolInvocations.push(toolInvocation);
 
-
-
-
-
-
-
-
-
-
-
-
+          // Find the tool and execute it if it has an execute function
+          const tool = tools.find(t => t.name === toolInvocation.toolName);
+          // console.log("tool", tool);
+          if (tool && tool.execute) {
+            try {
+              const result = await tool.execute(toolInvocation.args);
+              const toolResult = { success: true, result: result }
+              // Add the tool result as a message
+              messages.push({
+                id: Math.random().toString(36).substring(3),
+                role: "function",
+                content: JSON.stringify(toolResult),
+                toolInvocations: [{
+                  args: toolInvocation.args,
+                  toolName: toolInvocation.toolName,
+                }]
+              });
+            } catch (error) {
+              console.error(`Error executing tool ${toolInvocation.toolName}:`, error);
             }
+          }
         }
+      }
     }
-
+    // Wait for 5ms to avoid blocking the main thread
+    await new Promise(resolve => setTimeout(resolve, 5));
+  }
+  onResponse(messageId, content.replace(/\\n/g, '\n'), true, toolInvocations);
 }
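To illustrate the reworked AIController API (a sketch under assumptions, not from the package): streamChatGPT now takes the Supabase project URL as its first argument, and a tool declared with an execute handler is run locally when the stream reports a tool invocation. The URL, token, weather tool, and the import path (which assumes these symbols are re-exported from the package entry) are placeholders.

// Hypothetical example; values and import path are assumptions.
import { streamChatGPT, Tool, Message } from "@rimori/client";

const weatherTool: Tool = {
  name: "get_weather",
  description: "Look up the current weather for a city",
  parameters: [{ name: "city", description: "City name", type: "string" }],
  // Runs locally when the stream emits a tool call for this tool.
  execute: async (args: any) => ({ city: args.city, temperatureC: 21 }),
};

async function askWeather() {
  const messages: Message[] = [{ id: "1", role: "user", content: "What is the weather in Malmö?" }];
  await streamChatGPT(
    "https://<project>.supabase.co", // placeholder Supabase project URL
    messages,
    [weatherTool],
    (id, text, finished, toolInvocations) => {
      if (finished) console.log(id, text, toolInvocations);
    },
    "<access token>" // placeholder JWT
  );
}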
package/src/controller/ObjectController.ts

@@ -1,5 +1,3 @@
-import { env } from "../utils/constants";
-
 type PrimitiveType = 'string' | 'number' | 'boolean';
 
 // This is the type that can appear in the `type` property
@@ -35,8 +33,8 @@ export interface ObjectRequest {
   instructions: string;
 }
 
-export async function generateObject(request: ObjectRequest, token: string) {
-  return await fetch(`${
+export async function generateObject(supabaseUrl: string, request: ObjectRequest, token: string) {
+  return await fetch(`${supabaseUrl}/functions/v1/llm-object`, {
     method: 'POST',
     body: JSON.stringify({
       stream: false,
@@ -51,9 +49,9 @@ export async function generateObject(request: ObjectRequest, token: string) {
 // TODO adjust stream to work with object
 export type OnLLMResponse = (id: string, response: string, finished: boolean, toolInvocations?: any[]) => void;
 
-export async function streamObject(request: ObjectRequest, onResponse: OnLLMResponse, token: string) {
+export async function streamObject(supabaseUrl: string, request: ObjectRequest, onResponse: OnLLMResponse, token: string) {
   const messageId = Math.random().toString(36).substring(3);
-  const response = await fetch(`${
+  const response = await fetch(`${supabaseUrl}/functions/v1/llm-object`, {
     method: 'POST',
     body: JSON.stringify({
       stream: true,
package/src/controller/SettingsController.ts

@@ -3,9 +3,10 @@ import { LanguageLevel } from "../utils/difficultyConverter";
 
 type SettingsType = "user" | "system" | "plugin";
 
-export interface
+export interface UserInfo {
   motherTongue: string;
   languageLevel: LanguageLevel;
+  contextMenuOnSelect: boolean;
 }
 
 export interface SystemSettings {
@@ -44,6 +45,14 @@ export class SettingsController {
     await this.supabase.from("plugin_settings").upsert({ plugin_id: this.pluginId, settings });
   }
 
+  public async getUserInfo(): Promise<UserInfo> {
+    return this.getSettings<UserInfo>({
+      motherTongue: "sv",
+      languageLevel: "A1",
+      contextMenuOnSelect: true,
+    }, "user");
+  }
+
   /**
    * Get the settings for the plugin. T can be any type of settings, UserSettings or SystemSettings.
    * @param defaultSettings The default settings to use if no settings are found.
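A hypothetical call site for the new getUserInfo helper (not in the diff): it wraps getSettings with the defaults shown above ("sv", "A1", contextMenuOnSelect true) when the user has stored nothing yet. The import path and the `settings` instance are assumptions.

// Sketch only; assumes SettingsController is re-exported from the package entry
// and `settings` stands for the instance provided by the plugin client.
import { SettingsController } from "@rimori/client";
declare const settings: SettingsController;

settings.getUserInfo().then((info) => {
  console.log(info.motherTongue, info.languageLevel, info.contextMenuOnSelect);
});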
package/src/controller/SharedContentController.ts

@@ -24,14 +24,14 @@ export class SharedContentController {
     filter?: { column: string, value: string | number | boolean },
   ): Promise<R[]> {
     const queryParameter = { filter_column: filter?.column || null, filter_value: filter?.value || null, unread: true }
-    const { data: newAssignments } = await this.rimoriClient.rpc(type + "_entries", queryParameter)
+    const { data: newAssignments } = await this.rimoriClient.db.rpc(type + "_entries", queryParameter)
     console.log('newAssignments:', newAssignments);
 
     if ((newAssignments as any[]).length > 0) {
       return newAssignments as R[];
     }
     // generate new assignments
-    const { data: oldAssignments } = await this.rimoriClient.rpc(type + "_entries", { ...queryParameter, unread: false })
+    const { data: oldAssignments } = await this.rimoriClient.db.rpc(type + "_entries", { ...queryParameter, unread: false })
     console.log('oldAssignments:', oldAssignments);
     const reservedTopics = this.getReservedTopics(oldAssignments as BasicAssignment[]);
 
@@ -39,7 +39,7 @@ export class SharedContentController {
     if (!request.tool.keywords || !request.tool.topic) {
       throw new Error("topic or keywords not found in the request schema");
    }
-    const instructions = await this.rimoriClient.
+    const instructions = await this.rimoriClient.llm.getObject(request);
     console.log('instructions:', instructions);
 
     const preparedData = {
@@ -47,7 +47,7 @@ export class SharedContentController {
       ...instructions,
       keywords: this.purifyStringArray(instructions.keywords),
     };
-    return await this.rimoriClient.from(type).insert(preparedData).then(() => [preparedData] as R[]);
+    return await this.rimoriClient.db.from(type).insert(preparedData).then(() => [preparedData] as R[]);
   }
 
   private getReservedTopics(oldAssignments: BasicAssignment[]) {
@@ -62,10 +62,10 @@ export class SharedContentController {
   }
 
   public async getSharedContent<T extends BasicAssignment>(type: string, id: string): Promise<T> {
-    return await this.rimoriClient.from(type).select().eq('id', id).single() as unknown as T;
+    return await this.rimoriClient.db.from(type).select().eq('id', id).single() as unknown as T;
   }
 
   public async completeSharedContent(type: string, assignmentId: string) {
-    await this.rimoriClient.from(type + "_result").insert({ assignment_id: assignmentId });
+    await this.rimoriClient.db.from(type + "_result").insert({ assignment_id: assignmentId });
   }
 }
package/src/controller/SidePluginController.ts
ADDED

@@ -0,0 +1,36 @@
+import { SupabaseClient } from '@supabase/supabase-js';
+
+export interface Plugin {
+  id: string;
+  title: string;
+  icon_url: string;
+  website: string;
+  context_menu_actions: string;
+  plugin_pages: string;
+  sidebar_pages: string;
+  settings_page: string;
+  version: string;
+  external_hosted_url: string;
+}
+
+export async function getPlugins(supabase: SupabaseClient): Promise<Plugin[]> {
+  let { data, error } = await supabase.from('plugins').select('*');
+
+  if (error) {
+    console.error(error);
+    return [];
+  }
+
+  return (data || []).map((plugin: any) => ({
+    id: plugin.id,
+    title: plugin.title,
+    icon_url: plugin.icon_url,
+    website: plugin.website,
+    context_menu_actions: plugin.context_menu_actions,
+    plugin_pages: plugin.plugin_pages,
+    sidebar_pages: plugin.sidebar_pages,
+    settings_page: plugin.settings_page,
+    version: plugin.version,
+    external_hosted_url: plugin.external_hosted_url,
+  }));
+}
package/src/controller/VoiceController.ts

@@ -4,7 +4,7 @@ export async function getSTTResponse(supabase: SupabaseClient, audio: Blob) {
   const formData = new FormData();
   formData.append('file', audio);
 
-  return await supabase.functions.invoke('speech', { method: 'POST', body: formData }).then((
+  return await supabase.functions.invoke('speech', { method: 'POST', body: formData }).then(({ data }) => data.text);
 }
 
 export async function getTTSResponse(supabaseUrl: string, request: TTSRequest, token: string) {
package/src/core.ts
ADDED

@@ -0,0 +1,10 @@
+// Core functionality exports
+export * from "./controller/AIController";
+export * from "./controller/SharedContentController";
+export * from "./controller/SettingsController";
+export * from "./plugin/RimoriClient";
+export * from "./plugin/PluginController";
+export * from "./utils/difficultyConverter";
+export * from "./utils/PluginUtils";
+export * from "./worker/WorkerSetup";
+export * from "./plugin/fromRimori/EventBus";