@bytexbyte/nxtlinq-ai-agent-sdk 1.6.20 → 1.6.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/context/ChatBotContext.d.ts.map +1 -1
- package/dist/components/context/ChatBotContext.js +286 -1
- package/dist/components/types/ChatBotTypes.d.ts +8 -0
- package/dist/components/types/ChatBotTypes.d.ts.map +1 -1
- package/dist/components/ui/MessageInput.d.ts.map +1 -1
- package/dist/components/ui/MessageInput.js +11 -2
- package/dist/components/ui/MessageList.d.ts.map +1 -1
- package/dist/components/ui/MessageList.js +22 -2
- package/dist/components/ui/ModelSelector.d.ts.map +1 -1
- package/dist/components/ui/ModelSelector.js +14 -5
- package/dist/core/lib/textToSpeech.d.ts +22 -0
- package/dist/core/lib/textToSpeech.d.ts.map +1 -0
- package/dist/core/lib/textToSpeech.js +109 -0
- package/package.json +1 -1
- package/umd/nxtlinq-ai-agent.umd.js +138 -134
package/dist/components/context/ChatBotContext.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;
+{"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAU/B,OAAO,EAEL,kBAAkB,EAClB,YAAY,EAEb,MAAM,uBAAuB,CAAC;AAM/B,eAAO,MAAM,UAAU,0BAMtB,CAAC;AAEF,eAAO,MAAM,eAAe,EAAE,KAAK,CAAC,EAAE,CAAC,YAAY,CAk+ElD,CAAC"}
package/dist/components/context/ChatBotContext.js
@@ -7,6 +7,7 @@ import { createNxtlinqApi, setApiHosts } from '../../api/nxtlinq-api';
 import useLocalStorage from '../../core/lib/useLocalStorage';
 import useSessionStorage from '../../core/lib/useSessionStorage';
 import { useSpeechToTextFromMic } from '../../core/lib/useSpeechToTextFromMic';
+import textToBuffer, { getDefaultSpeechToken } from '../../core/lib/textToSpeech';
 import metakeepClient from '../../core/metakeepClient';
 import { sleep } from '../../core/utils';
 const MIC_ENABLED_SESSION_KEY = 'chatbot-mic-enabled';
@@ -72,6 +73,17 @@ isStopRecordingOnSend = false, }) => {
     const [isAITEnabling, setIsAITEnabling] = React.useState(false);
     const [isAwaitingMicGesture, setIsAwaitingMicGesture] = React.useState(false);
     const [autoSendEnabled, setAutoSendEnabled] = useLocalStorage('chatbot-auto-send-enabled', true);
+    // Speech related state
+    const [textToSpeechEnabled, setTextToSpeechEnabled] = useLocalStorage('chatbot-text-to-speech-enabled', false);
+    const [speechingIndex, setSpeechingIndex] = React.useState(undefined);
+    // Speech related refs
+    const audioCtxRef = React.useRef(null);
+    const audioSourceRef = React.useRef(null);
+    const audioElementRef = React.useRef(null);
+    const speechingRef = React.useRef(false);
+    const [isTtsProcessing, setIsTtsProcessing] = React.useState(false);
+    const [requiresGesture, setRequiresGesture] = React.useState(false);
+    const pendingTtsRef = React.useRef(null);
     // Use refs to get latest state values in hasPermission function
     const hitAddressRef = React.useRef(hitAddress);
     const aitRef = React.useRef(ait);
@@ -131,6 +143,183 @@ isStopRecordingOnSend = false, }) => {
             console.error('Error clearing expired token:', error);
         }
     }, []);
+    // Internal: Play text-to-speech with retry mechanism
+    const playTextToSpeechWithRetry = React.useCallback(async (text, messageIndex, attempt = 0) => {
+        if (attempt > 2) {
+            console.error('TTS playback failed after retries');
+            setIsTtsProcessing(false);
+            return;
+        }
+        const bufferData = await textToBuffer(text, getDefaultSpeechToken);
+        if (!bufferData)
+            throw new Error('Failed to get audio buffer');
+        const isMobileDevice = /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);
+        if (isMobileDevice) {
+            const blob = new Blob([bufferData], { type: 'audio/wav' });
+            const audioUrl = URL.createObjectURL(blob);
+            const audio = new Audio(audioUrl);
+            if ('setSinkId' in HTMLMediaElement.prototype) {
+                try {
+                    const devices = await navigator.mediaDevices?.enumerateDevices?.();
+                    const speakerDevice = devices?.find((d) => d.kind === 'audiooutput' && d.label.toLowerCase().includes('speaker'));
+                    if (speakerDevice)
+                        await audio.setSinkId(speakerDevice.deviceId);
+                    else
+                        await audio.setSinkId('default');
+                }
+                catch (err) {
+                    console.debug('Could not set audio sink:', err);
+                }
+            }
+            audio.volume = 1.0;
+            audioElementRef.current = audio;
+            speechingRef.current = true;
+            if (messageIndex !== undefined)
+                setSpeechingIndex(messageIndex);
+            // Track if onplay has been called to avoid clearing state multiple times
+            let playStarted = false;
+            const clearProcessingState = () => {
+                if (!playStarted) {
+                    playStarted = true;
+                    setIsTtsProcessing(false);
+                }
+            };
+            await new Promise(async (resolve, reject) => {
+                audio.onended = () => {
+                    speechingRef.current = false;
+                    setSpeechingIndex(undefined);
+                    URL.revokeObjectURL(audioUrl);
+                    audioElementRef.current = null;
+                    resolve();
+                };
+                audio.onerror = (error) => {
+                    speechingRef.current = false;
+                    setSpeechingIndex(undefined);
+                    URL.revokeObjectURL(audioUrl);
+                    audioElementRef.current = null;
+                    clearProcessingState();
+                    reject(error);
+                };
+                audio.onplay = () => {
+                    // Clear processing state when audio actually starts playing
+                    setRequiresGesture(false);
+                    clearProcessingState();
+                };
+                // Handle play() promise - some mobile browsers require user interaction
+                audio.play().then(() => {
+                    // Play promise resolved, but wait for onplay event to confirm actual playback
+                    // Don't clear requiresGesture here - let onplay handle it
+                    setTimeout(clearProcessingState, 100);
+                }).catch((playError) => {
+                    // Mark requires gesture and store pending text
+                    setRequiresGesture(true);
+                    pendingTtsRef.current = text;
+                    // Fallback: try WebAudio (AudioContext) playback on mobile
+                    try {
+                        if (audioCtxRef.current === null) {
+                            audioCtxRef.current = new AudioContext();
+                        }
+                        audioSourceRef.current = audioCtxRef.current.createBufferSource();
+                        if (!audioSourceRef.current) {
+                            throw playError;
+                        }
+                        audioCtxRef.current.decodeAudioData(bufferData.slice(0), (decodedBuffer) => {
+                            if (!audioSourceRef.current) {
+                                reject(playError);
+                                return;
+                            }
+                            audioSourceRef.current.buffer = decodedBuffer;
+                            audioSourceRef.current.connect(audioCtxRef.current.destination);
+                            speechingRef.current = true;
+                            clearProcessingState();
+                            audioSourceRef.current.onended = () => {
+                                speechingRef.current = false;
+                                setSpeechingIndex(undefined);
+                                audioSourceRef.current = null;
+                                // cleanup audio element blob URL
+                                URL.revokeObjectURL(audioUrl);
+                                audioElementRef.current = null;
+                                resolve();
+                            };
+                            try {
+                                // Ensure AudioContext is running before starting
+                                if (audioCtxRef.current?.state === 'suspended') {
+                                    audioCtxRef.current.resume().then(() => {
+                                        audioSourceRef.current?.start();
+                                        // Only clear requiresGesture after successfully starting
+                                        setRequiresGesture(false);
+                                    }).catch((resumeErr) => {
+                                        // AudioContext resume failed, keep requiresGesture true
+                                        reject(resumeErr);
+                                    });
+                                }
+                                else {
+                                    audioSourceRef.current.start();
+                                    // Only clear requiresGesture after successfully starting
+                                    setRequiresGesture(false);
+                                }
+                            }
+                            catch (e) {
+                                // start() failed, keep requiresGesture true
+                                reject(e);
+                            }
+                        }, (decodeErr) => {
+                            // If decode also fails, give up
+                            speechingRef.current = false;
+                            setSpeechingIndex(undefined);
+                            URL.revokeObjectURL(audioUrl);
+                            audioElementRef.current = null;
+                            clearProcessingState();
+                            reject(decodeErr);
+                        });
+                    }
+                    catch (fallbackErr) {
+                        // Play failed (e.g., user interaction required)
+                        speechingRef.current = false;
+                        setSpeechingIndex(undefined);
+                        URL.revokeObjectURL(audioUrl);
+                        audioElementRef.current = null;
+                        clearProcessingState();
+                        reject(fallbackErr);
+                    }
+                });
+            });
+            return;
+        }
+        if (audioCtxRef.current === null) {
+            audioCtxRef.current = new AudioContext();
+        }
+        audioSourceRef.current = audioCtxRef.current.createBufferSource();
+        if (!audioSourceRef.current)
+            return;
+        const buffer = await new Promise((resolve, reject) => {
+            if (audioCtxRef.current !== null && bufferData) {
+                audioCtxRef.current.decodeAudioData(bufferData, (decodedBuffer) => resolve(decodedBuffer), (error) => reject(error));
+            }
+            else {
+                reject(new Error('Audio context or buffer data is missing'));
+            }
+        });
+        if (!audioSourceRef.current.buffer) {
+            audioSourceRef.current.buffer = buffer;
+            audioSourceRef.current.connect(audioCtxRef.current.destination);
+            speechingRef.current = true;
+            if (messageIndex !== undefined)
+                setSpeechingIndex(messageIndex);
+            // Clear processing state when audio starts playing
+            setRequiresGesture(false);
+            setIsTtsProcessing(false);
+            await new Promise((resolve) => {
+                audioSourceRef.current.onended = () => {
+                    speechingRef.current = false;
+                    setSpeechingIndex(undefined);
+                    audioSourceRef.current = null;
+                    resolve();
+                };
+                audioSourceRef.current.start();
+            });
+        }
+    }, []);
     // Update refs when state changes
     React.useEffect(() => {
         hitAddressRef.current = hitAddress;
@@ -832,7 +1021,6 @@ isStopRecordingOnSend = false, }) => {
         }
         try {
            const payload = JSON.parse(atob(currentToken.split('.')[1]));
-           // Token expiration check is handled by backend, so we only check address match
            const address = payload.address;
            if (address !== currentHitAddress) {
                setNxtlinqAITServiceAccessToken('');
@@ -1386,6 +1574,14 @@ isStopRecordingOnSend = false, }) => {
                     window.location.href = redirectUrl;
                 }, 100);
             }
+            // Play text-to-speech for AI response if enabled
+            if (botResponse && botResponse.role === 'assistant' && textToSpeechEnabled) {
+                setTimeout(() => {
+                    playTextToSpeech(botResponse.content).catch(error => {
+                        console.error('Failed to play text-to-speech:', error);
+                    });
+                }, 100);
+            }
         }
         catch (error) {
             console.error('Failed to send message:', error);
@@ -1450,6 +1646,8 @@ isStopRecordingOnSend = false, }) => {
             return;
         if (!textInputRef.current.value.trim() || isLoading)
             return;
+        // Stop current speech playback when user sends new message
+        stopTextToSpeechAndReset();
         // Show loading message if AIT is still loading
         if (isAITLoading) {
             const loadingMessage = {
@@ -1486,6 +1684,62 @@ isStopRecordingOnSend = false, }) => {
             setMessages(prev => [...prev, errorMessage]);
         }
     };
+    // Stop text-to-speech (does not clear queue)
+    const stopTextToSpeech = React.useCallback(() => {
+        if (speechingRef.current) {
+            // Stop AudioContext source if exists
+            if (audioSourceRef.current) {
+                audioSourceRef.current.stop();
+                audioSourceRef.current.disconnect();
+                audioSourceRef.current = null;
+            }
+            // Stop HTML Audio element if exists
+            if (audioElementRef.current) {
+                audioElementRef.current.pause();
+                audioElementRef.current.currentTime = 0;
+                if (audioElementRef.current.src.startsWith('blob:')) {
+                    URL.revokeObjectURL(audioElementRef.current.src);
+                }
+                audioElementRef.current = null;
+            }
+            speechingRef.current = false;
+        }
+        setSpeechingIndex(undefined);
+    }, []);
+    // Stop text-to-speech (used when user sends new message)
+    const stopTextToSpeechAndReset = React.useCallback(() => {
+        stopTextToSpeech();
+        setRequiresGesture(false);
+        setIsTtsProcessing(false);
+        pendingTtsRef.current = null;
+    }, [stopTextToSpeech]);
+    // Play text-to-speech (simplified, no queue)
+    const playTextToSpeech = React.useCallback(async (text, messageIndex) => {
+        if (!textToSpeechEnabled || !text.trim())
+            return;
+        // Stop any ongoing speech
+        stopTextToSpeech();
+        // Show processing indicator
+        setIsTtsProcessing(true);
+        // Retry up to 2 times (3 attempts total)
+        let lastError;
+        for (let attempt = 0; attempt < 3; attempt++) {
+            try {
+                await playTextToSpeechWithRetry(text, messageIndex, attempt);
+                return; // Success, exit
+            }
+            catch (err) {
+                lastError = err;
+                if (attempt < 2) {
+                    // Continue to next attempt
+                    continue;
+                }
+            }
+        }
+        // All attempts failed
+        console.error('TTS playback failed after retries:', lastError);
+        setIsTtsProcessing(false);
+    }, [textToSpeechEnabled, stopTextToSpeech, playTextToSpeechWithRetry]);
     // Handle preset message
     const handlePresetMessage = (message) => {
         if (message.autoSend) {
@@ -1974,6 +2228,11 @@ isStopRecordingOnSend = false, }) => {
         transcript,
         textInputRef,
         autoSendEnabled,
+        // Speech related state
+        textToSpeechEnabled,
+        speechingIndex,
+        isTtsProcessing,
+        requiresGesture,
         // AI Model related state
         availableModels: effectiveAvailableModels,
         selectedModelIndex,
@@ -1993,6 +2252,7 @@ isStopRecordingOnSend = false, }) => {
         setSelectedModelIndex,
         setSuggestions,
         setAutoSendEnabled,
+        setTextToSpeechEnabled,
         // Functions
         connectWallet,
         signInWallet,
@@ -2013,6 +2273,31 @@ isStopRecordingOnSend = false, }) => {
         // AI Model related functions
         handleModelChange,
         getCurrentModel,
+        // Speech related functions
+        playTextToSpeech,
+        stopTextToSpeech,
+        retryTtsWithGesture: async () => {
+            try {
+                setRequiresGesture(false);
+                // Try to resume audio context
+                if (audioCtxRef.current) {
+                    try {
+                        await audioCtxRef.current.resume();
+                    }
+                    catch { }
+                }
+                const pending = pendingTtsRef.current;
+                if (pending && textToSpeechEnabled) {
+                    setIsTtsProcessing(true);
+                    await playTextToSpeech(pending);
+                    pendingTtsRef.current = null;
+                }
+            }
+            catch (err) {
+                console.error('Retry TTS with gesture failed:', err);
+                setIsTtsProcessing(false);
+            }
+        },
         // Additional properties for PermissionForm
         onSave: savePermissions,
         onConnectWallet: () => connectWallet(false),
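Taken together, the ChatBotContext.js changes above add a single playback pipeline: synthesize the assistant reply to an ArrayBuffer with textToBuffer(text, getDefaultSpeechToken), try an HTMLAudioElement first, and fall back to the Web Audio API when the browser's autoplay policy rejects play(), remembering the pending text so a later user tap can retry it. A minimal sketch of that pattern, for orientation only (playBuffer and onNeedsGesture are illustrative names, not code shipped in the package):

    // Illustrative TypeScript sketch of the playback/fallback flow above; not the SDK's code.
    async function playBuffer(buf: ArrayBuffer, onNeedsGesture: () => void): Promise<void> {
        // Preferred path: HTMLAudioElement over a blob URL.
        const url = URL.createObjectURL(new Blob([buf], { type: 'audio/wav' }));
        const audio = new Audio(url);
        audio.onended = () => URL.revokeObjectURL(url);
        try {
            await audio.play(); // rejects when autoplay is blocked (common on mobile)
            return;
        }
        catch {
            onNeedsGesture(); // UI can now show a "Tap to play voice" affordance
        }
        // Fallback path: decode and play through the Web Audio API.
        const ctx = new AudioContext();
        if (ctx.state === 'suspended') {
            await ctx.resume(); // may itself require a user gesture
        }
        const source = ctx.createBufferSource();
        source.buffer = await ctx.decodeAudioData(buf.slice(0));
        source.connect(ctx.destination);
        source.start();
    }
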
package/dist/components/types/ChatBotTypes.d.ts
@@ -86,6 +86,10 @@ export interface ChatBotContextType {
     transcript: string;
     textInputRef: React.RefObject<HTMLInputElement>;
     autoSendEnabled: boolean;
+    textToSpeechEnabled: boolean;
+    speechingIndex: number | undefined;
+    isTtsProcessing: boolean;
+    requiresGesture?: boolean;
     availableModels: AIModel[];
     selectedModelIndex: number;
     showModelSelector: boolean;
@@ -101,6 +105,7 @@ export interface ChatBotContextType {
     setSelectedModelIndex: (index: number) => void;
     setSuggestions: (suggestions: PresetMessage[]) => void;
     setAutoSendEnabled: (enabled: boolean) => void;
+    setTextToSpeechEnabled: (enabled: boolean) => void;
     connectWallet: (autoShowSignInMessage?: boolean) => Promise<string | false | undefined>;
     signInWallet: (autoShowSuccessMessage?: boolean) => Promise<void>;
     sendMessage: (content: string, retryCount?: number) => Promise<void>;
@@ -119,6 +124,9 @@ export interface ChatBotContextType {
     clearRecording: () => void;
     handleModelChange: (modelIndex: number) => void;
     getCurrentModel: () => AIModel;
+    playTextToSpeech: (text: string) => Promise<void>;
+    stopTextToSpeech: () => void;
+    retryTtsWithGesture?: () => Promise<void>;
     onSave: (newPermissions?: string[]) => Promise<void>;
     onConnectWallet: () => Promise<string | false | undefined>;
     onSignIn: () => Promise<void>;
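These interface additions are the public surface of the feature: any component rendered inside the provider can read and drive the speech state through useChatBot(), exactly as the SDK's own MessageInput and MessageList do in the hunks further below. A hedged sketch of a host-side control (the component and the root import path are assumptions; the SDK's own components import useChatBot from '../context/ChatBotContext'):

    import * as React from 'react';
    import { useChatBot } from '@bytexbyte/nxtlinq-ai-agent-sdk'; // assumed re-export path

    export const SpeechControls: React.FC = () => {
        const { messages, textToSpeechEnabled, setTextToSpeechEnabled, playTextToSpeech,
            stopTextToSpeech, isTtsProcessing, requiresGesture, retryTtsWithGesture } = useChatBot();
        const lastReply = [...messages].reverse().find(m => m.role === 'assistant');
        return (
            <div>
                <button onClick={() => setTextToSpeechEnabled(!textToSpeechEnabled)}>
                    {textToSpeechEnabled ? 'Voice replies: on' : 'Voice replies: off'}
                </button>
                {requiresGesture && retryTtsWithGesture && (
                    /* autoplay was blocked; this click satisfies the gesture requirement */
                    <button onClick={() => void retryTtsWithGesture()}>Tap to play voice</button>
                )}
                <button
                    disabled={!textToSpeechEnabled || isTtsProcessing || !lastReply}
                    onClick={() => lastReply && void playTextToSpeech(lastReply.content)}
                >
                    Replay last reply
                </button>
                <button onClick={stopTextToSpeech}>Stop</button>
            </div>
        );
    };
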
package/dist/components/types/ChatBotTypes.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"ChatBotTypes.d.ts","sourceRoot":"","sources":["../../../src/components/types/ChatBotTypes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAGD,MAAM,WAAW,OAAO;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACf;AACD,MAAM,WAAW,YAAY;IAC3B,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IACvC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IACjC,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC1D,cAAc,CAAC,EAAE,aAAa,EAAE,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACvC,cAAc,CAAC,EAAE,MAAM,OAAO,CAAC;QAC7B,KAAK,EAAE,MAAM,CAAC;KACf,GAAG,SAAS,CAAC,CAAC;IACf,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,EAAE,KAAK,CAAC,SAAS,CAAC;IAE3B,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;IAEzC,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAE5B,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAErC,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB,uBAAuB,CAAC,EAAE,MAAM,CAAC;IAEjC,YAAY,CAAC,EAAE,UAAU,GAAG,UAAU,CAAC;IACvC,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,kBAAkB;IAEjC,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,OAAO,CAAC;IACnB,MAAM,EAAE,OAAO,CAAC;IAChB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC;IAChB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,oBAAoB,EAAE,iBAAiB,EAAE,CAAC;IAC1C,kBAAkB,EAAE,OAAO,CAAC;IAC5B,oBAAoB,EAAE,OAAO,CAAC;IAC9B,YAAY,EAAE,OAAO,CAAC;IACtB,aAAa,EAAE,OAAO,CAAC;IACvB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,GAAG,CAAC;IAChB,eAAe,EAAE,OAAO,CAAC;IACzB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,YAAY,EAAE;QACZ,IAAI,EAAE,OAAO,CAAC;QACd,IAAI,EAAE,SAAS,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC;QAC/C,OAAO,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,CAAC;IACF,YAAY,EAAE,OAAO,CAAC;IACtB,oBAAoB,EAAE,OAAO,CAAC;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,KAAK,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC;IAChD,eAAe,EAAE,OAAO,CAAC;IAEzB,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,kBAAkB,EAAE,MAAM,CAAC;IAC3B,iBAAiB,EAAE,OAAO,CAAC;IAC3B,WAAW,EAAE,aAAa,EAAE,CAAC;IAG7B,aAAa,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACnC,qBAAqB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,uBAAuB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACjD,cAAc,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;IAChD,aAAa,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IAC3C,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,eAAe,EAAE,CAAC,YAAY,EAAE,GAAG,KAAK,IAAI,CAAC;IAE7C,qBAAqB,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAC/C,cAAc,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,IAAI,CAAC;IACvD,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;
+
{"version":3,"file":"ChatBotTypes.d.ts","sourceRoot":"","sources":["../../../src/components/types/ChatBotTypes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAGD,MAAM,WAAW,OAAO;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACf;AACD,MAAM,WAAW,YAAY;IAC3B,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IACvC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IACjC,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC1D,cAAc,CAAC,EAAE,aAAa,EAAE,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACvC,cAAc,CAAC,EAAE,MAAM,OAAO,CAAC;QAC7B,KAAK,EAAE,MAAM,CAAC;KACf,GAAG,SAAS,CAAC,CAAC;IACf,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,EAAE,KAAK,CAAC,SAAS,CAAC;IAE3B,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;IAEzC,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAE5B,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAErC,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB,uBAAuB,CAAC,EAAE,MAAM,CAAC;IAEjC,YAAY,CAAC,EAAE,UAAU,GAAG,UAAU,CAAC;IACvC,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,kBAAkB;IAEjC,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,OAAO,CAAC;IACnB,MAAM,EAAE,OAAO,CAAC;IAChB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC;IAChB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,oBAAoB,EAAE,iBAAiB,EAAE,CAAC;IAC1C,kBAAkB,EAAE,OAAO,CAAC;IAC5B,oBAAoB,EAAE,OAAO,CAAC;IAC9B,YAAY,EAAE,OAAO,CAAC;IACtB,aAAa,EAAE,OAAO,CAAC;IACvB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,GAAG,CAAC;IAChB,eAAe,EAAE,OAAO,CAAC;IACzB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,YAAY,EAAE;QACZ,IAAI,EAAE,OAAO,CAAC;QACd,IAAI,EAAE,SAAS,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC;QAC/C,OAAO,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,CAAC;IACF,YAAY,EAAE,OAAO,CAAC;IACtB,oBAAoB,EAAE,OAAO,CAAC;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,KAAK,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC;IAChD,eAAe,EAAE,OAAO,CAAC;IAEzB,mBAAmB,EAAE,OAAO,CAAC;IAC7B,cAAc,EAAE,MAAM,GAAG,SAAS,CAAC;IACnC,eAAe,EAAE,OAAO,CAAC;IACzB,eAAe,CAAC,EAAE,OAAO,CAAC;IAE1B,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,kBAAkB,EAAE,MAAM,CAAC;IAC3B,iBAAiB,EAAE,OAAO,CAAC;IAC3B,WAAW,EAAE,aAAa,EAAE,CAAC;IAG7B,aAAa,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACnC,qBAAqB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,uBAAuB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACjD,cAAc,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;IAChD,aAAa,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IAC3C,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,eAAe,EAAE,CAAC,YAAY,EAAE,GAAG,KAAK,IAAI,CAAC;IAE7C,qBAAqB,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAC/C,cAAc,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,IAAI,CAAC;IACvD,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,sBAAsB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAGnD,aAA
a,EAAE,CAAC,qBAAqB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,KAAK,GAAG,SAAS,CAAC,CAAC;IACxF,YAAY,EAAE,CAAC,sBAAsB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAClE,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACrE,YAAY,EAAE,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACpD,mBAAmB,EAAE,CAAC,OAAO,EAAE,aAAa,KAAK,IAAI,CAAC;IACtD,eAAe,EAAE,CAAC,cAAc,CAAC,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9D,SAAS,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;IAClD,uBAAuB,EAAE,CAAC,MAAM,EAAE,UAAU,GAAG,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC1E,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,QAAQ,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,UAAU,EAAE,CAAC,sBAAsB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,cAAc,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACpC,aAAa,EAAE,MAAM,IAAI,CAAC;IAC1B,cAAc,EAAE,MAAM,IAAI,CAAC;IAE3B,iBAAiB,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IAChD,eAAe,EAAE,MAAM,OAAO,CAAC;IAE/B,gBAAgB,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAClD,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,mBAAmB,CAAC,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAG1C,MAAM,EAAE,CAAC,cAAc,CAAC,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACrD,eAAe,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,KAAK,GAAG,SAAS,CAAC,CAAC;IAC3D,QAAQ,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9B,sBAAsB,EAAE,OAAO,CAAC;IAChC,cAAc,EAAE,CAAC,MAAM,EAAE,UAAU,GAAG,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IAGzB,KAAK,EAAE,YAAY,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;CACpB"}
package/dist/components/ui/MessageInput.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"MessageInput.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageInput.tsx"],"names":[],"mappings":"
+{"version":3,"file":"MessageInput.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageInput.tsx"],"names":[],"mappings":"AAQA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAI/B,eAAO,MAAM,YAAY,EAAE,KAAK,CAAC,EA0LhC,CAAC"}
package/dist/components/ui/MessageInput.js
@@ -4,11 +4,13 @@ import { css } from '@emotion/react';
 import MicIcon from '@mui/icons-material/Mic';
 import MicOffIcon from '@mui/icons-material/MicOff';
 import SendIcon from '@mui/icons-material/Send';
+import VolumeUpIcon from '@mui/icons-material/VolumeUp';
+import VolumeOffIcon from '@mui/icons-material/VolumeOff';
 import { IconButton, InputBase, Tooltip } from '@mui/material';
 import { useChatBot } from '../context/ChatBotContext';
 import { actionButton } from './styles/isolatedStyles';
 export const MessageInput = () => {
-    const { inputValue, setInputValue, isLoading, isAITLoading, handleSubmit, isMicEnabled, isAwaitingMicGesture, startRecording, stopRecording, textInputRef, autoSendEnabled, setAutoSendEnabled, props: { placeholder = 'Type a message...' } } = useChatBot();
+    const { inputValue, setInputValue, isLoading, isAITLoading, handleSubmit, isMicEnabled, isAwaitingMicGesture, startRecording, stopRecording, textInputRef, autoSendEnabled, setAutoSendEnabled, textToSpeechEnabled, setTextToSpeechEnabled, props: { placeholder = 'Type a message...' } } = useChatBot();
     const isDisabled = isLoading || isAITLoading;
     const inputPlaceholder = isAITLoading ? 'Loading wallet configuration...' : placeholder;
     const handleKeyPress = (e) => {
@@ -39,7 +41,14 @@ export const MessageInput = () => {
         border-top: 1px solid #eee !important;
     `, children: [_jsx(InputBase, { value: inputValue, onChange: (e) => setInputValue(e.target.value), onKeyPress: handleKeyPress, placeholder: inputPlaceholder, fullWidth: true, inputProps: {
             ref: textInputRef
-        }, endAdornment: _jsxs(_Fragment, { children: [
+        }, endAdornment: _jsxs(_Fragment, { children: [_jsx(Tooltip, { title: textToSpeechEnabled ? 'Text-to-speech enabled' : 'Text-to-speech disabled', children: _jsx(IconButton, { size: "small", onClick: (e) => {
+                        e.stopPropagation();
+                        setTextToSpeechEnabled(!textToSpeechEnabled);
+                    }, css: css `
+              padding: 4px !important;
+              margin-right: 4px !important;
+              color: ${textToSpeechEnabled ? '#1976d2' : '#9e9e9e'} !important;
+            `, children: textToSpeechEnabled ? _jsx(VolumeUpIcon, { fontSize: "small" }) : _jsx(VolumeOffIcon, { fontSize: "small" }) }) }), isMicEnabled && (_jsx(Tooltip, { title: autoSendEnabled ? 'Auto send enabled' : 'Auto send disabled', children: _jsx(IconButton, { size: "small", onClick: (e) => {
             e.stopPropagation();
             setAutoSendEnabled(!autoSendEnabled);
         }, css: css `
package/dist/components/ui/MessageList.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"MessageList.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageList.tsx"],"names":[],"mappings":"AAAA,sCAAsC;AACtC,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAoB/B,eAAO,MAAM,WAAW,EAAE,KAAK,CAAC,
+{"version":3,"file":"MessageList.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageList.tsx"],"names":[],"mappings":"AAAA,sCAAsC;AACtC,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAoB/B,eAAO,MAAM,WAAW,EAAE,KAAK,CAAC,EA2K/B,CAAC"}
package/dist/components/ui/MessageList.js
@@ -6,7 +6,7 @@ import { convertUrlsToLinks } from '../../core/utils/urlUtils';
 import { useChatBot } from '../context/ChatBotContext';
 import { messageListContainer, messageBubble, userMessage, messageContent, userMessageContent, retryMessageContent, chatbotButton, connectedButton, loadingIndicator, modelIndicator, modelBadge, modelDot } from './styles/isolatedStyles';
 export const MessageList = () => {
-    const { messages, isLoading, connectWallet, signInWallet, hitAddress, isAutoConnecting, isNeedSignInWithWallet, enableAIT, isAITLoading, isAITEnabling, sendMessage, permissions, availableModels } = useChatBot();
+    const { messages, isLoading, isTtsProcessing, requiresGesture, retryTtsWithGesture, connectWallet, signInWallet, hitAddress, isAutoConnecting, isNeedSignInWithWallet, enableAIT, isAITLoading, isAITEnabling, sendMessage, permissions, availableModels, serviceId } = useChatBot();
     const messagesEndRef = React.useRef(null);
     const scrollToBottom = () => {
         messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
@@ -47,6 +47,10 @@ export const MessageList = () => {
     const getModelDisplayName = (modelValue) => {
         if (!modelValue)
             return '';
+        // Special case: show "Adi" for specific serviceId
+        if (serviceId === 'e48fc2b9-a7d1-49e3-85cb-9d621a0bf774') {
+            return 'Adi';
+        }
         // Find model in the list returned by API
         const model = availableModels.find(m => m.value === modelValue);
         // Use label from API or fallback to value
@@ -75,5 +79,21 @@ export const MessageList = () => {
                             message.button === 'enableAIT' ?
                                 ((isAITLoading || isAITEnabling) ? 'Enabling...' :
                                     (message.metadata?.requiredPermission && permissions.includes(message.metadata.requiredPermission)) ? 'AIT Enabled' : 'Enable AIT Permissions') :
-                                message.button }) }))] }), message.role === 'assistant' && message.metadata?.model && (
+                                message.button }) }))] }), message.role === 'assistant' && message.metadata?.model && (_jsxs("div", { css: css `
+            ${modelIndicator}
+            gap: 8px !important;
+          `, children: [_jsxs("div", { css: modelBadge, children: [_jsx("span", { css: modelDot }), getModelDisplayName(message.metadata.model)] }), (isTtsProcessing || requiresGesture) && !isLoading && message.id === messages[messages.length - 1]?.id && (_jsxs("div", { css: css `
+              display: flex !important;
+              align-items: center !important;
+              font-size: 12px !important;
+              color: #666 !important;
+              gap: 4px !important;
+            `, children: [_jsx("span", { role: "img", "aria-hidden": "true", children: "\uD83D\uDD0A" }), _jsx("span", { children: requiresGesture ? (_jsx("button", { onClick: () => retryTtsWithGesture && retryTtsWithGesture(), css: css `
+                background: none !important;
+                border: none !important;
+                color: #1976d2 !important;
+                padding: 0 !important;
+                text-decoration: underline !important;
+                cursor: pointer !important;
+              `, children: "Tap to play voice" })) : 'Preparing voice reply...' })] }))] }))] }, message.id))), isLoading && (_jsx("div", { css: loadingIndicator, children: "Thinking..." })), _jsx("div", { ref: messagesEndRef })] }));
 };
package/dist/components/ui/ModelSelector.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"ModelSelector.d.ts","sourceRoot":"","sources":["../../../src/components/ui/ModelSelector.tsx"],"names":[],"mappings":"AAAA,sCAAsC;AACtC,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAI/B,eAAO,MAAM,aAAa,EAAE,KAAK,CAAC,
+{"version":3,"file":"ModelSelector.d.ts","sourceRoot":"","sources":["../../../src/components/ui/ModelSelector.tsx"],"names":[],"mappings":"AAAA,sCAAsC;AACtC,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAI/B,eAAO,MAAM,aAAa,EAAE,KAAK,CAAC,EAkJjC,CAAC"}
package/dist/components/ui/ModelSelector.js
@@ -4,11 +4,20 @@ import * as React from 'react';
 import { css } from '@emotion/react';
 import { useChatBot } from '../context/ChatBotContext';
 export const ModelSelector = () => {
-    const { availableModels, selectedModelIndex, handleModelChange } = useChatBot();
+    const { availableModels, selectedModelIndex, handleModelChange, serviceId } = useChatBot();
     // Safety check: ensure selectedModelIndex is within bounds
     const safeSelectedModelIndex = selectedModelIndex >= availableModels.length ? 0 : selectedModelIndex;
     // Get fallback model label (first available model or 'Unknown')
-
+    // Special case: show "Adi" for specific serviceId
+    const getDisplayLabel = (label) => {
+        if (serviceId === 'e48fc2b9-a7d1-49e3-85cb-9d621a0bf774') {
+            return 'Adi';
+        }
+        return label;
+    };
+    const fallbackModelLabel = availableModels.length > 0
+        ? getDisplayLabel(availableModels[0].label)
+        : 'Unknown';
     const [anchorEl, setAnchorEl] = React.useState(null);
     const open = Boolean(anchorEl);
     const handleClick = (event) => {
@@ -35,7 +44,7 @@ export const ModelSelector = () => {
         background-color: rgba(255, 255, 255, 0.1) !important;
         font-size: 12px !important;
         font-weight: 500 !important;
-      `, children: _jsx("span", { children: availableModels[0].label }) }));
+      `, children: _jsx("span", { children: getDisplayLabel(availableModels[0].label) }) }));
     }
     // If 2+ models, show full selector with dropdown
     return (_jsxs("div", { css: css `
@@ -56,7 +65,7 @@ export const ModelSelector = () => {
         &:hover {
             background-color: rgba(255, 255, 255, 0.2) !important;
         }
-      `, onClick: handleClick, title: "Change AI Model", children: [_jsx("span", { css: css `margin-right: 4px !important;`, children: availableModels[safeSelectedModelIndex]?.label || fallbackModelLabel }), _jsx("span", { css: css `font-size: 10px !important;`, children: "\u25BC" })] }), open && (_jsx("div", { css: css `
+      `, onClick: handleClick, title: "Change AI Model", children: [_jsx("span", { css: css `margin-right: 4px !important;`, children: getDisplayLabel(availableModels[safeSelectedModelIndex]?.label || fallbackModelLabel) }), _jsx("span", { css: css `font-size: 10px !important;`, children: "\u25BC" })] }), open && (_jsx("div", { css: css `
         position: absolute !important;
         top: 100% !important;
         left: 0 !important;
@@ -78,7 +87,7 @@ export const ModelSelector = () => {
         &:hover {
             background-color: ${index === safeSelectedModelIndex ? '#f0f0f0' : '#f8f9fa'} !important;
         }
-      `, onClick: (event) => handleMenuItemClick(event, index), children: model.label }, model.value))) })), open && (_jsx("div", { css: css `
+      `, onClick: (event) => handleMenuItemClick(event, index), children: getDisplayLabel(model.label) }, model.value))) })), open && (_jsx("div", { css: css `
         position: fixed !important;
         top: 0 !important;
         left: 0 !important;
package/dist/core/lib/textToSpeech.d.ts
@@ -0,0 +1,22 @@
+export interface SpeechTokenResponse {
+    authToken: string;
+    region: string;
+    error?: string;
+}
+export interface TextToSpeechConfig {
+    getToken: () => Promise<SpeechTokenResponse>;
+}
+/**
+ * Default getSpeechToken function that directly calls Azure token service
+ * Uses hardcoded SPEECH_KEY and SPEECH_REGION to get Azure Speech token
+ */
+export declare function getDefaultSpeechToken(): Promise<SpeechTokenResponse>;
+/**
+ * Get or refresh Azure Cognitive Services Speech token
+ */
+export declare function getTokenOrRefresh(getToken: () => Promise<SpeechTokenResponse>): Promise<SpeechTokenResponse>;
+/**
+ * Convert text to audio buffer using Azure Cognitive Services Speech SDK
+ */
+export default function textToBuffer(text: string, getToken: () => Promise<SpeechTokenResponse>): Promise<ArrayBuffer | undefined>;
+//# sourceMappingURL=textToSpeech.d.ts.map
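
As declared above, textToBuffer accepts any token supplier of type () => Promise<SpeechTokenResponse>, so getDefaultSpeechToken (which, per its own doc comment, relies on a hardcoded SPEECH_KEY and SPEECH_REGION) can be swapped for a backend-issued token. A hedged sketch of doing that (the deep import path and the /api/azure-speech-token route are assumptions, not part of the package):

    import textToBuffer, { SpeechTokenResponse } from '@bytexbyte/nxtlinq-ai-agent-sdk/dist/core/lib/textToSpeech'; // assumed path

    // Hypothetical token supplier that keeps the Azure key on the server.
    const getTokenFromBackend = async (): Promise<SpeechTokenResponse> => {
        const res = await fetch('/api/azure-speech-token');
        return (await res.json()) as SpeechTokenResponse; // { authToken, region, error? }
    };

    export async function synthesize(text: string): Promise<ArrayBuffer | undefined> {
        // Resolves to undefined when synthesis fails, matching the declared return type.
        return textToBuffer(text, getTokenFromBackend);
    }
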
package/dist/core/lib/textToSpeech.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"textToSpeech.d.ts","sourceRoot":"","sources":["../../../src/core/lib/textToSpeech.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,mBAAmB;IAClC,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,kBAAkB;IACjC,QAAQ,EAAE,MAAM,OAAO,CAAC,mBAAmB,CAAC,CAAC;CAC9C;AAQD;;;GAGG;AACH,wBAAsB,qBAAqB,IAAI,OAAO,CAAC,mBAAmB,CAAC,CAkC1E;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CACrC,QAAQ,EAAE,MAAM,OAAO,CAAC,mBAAmB,CAAC,GAC3C,OAAO,CAAC,mBAAmB,CAAC,CAsC9B;AAED;;GAEG;AACH,wBAA8B,YAAY,CACxC,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,MAAM,OAAO,CAAC,mBAAmB,CAAC,GAC3C,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC,CAmClC"}