ai-input-react 1.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +294 -0
- package/dist/index.d.mts +255 -0
- package/dist/index.d.ts +255 -0
- package/dist/index.js +633 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +628 -0
- package/dist/index.mjs.map +1 -0
- package/dist/styles.css +2 -0
- package/package.json +67 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,628 @@
|
|
|
1
|
+
import { useRef, useState, useCallback, useEffect } from 'react';
|
|
2
|
+
import { jsx, Fragment, jsxs } from 'react/jsx-runtime';
|
|
3
|
+
|
|
4
|
+
// src/hooks/useAiInput.ts
|
|
5
|
+
// Rate-limiter defaults: a 1-second cooldown between requests and at most
// 10 requests per rolling 60-second window.
var DEFAULT_OPTIONS = {
  cooldownMs: 1000,
  maxRequests: 10,
  windowMs: 60000
};
|
|
10
|
+
// Client-side sliding-window rate limiter hook.
//
// Tracks request timestamps in a ref (only those inside the last `windowMs`
// count) plus a cooldown deadline in state. Returns snapshot values
// (`canRequest`, `requestsRemaining`) computed at render time, so they are
// only as fresh as the last render; `forceUpdate` and the cooldown interval
// below exist to trigger those re-renders.
function useRateLimiter(options = {}) {
  // Shallow-merged per render; the useCallback deps below read its primitive
  // fields, so memoization is unaffected by the object identity changing.
  const config = { ...DEFAULT_OPTIONS, ...options };
  // Epoch-ms timestamps of requests made within the current window.
  const requestTimestamps = useRef([]);
  // Epoch-ms time at which the current cooldown expires (0 = no cooldown).
  const [cooldownEnd, setCooldownEnd] = useState(0);
  const [cooldownRemaining, setCooldownRemaining] = useState(0);
  // State whose only purpose is to force a re-render after ref mutations.
  const [, forceUpdate] = useState({});
  // Drops expired timestamps from the ref and returns how many requests are
  // still allowed in the window. NOTE(review): this mutates the ref during
  // render (called from the return object below) — impure by React's rules,
  // though deterministic for a given clock reading.
  const cleanupAndCount = useCallback(() => {
    const now = Date.now();
    const windowStart = now - config.windowMs;
    requestTimestamps.current = requestTimestamps.current.filter(
      (ts) => ts > windowStart
    );
    return config.maxRequests - requestTimestamps.current.length;
  }, [config.windowMs, config.maxRequests]);
  // Ticks cooldownRemaining down every 100ms while a cooldown is active;
  // each state update also re-renders, refreshing the derived snapshots.
  useEffect(() => {
    if (cooldownEnd <= Date.now()) {
      setCooldownRemaining(0);
      return;
    }
    const interval = setInterval(() => {
      const remaining = Math.max(0, cooldownEnd - Date.now());
      setCooldownRemaining(remaining);
      if (remaining === 0) {
        clearInterval(interval);
      }
    }, 100);
    return () => clearInterval(interval);
  }, [cooldownEnd]);
  // A request is allowed when the cooldown has elapsed AND the window still
  // has capacity.
  const canRequest = useCallback(() => {
    const now = Date.now();
    if (now < cooldownEnd) {
      return false;
    }
    return cleanupAndCount() > 0;
  }, [cooldownEnd, cleanupAndCount]);
  // Records a request: consumes one slot in the window and starts a fresh
  // cooldown.
  const recordRequest = useCallback(() => {
    const now = Date.now();
    requestTimestamps.current.push(now);
    const newCooldownEnd = now + config.cooldownMs;
    setCooldownEnd(newCooldownEnd);
    setCooldownRemaining(config.cooldownMs);
    forceUpdate({});
  }, [config.cooldownMs]);
  // Clears all rate-limiter state (window history and cooldown).
  const reset = useCallback(() => {
    requestTimestamps.current = [];
    setCooldownEnd(0);
    setCooldownRemaining(0);
    forceUpdate({});
  }, []);
  return {
    // Evaluated per render — a boolean snapshot, not the callback itself.
    canRequest: canRequest(),
    cooldownRemaining,
    // Also evaluated per render; prunes the ref as a side effect (see note
    // on cleanupAndCount).
    requestsRemaining: cleanupAndCount(),
    recordRequest,
    reset
  };
}
|
|
67
|
+
// Audio-recorder defaults: cap recordings at one minute and probe these
// container types in order of preference.
var DEFAULT_OPTIONS2 = {
  maxDurationMs: 60000,
  // 1 minute
  mimeTypes: ["audio/webm", "audio/mp4", "audio/ogg", "audio/wav"]
};
|
|
72
|
+
// Returns the first MIME type from `preferredTypes` that this browser's
// MediaRecorder can record, or "" when none is supported (or MediaRecorder
// does not exist, e.g. during SSR).
//
// Fix: the original returned `null` in the no-MediaRecorder case but "" in
// the no-supported-type case. Both call sites in this file only test
// truthiness (`mimeType ? { mimeType } : void 0`, `mimeType || "audio/webm"`),
// so unifying on "" is backward compatible and keeps the return type a
// consistent string.
function getSupportedMimeType(preferredTypes) {
  if (typeof MediaRecorder === "undefined") {
    return "";
  }
  for (const mimeType of preferredTypes) {
    if (MediaRecorder.isTypeSupported(mimeType)) {
      return mimeType;
    }
  }
  return "";
}
|
|
83
|
+
// Microphone recording hook built on getUserMedia + MediaRecorder, with a
// Web Audio AnalyserNode feeding live level data for a waveform display.
//
// Lifecycle: startRecording() acquires the stream, wires handlers, and starts
// the recorder; stopRecording() asks the recorder to stop, and the `onstop`
// handler assembles the final Blob and tears everything down; cancel/reset
// tear down without producing a Blob.
function useAudioRecorder(options = {}) {
  const config = { ...DEFAULT_OPTIONS2, ...options };
  const [isRecording, setIsRecording] = useState(false);
  // Elapsed recording time in ms, refreshed every 100ms while recording.
  const [duration, setDuration] = useState(0);
  // Final recording; set by the recorder's onstop handler.
  const [audioBlob, setAudioBlob] = useState(null);
  const [error, setError] = useState(null);
  // 12 normalized (0..1) frequency-band levels for waveform rendering.
  const [audioLevels, setAudioLevels] = useState([]);
  // Imperative handles for everything that must be torn down in cleanup().
  const mediaRecorderRef = useRef(null);
  const streamRef = useRef(null);
  const chunksRef = useRef([]);
  const startTimeRef = useRef(0);
  const durationIntervalRef = useRef(null);
  const maxDurationTimeoutRef = useRef(null);
  const audioContextRef = useRef(null);
  const analyserRef = useRef(null);
  const animationFrameRef = useRef(null);
  // Feature detection; also false during SSR where `navigator` is undefined.
  const isSupported = typeof navigator !== "undefined" && "mediaDevices" in navigator && "getUserMedia" in navigator.mediaDevices && typeof MediaRecorder !== "undefined";
  // rAF loop: averages the frequency spectrum into 12 bars (each scaled to
  // 0..1 by dividing by 255) and reschedules itself until the analyser ref
  // is cleared by cleanup().
  const updateAudioLevels = useCallback(() => {
    if (!analyserRef.current) return;
    const analyser = analyserRef.current;
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    analyser.getByteFrequencyData(dataArray);
    const bars = 12;
    const step = Math.floor(bufferLength / bars);
    const levels = [];
    for (let i = 0; i < bars; i++) {
      let sum = 0;
      for (let j = 0; j < step; j++) {
        sum += dataArray[i * step + j];
      }
      levels.push(sum / step / 255);
    }
    setAudioLevels(levels);
    animationFrameRef.current = requestAnimationFrame(updateAudioLevels);
  }, []);
  // Releases every resource acquired by startRecording: timers, the rAF
  // loop, the AudioContext, and all media tracks. Safe to call repeatedly.
  const cleanup = useCallback(() => {
    if (durationIntervalRef.current) {
      clearInterval(durationIntervalRef.current);
      durationIntervalRef.current = null;
    }
    if (maxDurationTimeoutRef.current) {
      clearTimeout(maxDurationTimeoutRef.current);
      maxDurationTimeoutRef.current = null;
    }
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
      animationFrameRef.current = null;
    }
    if (audioContextRef.current) {
      // NOTE(review): close() returns a Promise that is deliberately not
      // awaited here — fire-and-forget teardown.
      audioContextRef.current.close();
      audioContextRef.current = null;
    }
    if (streamRef.current) {
      streamRef.current.getTracks().forEach((track) => track.stop());
      streamRef.current = null;
    }
    analyserRef.current = null;
    mediaRecorderRef.current = null;
    chunksRef.current = [];
    setAudioLevels([]);
  }, []);
  // Requests the recorder to stop; the Blob is produced asynchronously in
  // the onstop handler installed by startRecording.
  const stopRecording = useCallback(() => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state !== "inactive") {
      mediaRecorderRef.current.stop();
    }
    setIsRecording(false);
  }, []);
  // Acquires the microphone, wires the analyser + recorder handlers, then
  // starts recording with a 100ms data timeslice and a max-duration guard.
  const startRecording = useCallback(async () => {
    if (!isSupported) {
      setError(new Error("Audio recording is not supported in this browser"));
      return;
    }
    // Reset all per-recording state before acquiring the stream.
    setError(null);
    setAudioBlob(null);
    setDuration(0);
    setAudioLevels([]);
    chunksRef.current = [];
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      streamRef.current = stream;
      const audioContext = new AudioContext();
      audioContextRef.current = audioContext;
      const source = audioContext.createMediaStreamSource(stream);
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = 256;
      analyser.smoothingTimeConstant = 0.8;
      source.connect(analyser);
      analyserRef.current = analyser;
      const mimeType = getSupportedMimeType(config.mimeTypes);
      // Falsy mimeType means "let the browser pick its default container".
      const mediaRecorder = new MediaRecorder(stream, mimeType ? { mimeType } : void 0);
      mediaRecorderRef.current = mediaRecorder;
      mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          chunksRef.current.push(event.data);
        }
      };
      // Assembles the final Blob, notifies the caller, then tears down.
      mediaRecorder.onstop = () => {
        const blob = new Blob(chunksRef.current, {
          type: mimeType || "audio/webm"
        });
        setAudioBlob(blob);
        if (config.onRecordingComplete) {
          config.onRecordingComplete(blob);
        }
        cleanup();
      };
      mediaRecorder.onerror = () => {
        setError(new Error("Recording error occurred"));
        setIsRecording(false);
        cleanup();
      };
      // 100ms timeslice: dataavailable fires periodically during recording.
      mediaRecorder.start(100);
      startTimeRef.current = Date.now();
      setIsRecording(true);
      updateAudioLevels();
      durationIntervalRef.current = setInterval(() => {
        setDuration(Date.now() - startTimeRef.current);
      }, 100);
      // Hard stop once the configured maximum duration elapses.
      maxDurationTimeoutRef.current = setTimeout(() => {
        stopRecording();
      }, config.maxDurationMs);
    } catch (err) {
      // Typically a permission denial from getUserMedia.
      const errorMessage = err instanceof Error ? err.message : "Failed to access microphone";
      setError(new Error(errorMessage));
      cleanup();
    }
  }, [isSupported, config.mimeTypes, config.maxDurationMs, config.onRecordingComplete, cleanup, stopRecording, updateAudioLevels]);
  // Aborts the recording and discards any captured audio (error is kept).
  const cancelRecording = useCallback(() => {
    cleanup();
    setIsRecording(false);
    setDuration(0);
    setAudioBlob(null);
  }, [cleanup]);
  // Full reset: like cancel, but also clears error and levels.
  const reset = useCallback(() => {
    cleanup();
    setIsRecording(false);
    setDuration(0);
    setAudioBlob(null);
    setError(null);
    setAudioLevels([]);
  }, [cleanup]);
  // Release hardware/timers if the component unmounts mid-recording.
  useEffect(() => {
    return () => {
      cleanup();
    };
  }, [cleanup]);
  return {
    isRecording,
    isSupported,
    duration,
    audioBlob,
    audioLevels,
    error,
    startRecording,
    stopRecording,
    cancelRecording,
    reset
  };
}
|
|
243
|
+
|
|
244
|
+
// src/hooks/useAiInput.ts
|
|
245
|
+
// Defaults mirrored from useRateLimiter / useAudioRecorder above, so
// useAiInput can merge caller overrides before delegating to those hooks.
var DEFAULT_RATE_LIMIT = {
  cooldownMs: 1000,
  maxRequests: 10,
  windowMs: 60000
};
var DEFAULT_AUDIO_CONFIG = {
  maxDurationMs: 60000,
  mimeTypes: ["audio/webm", "audio/mp4", "audio/ogg", "audio/wav"]
};
|
|
254
|
+
// Orchestrating hook: combines the rate limiter and the audio recorder into
// a single input state machine with states "idle" | "rate-limited" |
// "recording" | "loading" | "success" | "error" (as set below).
//
// Text flow: submit() -> submitText() -> send(text).
// Audio flow: stopRecording() sets a pending flag; once the recorder
// publishes its Blob, an effect forwards it to submitAudio().
function useAiInput(options) {
  const {
    send,
    sendAudio,
    rateLimit = {},
    audioConfig = {},
    onSuccess,
    onError,
    onTranscription
  } = options;
  const rateLimitConfig = { ...DEFAULT_RATE_LIMIT, ...rateLimit };
  const audioConfigMerged = { ...DEFAULT_AUDIO_CONFIG, ...audioConfig };
  const [state, setState] = useState("idle");
  const [text, setText] = useState("");
  const [error, setError] = useState(null);
  const [result, setResult] = useState(null);
  // True between stopRecording() and the recorder delivering its Blob.
  const pendingAudioSubmitRef = useRef(false);
  const rateLimiter = useRateLimiter(rateLimitConfig);
  const audioRecorder = useAudioRecorder({
    ...audioConfigMerged
  });
  // Mirror rate-limiter availability into the state machine (only toggles
  // between "idle" and "rate-limited"; other states are left alone).
  useEffect(() => {
    if (!rateLimiter.canRequest && state === "idle") {
      setState("rate-limited");
    } else if (rateLimiter.canRequest && state === "rate-limited") {
      setState("idle");
    }
  }, [rateLimiter.canRequest, state]);
  // Enter "recording" whenever the recorder reports it is active.
  useEffect(() => {
    if (audioRecorder.isRecording && state !== "recording") {
      setState("recording");
    }
  }, [audioRecorder.isRecording, state]);
  // Surface recorder errors through this hook's error/state and callback.
  useEffect(() => {
    if (audioRecorder.error) {
      setError(audioRecorder.error);
      setState("error");
      onError?.(audioRecorder.error);
    }
  }, [audioRecorder.error, onError]);
  // Sends the current text via `send`; consumes a rate-limit slot up front
  // and clears the input only on success.
  const submitText = useCallback(async () => {
    if (!text.trim() || !rateLimiter.canRequest) {
      return;
    }
    setState("loading");
    setError(null);
    rateLimiter.recordRequest();
    try {
      const response = await send(text);
      setResult(response);
      setState("success");
      onSuccess?.(response);
      setText("");
    } catch (err) {
      const error2 = err instanceof Error ? err : new Error("Request failed");
      setError(error2);
      setState("error");
      onError?.(error2);
    }
  }, [text, rateLimiter, send, onSuccess, onError]);
  // Sends a recorded Blob via `sendAudio` (falling back to `send`); if the
  // response looks like a transcription, copies it into the text field and
  // notifies onTranscription.
  const submitAudio = useCallback(async (blob) => {
    if (!rateLimiter.canRequest) {
      return;
    }
    setState("loading");
    setError(null);
    rateLimiter.recordRequest();
    try {
      const sendFn = sendAudio || send;
      const response = await sendFn(blob);
      setResult(response);
      setState("success");
      onSuccess?.(response);
      // Duck-typed transcription detection: first string among common keys.
      if (onTranscription && response && typeof response === "object") {
        const res = response;
        const transcriptionText = res.text || res.transcription || res.transcript;
        if (typeof transcriptionText === "string") {
          setText(transcriptionText);
          onTranscription(transcriptionText);
        }
      }
    } catch (err) {
      const error2 = err instanceof Error ? err : new Error("Request failed");
      setError(error2);
      setState("error");
      onError?.(error2);
    }
  }, [rateLimiter, send, sendAudio, onSuccess, onError, onTranscription]);
  // When a stop was requested and the recorder has produced its Blob,
  // forward it exactly once (the flag is cleared before submitting).
  useEffect(() => {
    if (pendingAudioSubmitRef.current && audioRecorder.audioBlob && !audioRecorder.isRecording) {
      pendingAudioSubmitRef.current = false;
      submitAudio(audioRecorder.audioBlob);
    }
  }, [audioRecorder.audioBlob, audioRecorder.isRecording, submitAudio]);
  // Recording is gated on the rate limiter too, so a recording can't start
  // when its eventual submission would be rejected.
  const startRecording = useCallback(async () => {
    if (!rateLimiter.canRequest) {
      return;
    }
    await audioRecorder.startRecording();
  }, [rateLimiter.canRequest, audioRecorder]);
  // Stop and auto-submit (see the pending-flag effect above).
  const stopRecording = useCallback(() => {
    pendingAudioSubmitRef.current = true;
    audioRecorder.stopRecording();
  }, [audioRecorder]);
  // Stop and discard — no submission.
  const cancelRecording = useCallback(() => {
    audioRecorder.cancelRecording();
    setState("idle");
  }, [audioRecorder]);
  // Unified submit: finishes a recording if one is active, otherwise sends
  // the text when non-blank.
  const submit = useCallback(() => {
    if (audioRecorder.isRecording) {
      stopRecording();
    } else if (text.trim()) {
      submitText();
    }
  }, [audioRecorder.isRecording, text, stopRecording, submitText]);
  // Resets this hook plus both sub-hooks to their initial states.
  const reset = useCallback(() => {
    setState("idle");
    setText("");
    setError(null);
    setResult(null);
    rateLimiter.reset();
    audioRecorder.reset();
  }, [rateLimiter, audioRecorder]);
  const canSubmit = rateLimiter.canRequest && state !== "loading" && (audioRecorder.isRecording || text.trim().length > 0);
  return {
    // State
    state,
    error,
    result,
    // Text
    text,
    setText,
    submit,
    canSubmit,
    // Audio
    isRecording: audioRecorder.isRecording,
    startRecording,
    stopRecording,
    cancelRecording,
    recordingDuration: audioRecorder.duration,
    maxRecordingDuration: audioConfigMerged.maxDurationMs,
    audioLevels: audioRecorder.audioLevels,
    // Rate limiting
    cooldownRemaining: rateLimiter.cooldownRemaining,
    requestsRemaining: rateLimiter.requestsRemaining,
    // Utils
    reset
  };
}
|
|
403
|
+
// Formats a millisecond duration as "M:SS", e.g. 65000 -> "1:05".
// Fix: negative inputs (possible from clock-skewed Date.now() arithmetic in
// the duration/cooldown counters) previously produced strings like "-1:0-5";
// they are now clamped to "0:00". Non-negative inputs behave exactly as
// before.
function formatDuration(ms) {
  const totalSeconds = Math.floor(Math.max(0, ms) / 1e3);
  const minutes = Math.floor(totalSeconds / 60);
  const remainingSeconds = totalSeconds % 60;
  return `${minutes}:${remainingSeconds.toString().padStart(2, "0")}`;
}
|
|
409
|
+
// Renders `levels` (values in 0..1) as vertical amber bars; falls back to
// 16 low idle stubs when no level data is available yet. Bar height is
// clamped to a 6px minimum so silent bands stay visible.
function Waveform({ levels, className = "" }) {
  const bars = levels.length > 0 ? levels : Array(16).fill(0.1);
  const renderBar = (level, i) => /* @__PURE__ */ jsx(
    "div",
    {
      className: "w-1.5 bg-amber-500 rounded-full transition-all duration-75",
      style: {
        height: `${Math.max(6, level * 40)}px`
      }
    },
    i
  );
  return /* @__PURE__ */ jsx("div", { className: `flex items-center justify-center gap-1 h-10 ${className}`, children: bars.map(renderBar) });
}
|
|
422
|
+
// Filled microphone glyph (256x256 viewBox, colored via currentColor).
function MicIcon({ className = "" }) {
  const d = "M128,176a48.05,48.05,0,0,0,48-48V64a48,48,0,0,0-96,0v64A48.05,48.05,0,0,0,128,176ZM96,64a32,32,0,0,1,64,0v64a32,32,0,0,1-64,0Zm40,143.6V232a8,8,0,0,1-16,0V207.6A80.11,80.11,0,0,1,48,128a8,8,0,0,1,16,0,64,64,0,0,0,128,0,8,8,0,0,1,16,0A80.11,80.11,0,0,1,136,207.6Z";
  return /* @__PURE__ */ jsx("svg", { className, viewBox: "0 0 256 256", fill: "currentColor", children: /* @__PURE__ */ jsx("path", { d }) });
}
|
|
425
|
+
// Filled upward-arrow glyph used on the send button (256x256 viewBox).
function ArrowUpIcon({ className = "" }) {
  const d = "M205.66,117.66a8,8,0,0,1-11.32,0L136,59.31V216a8,8,0,0,1-16,0V59.31L61.66,117.66a8,8,0,0,1-11.32-11.32l72-72a8,8,0,0,1,11.32,0l72,72A8,8,0,0,1,205.66,117.66Z";
  return /* @__PURE__ */ jsx("svg", { className, viewBox: "0 0 256 256", fill: "currentColor", children: /* @__PURE__ */ jsx("path", { d }) });
}
|
|
428
|
+
// Outlined square "stop" glyph used while recording (256x256 viewBox).
function StopIcon({ className = "" }) {
  const d = "M200,40H56A16,16,0,0,0,40,56V200a16,16,0,0,0,16,16H200a16,16,0,0,0,16-16V56A16,16,0,0,0,200,40Zm0,160H56V56H200V200Z";
  return /* @__PURE__ */ jsx("svg", { className, viewBox: "0 0 256 256", fill: "currentColor", children: /* @__PURE__ */ jsx("path", { d }) });
}
|
|
431
|
+
// "X" (close/cancel) glyph used on the cancel-recording button.
function XIcon({ className = "" }) {
  const d = "M205.66,194.34a8,8,0,0,1-11.32,11.32L128,139.31,61.66,205.66a8,8,0,0,1-11.32-11.32L116.69,128,50.34,61.66A8,8,0,0,1,61.66,50.34L128,116.69l66.34-66.35a8,8,0,0,1,11.32,11.32L139.31,128Z";
  return /* @__PURE__ */ jsx("svg", { className, viewBox: "0 0 256 256", fill: "currentColor", children: /* @__PURE__ */ jsx("path", { d }) });
}
|
|
434
|
+
// Indeterminate loading spinner: a faint full circle plus a brighter arc,
// rotated by Tailwind's `animate-spin` class.
function Spinner({ className = "" }) {
  const track = /* @__PURE__ */ jsx(
    "circle",
    {
      className: "opacity-25",
      cx: "12",
      cy: "12",
      r: "10",
      stroke: "currentColor",
      strokeWidth: "4"
    }
  );
  const arc = /* @__PURE__ */ jsx(
    "path",
    {
      className: "opacity-75",
      fill: "currentColor",
      d: "M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
    }
  );
  return /* @__PURE__ */ jsxs("svg", { className: `animate-spin ${className}`, viewBox: "0 0 24 24", fill: "none", children: [track, arc] });
}
|
|
457
|
+
// Built-in input UI: an auto-growing textarea with mic/send controls, a live
// waveform with elapsed time while recording, and inline error / rate-limit
// messages. Receives the full useAiInput state object as props (spread in by
// AiInput below).
function DefaultUI({
  text,
  setText,
  submit,
  canSubmit,
  state,
  error,
  isRecording,
  startRecording,
  stopRecording,
  cancelRecording,
  recordingDuration,
  // Accepted for prop-compatibility with useAiInput's return value, but not
  // rendered by this default UI.
  maxRecordingDuration,
  audioLevels,
  cooldownRemaining,
  placeholder = "Ask anything...",
  disabled = false
}) {
  const isLoading = state === "loading";
  const isRateLimited = state === "rate-limited";
  const hasError = state === "error";
  // Enter submits (Shift+Enter keeps the newline); submission via keyboard
  // is suppressed while recording.
  const handleKeyDown = (e) => {
    if (e.key === "Enter" && !e.shiftKey && canSubmit && !isRecording) {
      e.preventDefault();
      submit();
    }
  };
  // Auto-size the textarea between 56px and 200px based on its content.
  const handleInput = (e) => {
    setText(e.target.value);
    e.target.style.height = "auto";
    e.target.style.height = `${Math.min(Math.max(e.target.scrollHeight, 56), 200)}px`;
  };
  return /* @__PURE__ */ jsx("div", { className: "w-full", children: /* @__PURE__ */ jsxs(
    "div",
    {
      className: `
        bg-zinc-900 border border-zinc-800 rounded-xl
        focus-within:ring-1 focus-within:ring-amber-500/50 focus-within:border-amber-500/50
        transition-all duration-200
        ${disabled ? "opacity-50" : ""}
      `,
      children: [
        /* @__PURE__ */ jsx(
          "textarea",
          {
            value: text,
            onChange: handleInput,
            onKeyDown: handleKeyDown,
            placeholder: isRecording ? "Listening..." : placeholder,
            disabled: disabled || isLoading || isRateLimited,
            rows: 1,
            className: `
              w-full px-4 pt-4 pb-2
              bg-transparent text-zinc-100 placeholder:text-zinc-500
              focus:outline-none
              disabled:cursor-not-allowed
              resize-none
              min-h-[56px]
            `,
            style: { height: "56px" }
          }
        ),
        /* @__PURE__ */ jsxs("div", { className: "flex items-center justify-between px-3 pb-3 pt-1", children: [
          // Left side: cancel button + waveform + elapsed time while
          // recording, otherwise error / rate-limit status text.
          /* @__PURE__ */ jsx("div", { className: "flex items-center gap-3", children: isRecording ? /* @__PURE__ */ jsxs(Fragment, { children: [
            /* @__PURE__ */ jsx(
              "button",
              {
                onClick: cancelRecording,
                disabled,
                className: "p-2 text-zinc-400 hover:text-zinc-200 transition-colors",
                "aria-label": "Cancel recording",
                children: /* @__PURE__ */ jsx(XIcon, { className: "h-5 w-5" })
              }
            ),
            /* @__PURE__ */ jsx(Waveform, { levels: audioLevels }),
            /* @__PURE__ */ jsx("span", { className: "text-sm text-zinc-400 font-mono", children: formatDuration(recordingDuration) })
          ] }) : /* @__PURE__ */ jsxs("div", { className: "text-sm", children: [
            hasError && error && /* @__PURE__ */ jsx("span", { className: "text-red-400", children: error.message }),
            isRateLimited && /* @__PURE__ */ jsxs("span", { className: "text-amber-400", children: [
              "Wait ",
              formatDuration(cooldownRemaining)
            ] })
          ] }) }),
          // Right side: stop button while recording, otherwise mic + send.
          /* @__PURE__ */ jsx("div", { className: "flex items-center gap-2", children: isRecording ? /* @__PURE__ */ jsx(Fragment, { children: /* @__PURE__ */ jsx(
            "button",
            {
              onClick: stopRecording,
              disabled,
              className: `
                p-2.5 rounded-full
                bg-red-500 hover:bg-red-600
                text-white
                transition-colors
                disabled:opacity-50 disabled:cursor-not-allowed
              `,
              "aria-label": "Stop recording",
              children: /* @__PURE__ */ jsx(StopIcon, { className: "h-5 w-5" })
            }
          ) }) : /* @__PURE__ */ jsxs(Fragment, { children: [
            /* @__PURE__ */ jsx(
              "button",
              {
                onClick: startRecording,
                disabled: disabled || isLoading || isRateLimited,
                className: `
                  p-2 text-zinc-400 hover:text-zinc-200
                  transition-colors
                  disabled:opacity-50 disabled:cursor-not-allowed
                `,
                "aria-label": "Start recording",
                children: /* @__PURE__ */ jsx(MicIcon, { className: "h-5 w-5" })
              }
            ),
            /* @__PURE__ */ jsx(
              "button",
              {
                onClick: submit,
                disabled: !canSubmit || disabled,
                className: `
                  p-2.5 rounded-full
                  transition-colors
                  disabled:opacity-50 disabled:cursor-not-allowed
                  ${canSubmit ? "bg-amber-500 hover:bg-amber-600 text-zinc-900" : "bg-zinc-700 text-zinc-500"}
                `,
                "aria-label": "Send message",
                children: isLoading ? /* @__PURE__ */ jsx(Spinner, { className: "h-5 w-5" }) : /* @__PURE__ */ jsx(ArrowUpIcon, { className: "h-5 w-5" })
              }
            )
          ] }) })
        ] })
      ]
    }
  ) });
}
|
|
591
|
+
// Drop-in AI input component. When given a render-prop child it acts as a
// headless wrapper and hands the child the full useAiInput state object;
// otherwise it renders the bundled DefaultUI.
function AiInput({
  send,
  sendAudio,
  rateLimit,
  audioConfig,
  onSuccess,
  onError,
  onTranscription,
  children,
  placeholder,
  className,
  disabled = false
}) {
  const inputState = useAiInput({
    send,
    sendAudio,
    rateLimit,
    audioConfig,
    onSuccess,
    onError,
    onTranscription
  });
  // Headless mode: the render prop receives everything the hook returns.
  if (children) {
    return /* @__PURE__ */ jsx(Fragment, { children: children(inputState) });
  }
  const wrapperClass = `w-full ${className || ""}`;
  return /* @__PURE__ */ jsx("div", {
    className: wrapperClass,
    children: /* @__PURE__ */ jsx(DefaultUI, { ...inputState, placeholder, disabled })
  });
}
|
|
625
|
+
|
|
626
|
+
export { AiInput, useAiInput, useAudioRecorder, useRateLimiter };
|
|
627
|
+
//# sourceMappingURL=index.mjs.map
|