@sampleapp.ai/sdk 1.0.24 → 1.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/chat-bar/typing-textarea.js +150 -0
- package/dist/components/chat-bar/voice-button/voice-icon.js +129 -0
- package/dist/components/chat-bar/voice-button.js +24 -0
- package/dist/components/chat-bar/voice-overlay.js +190 -0
- package/dist/components/chat-bar.js +580 -0
- package/dist/components/icons.js +155 -0
- package/dist/components/minimal-chat-bar.js +51 -0
- package/dist/components/ui/button.js +189 -0
- package/dist/components/ui/card.js +77 -0
- package/dist/components/ui/textarea.js +68 -0
- package/dist/index.d.ts +53 -0
- package/dist/index.es.js +1430 -94
- package/dist/index.js +5 -1
- package/dist/index.standalone.js +87 -0
- package/dist/index.standalone.umd.js +147 -0
- package/dist/index.umd.js +319 -34
- package/dist/sdk.css +1 -0
- package/dist/themes.js +189 -0
- package/package.json +9 -3
|
@@ -0,0 +1,580 @@
|
|
|
1
|
+
"use client";
|
|
2
|
+
/* eslint-disable @next/next/no-img-element */
|
|
3
|
+
import React from "react";
|
|
4
|
+
import { useState, useRef, useEffect, useMemo } from "react";
|
|
5
|
+
import { Textarea } from "./ui/textarea";
|
|
6
|
+
import { Button } from "./ui/button";
|
|
7
|
+
import { getTheme } from "../themes";
|
|
8
|
+
import { LoaderIcon, ArrowUpIcon } from "./icons";
|
|
9
|
+
import { TypingTextarea } from "./chat-bar/typing-textarea";
|
|
10
|
+
import { VoiceButton } from "./chat-bar/voice-button";
|
|
11
|
+
// Default fallback color scheme.
// Used by getColorScheme (and again inline in ChatBar's render) whenever the
// theme system throws or returns a falsy theme, so the gradient border and
// drop shadow always have usable values.
const DEFAULT_COLOR_SCHEME = {
    // Blue -> violet -> pink gradient for the animated border glow.
    gradient: "linear-gradient(45deg, #3b82f6, #8b5cf6, #ec4899)",
    shadow: {
        // Semi-transparent black used as the chat bar's backdrop shadow layer.
        dark: "rgba(0, 0, 0, 0.2)",
    },
};
|
|
18
|
+
/**
 * Resolve the color scheme driving the chat bar's gradient border.
 *
 * Looks up `themeName` via the theme system when one is given, otherwise asks
 * for the system default theme. Any falsy lookup result or thrown error falls
 * back to DEFAULT_COLOR_SCHEME so rendering never breaks.
 *
 * @param {string|undefined} themeName - Optional named theme to resolve.
 * @param {string|undefined} playgroundUid - Accepted for interface
 *   compatibility; not consulted during resolution.
 * @returns {object} A color scheme with at least `gradient` and `shadow`.
 */
const getColorScheme = (themeName, playgroundUid) => {
    try {
        // Named lookup when a theme was requested, zero-arg default otherwise.
        const resolved = themeName ? getTheme(themeName) : getTheme();
        return resolved || DEFAULT_COLOR_SCHEME;
    }
    catch (error) {
        console.warn("Theme system error, using fallback:", error);
        return DEFAULT_COLOR_SCHEME;
    }
};
|
|
34
|
+
/**
 * ChatBar - animated gradient-bordered chat input with optional typing
 * placeholder animation and voice-to-text recording.
 *
 * Props:
 * - placeholder: textarea placeholder when typingTexts is empty.
 * - onSubmit: called with the triggering event on submit.
 * - hasPendingEnv: when truthy, disables the submit button.
 * - playgroundUid: when set, submit opens `https://<uid>.sampleapp.ai?q=...`
 *   in a new tab before invoking onSubmit.
 * - isSubmitting: shows a spinner and disables submit while truthy.
 * - typingTexts: rotating example prompts; non-empty selects TypingTextarea,
 *   empty selects the plain Textarea.
 * - shouldFocusOnMount: focus + select the textarea after mount.
 * - showModelSelector: toggles an (currently empty) footer container.
 * - projectUid: accepted but not read in this body — NOTE(review): confirm
 *   whether callers still pass it for a reason.
 * - theme: named theme forwarded to getColorScheme.
 * - height: accepted with default "auto" but not read in this body —
 *   NOTE(review): appears vestigial; confirm before removing.
 */
export const ChatBar = ({ placeholder = "Ask anything...", onSubmit, hasPendingEnv, playgroundUid, isSubmitting, typingTexts = [
    "Build me an AI chatbot application using OpenAI API",
    "Build me an ecommerce website using Stripe API",
    "Build me a price tracking tool using AgentQL API",
    "Build me a movie recommendation system using Qdrant API",
    "Build me a weather forecasting app using OpenWeather API",
    "Build me a SMS notification system using Twilio API",
    "Build me a travel booking platform using Skyscanner API",
], shouldFocusOnMount = true, showModelSelector = true, projectUid, theme, height = "auto", // Add default for required height prop
}) => {
    // Compiler scratch variable for the optional-chain lowering below.
    var _a;
    // Drives the gradient layers' opacity. NOTE(review): handleBlur also sets
    // this to true, so it never becomes false after mount — looks intentional
    // (keep the glow bright) but confirm; the dimmed branch is dead as written.
    const [isFocused, setIsFocused] = useState(true);
    const [isRecording, setIsRecording] = useState(false);
    const [isTranscribing, setIsTranscribing] = useState(false);
    // Setter-only state: the overlay visibility value itself is never read here.
    const [, setShowVoiceOverlay] = useState(false);
    // Handle for the 60s auto-stop timer so it can be cleared on manual stop/unmount.
    const [recordingTimeout, setRecordingTimeout] = useState(null);
    // Set on record start, cleared on cleanup; only consumed by the
    // commented-out minimum-duration check below.
    const [recordingStartTime, setRecordingStartTime] = useState(null);
    // The text currently in the input.
    const [query, setQuery] = useState("");
    const mediaRecorderRef = useRef(null);
    // Accumulates Blob chunks from MediaRecorder's dataavailable events.
    const audioChunksRef = useRef([]);
    const textareaRef = useRef(null);
    // Guards mount-only effects (focus) against running before hydration.
    const [isMounted, setIsMounted] = useState(false);
    // Get color scheme for gradient border with error handling
    const colorScheme = useMemo(() => {
        try {
            return getColorScheme(theme, playgroundUid);
        }
        catch (error) {
            console.warn("Error getting color scheme:", error);
            return DEFAULT_COLOR_SCHEME;
        }
    }, [theme, playgroundUid]);
    useEffect(() => {
        setIsMounted(true);
    }, []);
    useEffect(() => {
        // Focus and select all text in the textarea when component mounts
        if (shouldFocusOnMount && textareaRef.current && isMounted) {
            try {
                textareaRef.current.focus();
                textareaRef.current.select();
            }
            catch (error) {
                console.warn("Focus error:", error);
            }
        }
    }, [isMounted, shouldFocusOnMount]);
    const handleFocus = () => {
        setIsFocused(true);
    };
    // NOTE(review): sets true (not false) on blur — see isFocused note above.
    const handleBlur = () => {
        setIsFocused(true);
    };
    // Function to adjust textarea height
    const adjustTextareaHeight = () => {
        const textarea = textareaRef.current;
        if (textarea) {
            try {
                // Reset height to auto to get the correct scrollHeight
                textarea.style.height = "auto";
                // Set new height based on scrollHeight, with a maximum of 200px
                const newHeight = Math.min(textarea.scrollHeight, 200);
                textarea.style.height = `${newHeight}px`;
            }
            catch (error) {
                console.warn("Textarea height adjustment error:", error);
            }
        }
    };
    // Update height whenever query changes
    useEffect(() => {
        adjustTextareaHeight();
    }, [query]);
    // Keyboard shortcuts for voice mode
    useEffect(() => {
        const handleKeyDown = (event) => {
            // Check for Cmd+/ (Mac) or Ctrl+/ (Windows/Linux) - toggles recording
            const isVoiceShortcut = (event.metaKey || event.ctrlKey) && event.key === "/";
            // Check for Enter or Escape - stops recording if currently recording
            const isStopRecordingKey = (event.key === "Enter" || event.key === "Escape") &&
                (isRecording || isTranscribing);
            if (isVoiceShortcut) {
                event.preventDefault();
                handleVoiceRecording();
            }
            else if (isStopRecordingKey) {
                // Stop recording with Enter or Escape
                event.preventDefault();
                if (isRecording && !isTranscribing) {
                    // Only stop if recording (not if transcribing)
                    handleVoiceRecording();
                }
            }
        };
        // Add event listener
        document.addEventListener("keydown", handleKeyDown);
        // Cleanup function
        return () => {
            document.removeEventListener("keydown", handleKeyDown);
        };
    }, [isRecording, isTranscribing]); // Dependencies for handleVoiceRecording
    // Cleanup on unmount
    useEffect(() => {
        return () => {
            if (recordingTimeout) {
                clearTimeout(recordingTimeout);
            }
            if (mediaRecorderRef.current && isRecording) {
                mediaRecorderRef.current.stop();
            }
            setShowVoiceOverlay(false);
        };
    }, [recordingTimeout, isRecording]);
    // Dismiss handler for the voice overlay; stops an in-flight recording but
    // refuses to interrupt transcription.
    const handleVoiceOverlayDismiss = () => {
        // Only allow dismissing during recording, not transcribing
        if (isTranscribing)
            return;
        if (isRecording) {
            // Stop recording if currently recording
            stopRecording();
        }
        setShowVoiceOverlay(false);
    };
    // Stops the active MediaRecorder, uploads the captured audio to
    // /api/voice/transcribe, and appends the transcription to `query`.
    // Resolves its Promise on every path (onstop success/failure, 5s watchdog,
    // stop() throw, or no recorder) so callers can safely await it.
    // NOTE(review): the captured stream's tracks are not explicitly stopped
    // here — confirm whether the browser mic indicator lingers after stop.
    const stopRecording = async () => {
        if (mediaRecorderRef.current && isRecording) {
            // Check minimum recording duration (1 second)
            // const now = Date.now();
            // const recordingDuration = recordingStartTime
            //     ? now - recordingStartTime
            //     : 0;
            // if (recordingDuration < 1000) {
            //     toast({
            //         title: "Recording Too Short",
            //         description: "Please record for at least 1 second before stopping.",
            //         variant: "destructive",
            //     });
            //     return;
            // }
            // Immediately update UI state to prevent double-clicks
            setIsRecording(false);
            setIsTranscribing(true);
            // Clear timeout
            if (recordingTimeout) {
                clearTimeout(recordingTimeout);
                setRecordingTimeout(null);
            }
            // Wait for the audio data to be processed
            return new Promise((resolve) => {
                if (mediaRecorderRef.current) {
                    let hasData = false;
                    let stopTimeout; // eslint-disable-line prefer-const
                    // Set up data collection with timeout
                    mediaRecorderRef.current.ondataavailable = (event) => {
                        if (event.data.size > 0) {
                            hasData = true;
                            audioChunksRef.current.push(event.data);
                        }
                    };
                    mediaRecorderRef.current.onstop = async () => {
                        // Clear any pending timeout
                        if (stopTimeout) {
                            clearTimeout(stopTimeout);
                        }
                        try {
                            // Check if we have any audio data
                            if (!hasData || audioChunksRef.current.length === 0) {
                                throw new Error("No audio data recorded");
                            }
                            // NOTE(review): the recorder captures audio/webm or
                            // audio/mp4 (see handleVoiceRecording) but the blob
                            // is labeled audio/wav — confirm the backend ignores
                            // the declared type.
                            const audioBlob = new Blob(audioChunksRef.current, {
                                type: "audio/wav",
                            });
                            // Check if the blob has meaningful size (at least 1KB)
                            if (audioBlob.size < 1024) {
                                throw new Error("Audio recording too short");
                            }
                            // Send to backend for transcription
                            const formData = new FormData();
                            formData.append("audio", audioBlob, "recording.wav");
                            const response = await fetch("/api/voice/transcribe", {
                                method: "POST",
                                body: formData,
                            });
                            if (response.ok) {
                                const { transcription } = await response.json();
                                if (transcription && transcription.trim()) {
                                    // Add transcribed text to current query
                                    const newQuery = query
                                        ? `${query} ${transcription}`
                                        : transcription;
                                    setQuery(newQuery.trim());
                                    // Focus the textarea after transcription is added so user can press Enter immediately
                                    setTimeout(() => {
                                        if (textareaRef.current) {
                                            textareaRef.current.focus();
                                            // Move cursor to end of text
                                            const length = textareaRef.current.value.length;
                                            textareaRef.current.setSelectionRange(length, length);
                                        }
                                    }, 100); // Small delay to ensure state update completes
                                }
                                else {
                                    // toast({
                                    //     title: "No Speech Detected",
                                    //     description:
                                    //         "Try speaking more clearly or closer to the microphone",
                                    //     variant: "destructive",
                                    // });
                                }
                            }
                            else {
                                throw new Error("Transcription failed");
                            }
                        }
                        catch (error) {
                            console.error("Transcription error:", error);
                            // Provide more specific error messages
                            let errorMessage = "Could not convert speech to text. Please try again.";
                            let errorTitle = "Transcription Failed";
                            if (error instanceof Error) {
                                if (error.message.includes("No audio data") ||
                                    error.message.includes("too short")) {
                                    errorTitle = "Recording Too Short";
                                    errorMessage =
                                        "Please speak for at least 1-2 seconds before stopping.";
                                }
                            }
                            // toast({
                            //     title: errorTitle,
                            //     description: errorMessage,
                            //     variant: "destructive",
                            // });
                        }
                        finally {
                            // Clean up
                            audioChunksRef.current = [];
                            mediaRecorderRef.current = null;
                            setRecordingStartTime(null);
                            setIsTranscribing(false);
                            setShowVoiceOverlay(false);
                            resolve();
                        }
                    };
                    // Set up a timeout to force stop if MediaRecorder doesn't stop properly
                    stopTimeout = setTimeout(() => {
                        console.warn("MediaRecorder stop timeout, forcing cleanup");
                        audioChunksRef.current = [];
                        mediaRecorderRef.current = null;
                        setRecordingStartTime(null);
                        setIsTranscribing(false);
                        setShowVoiceOverlay(false);
                        resolve();
                    }, 5000); // 5 second timeout
                    // Stop the recording
                    try {
                        mediaRecorderRef.current.stop();
                    }
                    catch (error) {
                        console.error("Error stopping MediaRecorder:", error);
                        // Force cleanup if stop fails
                        if (stopTimeout) {
                            clearTimeout(stopTimeout);
                        }
                        audioChunksRef.current = [];
                        mediaRecorderRef.current = null;
                        setRecordingStartTime(null);
                        setIsTranscribing(false);
                        setShowVoiceOverlay(false);
                        resolve();
                    }
                }
                else {
                    // No MediaRecorder available, just clean up
                    setRecordingStartTime(null);
                    setIsTranscribing(false);
                    setShowVoiceOverlay(false);
                    resolve();
                }
            });
        }
    };
    // Toggle entry point used by the mic button and the Cmd/Ctrl+/ shortcut:
    // no-op while transcribing, stop when recording, otherwise request mic
    // access and start a new MediaRecorder with a 60s auto-stop.
    const handleVoiceRecording = async () => {
        if (isTranscribing) {
            // Don't allow interaction while transcribing
            return;
        }
        if (isRecording) {
            // Manual stop
            await stopRecording();
        }
        else {
            // Start recording - automatically trigger permission request
            try {
                // Check if mediaDevices is supported
                if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
                    throw new Error("Browser doesn't support microphone access");
                }
                // This will automatically trigger the browser's permission prompt
                const stream = await navigator.mediaDevices.getUserMedia({
                    audio: {
                        echoCancellation: true,
                        noiseSuppression: true,
                        sampleRate: 44100,
                    },
                });
                // Create MediaRecorder
                const mediaRecorder = new MediaRecorder(stream, {
                    mimeType: MediaRecorder.isTypeSupported("audio/webm")
                        ? "audio/webm"
                        : "audio/mp4",
                });
                mediaRecorderRef.current = mediaRecorder;
                audioChunksRef.current = [];
                // Set up event listeners
                mediaRecorder.ondataavailable = (event) => {
                    if (event.data.size > 0) {
                        audioChunksRef.current.push(event.data);
                    }
                };
                // Start recording
                mediaRecorder.start(200); // Collect data every 200 ms
                setIsRecording(true);
                setRecordingStartTime(Date.now());
                setShowVoiceOverlay(true);
                // Set up 60 second auto-timeout
                const timeout = setTimeout(async () => {
                    await stopRecording();
                }, 60000);
                setRecordingTimeout(timeout);
            }
            catch (error) {
                console.error("Microphone access error:", error);
                let errorMessage = "Please allow microphone access to use voice recording";
                let errorTitle = "Microphone Access Required";
                if (error instanceof Error) {
                    if (error.name === "NotAllowedError") {
                        errorMessage =
                            "Microphone access was denied. Please click the microphone icon in your browser's address bar and allow access, then try again.";
                    }
                    else if (error.name === "NotFoundError") {
                        errorTitle = "No Microphone Found";
                        errorMessage =
                            "No microphone was detected. Please connect a microphone and try again.";
                    }
                    else if (error.name === "NotReadableError") {
                        errorTitle = "Microphone In Use";
                        errorMessage =
                            "Your microphone is being used by another application. Please close other apps and try again.";
                    }
                    else if (error.name === "NotSupportedError") {
                        errorTitle = "Not Supported";
                        errorMessage =
                            "Your browser doesn't support microphone access. Please use a modern browser.";
                    }
                }
                // toast({
                //     title: errorTitle,
                //     description: errorMessage,
                //     variant: "destructive",
                // });
            }
        }
    };
    // Submit: ignores empty/whitespace queries; when playgroundUid is set,
    // opens the playground URL with the query pre-filled, then forwards the
    // event to the caller's onSubmit.
    const handleSubmit = (e) => {
        e.preventDefault();
        if (!query.trim()) {
            return;
        }
        try {
            if (playgroundUid) {
                const encodedQuery = encodeURIComponent(query.trim());
                const url = `https://${playgroundUid}.sampleapp.ai?q=${encodedQuery}`;
                window.open(url, "_blank");
            }
            if (onSubmit) {
                onSubmit(e);
            }
        }
        catch (error) {
            console.error("Submit error:", error);
        }
    };
    // Render: four stacked layers inside the padded wrapper -
    //   1) static shadow layer, 2) blurred animated gradient (glow),
    //   3) sharp animated gradient (border), 4) the dark input card on top -
    // followed by an inline <style> defining the gradient/spin keyframes.
    return (React.createElement("div", null,
        React.createElement("div", { style: {
                position: "relative",
                padding: "2px", // reveals 2px of the gradient layers as a border
                width: "100%",
                maxWidth: "100%",
                marginLeft: "auto",
                marginRight: "auto",
            } },
            // Layer 1: drop-shadow backdrop.
            React.createElement("div", { style: {
                    backgroundColor: ((_a = colorScheme.shadow) === null || _a === void 0 ? void 0 : _a.dark) || "rgba(0, 0, 0, 0.2)",
                    position: "absolute",
                    top: "0",
                    right: "0",
                    bottom: "0",
                    left: "0",
                    borderRadius: "0.85rem",
                    transition: "opacity 0.3s ease-in-out",
                    boxShadow: "0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06)",
                    opacity: "1",
                    pointerEvents: "none",
                } }),
            // Layer 2: blurred copy of the gradient = outer glow.
            React.createElement("div", { style: {
                    background: colorScheme.gradient || DEFAULT_COLOR_SCHEME.gradient,
                    backgroundSize: "400% 400%",
                    position: "absolute",
                    top: "0",
                    right: "0",
                    bottom: "0",
                    left: "0",
                    borderRadius: "0.85rem",
                    zIndex: 1,
                    filter: "blur(12px)",
                    transition: "opacity 0.3s ease-in-out",
                    pointerEvents: "none",
                    opacity: isFocused ? "0.8" : "0.4",
                    animation: "gradient-bg 5s ease infinite",
                } }),
            // Layer 3: sharp gradient that shows through the 2px padding as the border.
            React.createElement("div", { style: {
                    background: colorScheme.gradient || DEFAULT_COLOR_SCHEME.gradient,
                    backgroundSize: "400% 400%",
                    position: "absolute",
                    top: "0",
                    right: "0",
                    bottom: "0",
                    left: "0",
                    borderRadius: "0.85rem",
                    zIndex: 1,
                    transition: "opacity 0.3s ease-in-out",
                    pointerEvents: "none",
                    opacity: isFocused ? "1" : "0.7",
                    animation: "gradient-bg 5s ease infinite",
                } }),
            // Layer 4: the actual input card.
            React.createElement("div", { style: {
                    position: "relative",
                    zIndex: 10,
                    borderRadius: "0.75rem",
                    backgroundColor: "rgb(24, 24, 27)", // zinc-900 equivalent
                    width: "100%",
                    paddingLeft: "0.25rem",
                    paddingRight: "0.25rem",
                    paddingTop: "0.5rem",
                    paddingBottom: "0.5rem",
                } },
                React.createElement("div", { style: { display: "flex", flexDirection: "column", gap: "4px" } },
                    React.createElement("div", { style: {
                            position: "relative",
                            display: "flex",
                            paddingLeft: "8px",
                            paddingRight: "8px",
                            gap: "8px",
                            paddingTop: "4px",
                            paddingBottom: "4px",
                        } },
                        // Animated-placeholder textarea when example prompts exist,
                        // plain textarea otherwise; both share styling and state.
                        typingTexts.length > 0 ? (React.createElement(TypingTextarea, { texts: typingTexts, ref: textareaRef, style: {
                                flex: 1,
                                width: "100%",
                                border: "none",
                                boxShadow: "none",
                                outline: "none",
                                paddingLeft: "4px",
                                paddingRight: "4px",
                                resize: "none",
                                fontSize: "16px",
                                overflow: "hidden",
                                backgroundColor: "transparent",
                                display: "block",
                                paddingTop: "0",
                                paddingBottom: "0",
                                lineHeight: "1.5",
                                overflowY: "auto",
                                maxHeight: "192px",
                                marginTop: "4px",
                                color: "#f4f4f5",
                                fontFamily: "inherit",
                            }, value: query, onFocus: handleFocus, onBlur: handleBlur, onChange: (e) => setQuery(e.target.value), onSubmit: (e) => {
                                // Prevent submission if recording
                                if (isRecording || isTranscribing) {
                                    e.preventDefault();
                                    return;
                                }
                                handleSubmit(e);
                            }, rows: 1 })) : (React.createElement(Textarea, { ref: textareaRef, placeholder: placeholder, style: {
                                flex: 1,
                                width: "100%",
                                border: "none",
                                boxShadow: "none",
                                outline: "none",
                                paddingLeft: "4px",
                                paddingRight: "4px",
                                resize: "none",
                                fontSize: "16px",
                                overflow: "hidden",
                                backgroundColor: "transparent",
                                display: "block",
                                paddingTop: "0",
                                paddingBottom: "0",
                                lineHeight: "1.5",
                                overflowY: "auto",
                                maxHeight: "192px",
                                marginTop: "4px",
                                color: "#f4f4f5",
                                fontFamily: "inherit",
                            }, value: query, onFocus: handleFocus, onBlur: handleBlur, onChange: (e) => setQuery(e.target.value), onKeyDown: (e) => {
                                // Plain Enter submits; Shift+Enter inserts a newline.
                                if (e.key === "Enter") {
                                    if (e.shiftKey) {
                                        return;
                                    }
                                    e.preventDefault();
                                    handleSubmit(e);
                                }
                            }, rows: 1 })),
                        // Mic toggle + submit button (spinner while submitting).
                        React.createElement("div", { style: { display: "flex", flexDirection: "row", gap: "4px" } },
                            React.createElement(VoiceButton, { isRecording: isRecording, isTranscribing: isTranscribing, onVoiceRecording: handleVoiceRecording }),
                            React.createElement(Button, { style: {
                                    height: "32px",
                                    width: "32px",
                                }, disabled: query.length === 0 || isSubmitting || hasPendingEnv, variant: "secondary", onClick: handleSubmit, size: "icon" }, isSubmitting ? (React.createElement(LoaderIcon, { size: 20, style: { animation: "spin 1s linear infinite" } })) : (React.createElement(ArrowUpIcon, { style: { width: "20px", height: "20px" } }))))),
                    // Footer row; the model-selector container renders empty today.
                    React.createElement("div", { style: {
                            display: "flex",
                            alignItems: "center",
                            justifyContent: "space-between",
                            paddingLeft: "8px",
                            paddingRight: "8px",
                        } },
                        React.createElement("div", { style: { display: "flex", alignItems: "center", gap: "4px" } }, showModelSelector && (React.createElement("div", { style: { display: "flex", alignItems: "center", gap: "8px" } })))))),
        // Keyframes for the animated gradient border and the loader spinner.
        React.createElement("style", null, `
        @keyframes gradient-bg {
          0%,
          100% {
            background-position: 0% 50%;
          }
          50% {
            background-position: 100% 50%;
          }
        }
        @keyframes spin {
          from {
            transform: rotate(0deg);
          }
          to {
            transform: rotate(360deg);
          }
        }
      `))));
};
|