nitrostack 1.0.71 → 1.0.72

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ import type { Tool } from '@/lib/types';
4
4
  import { useStudioStore } from '@/lib/store';
5
5
  import { WidgetRenderer } from './WidgetRenderer';
6
6
  import { useRouter } from 'next/navigation';
7
- import { Zap, Palette, Maximize2, Play, Sparkles, MessageSquare } from 'lucide-react';
7
+ import { BoltIcon, PaintBrushIcon, ArrowsPointingOutIcon, PlayIcon, SparklesIcon, ChatBubbleLeftIcon } from '@heroicons/react/24/outline';
8
8
 
9
9
  interface ToolCardProps {
10
10
  tool: Tool;
@@ -16,16 +16,16 @@ export function ToolCard({ tool, onExecute }: ToolCardProps) {
16
16
  const router = useRouter();
17
17
 
18
18
  // Check if tool has widget - check multiple sources
19
- const widgetUri =
20
- tool.widget?.route ||
21
- tool.outputTemplate ||
22
- tool._meta?.['ui/template'] ||
19
+ const widgetUri =
20
+ tool.widget?.route ||
21
+ tool.outputTemplate ||
22
+ tool._meta?.['ui/template'] ||
23
23
  tool._meta?.['openai/outputTemplate'];
24
24
  const hasWidget = !!widgetUri && widgetUri.trim().length > 0;
25
-
25
+
26
26
  // Get example data for preview - check both examples and _meta
27
27
  const exampleData = tool.examples?.response || tool._meta?.['tool/examples']?.response;
28
-
28
+
29
29
  // Debug logging for widget detection
30
30
  if (hasWidget) {
31
31
  console.log('ToolCard - Widget detected:', {
@@ -41,16 +41,16 @@ export function ToolCard({ tool, onExecute }: ToolCardProps) {
41
41
 
42
42
  const handleUseInChat = (e: React.MouseEvent) => {
43
43
  e.stopPropagation();
44
-
44
+
45
45
  // Build the tool execution message
46
46
  const toolMessage = `Use the ${tool.name} tool`;
47
-
47
+
48
48
  // Store the message in localStorage
49
49
  if (typeof window !== 'undefined') {
50
50
  window.localStorage.setItem('chatInput', toolMessage);
51
51
  window.localStorage.setItem('suggestedTool', tool.name);
52
52
  }
53
-
53
+
54
54
  router.push('/chat');
55
55
  };
56
56
 
@@ -64,24 +64,29 @@ export function ToolCard({ tool, onExecute }: ToolCardProps) {
64
64
  className="card card-hover p-6 animate-fade-in cursor-pointer"
65
65
  onClick={() => onExecute(tool)}
66
66
  >
67
- {/* Header */}
68
- <div className="flex items-start justify-between mb-4">
69
- <div className="flex items-center gap-3">
70
- <div className={`w-12 h-12 rounded-lg flex items-center justify-center ${hasWidget ? 'bg-purple-500/10' : 'bg-primary/10'}`}>
71
- {hasWidget ? (
72
- <Palette className="w-6 h-6 text-purple-500" />
73
- ) : (
74
- <Zap className="w-6 h-6 text-primary" />
67
+ {/* Clean Minimal Header */}
68
+ <div className="flex items-center gap-3 mb-3">
69
+ {/* Icon - MCP Brand Colors Only */}
70
+ {hasWidget ? (
71
+ <PaintBrushIcon className="h-5 w-5 text-secondary flex-shrink-0" />
72
+ ) : (
73
+ <BoltIcon className="h-5 w-5 text-muted-foreground flex-shrink-0" />
74
+ )}
75
+
76
+ {/* Content */}
77
+ <div className="flex-1 min-w-0">
78
+ <h3 className="font-medium text-[15px] text-foreground leading-tight truncate">
79
+ {tool.name}
80
+ </h3>
81
+ <div className="flex items-center gap-1.5 text-xs text-muted-foreground leading-none mt-1">
82
+ <span>Tool</span>
83
+ {hasWidget && (
84
+ <>
85
+ <span className="text-muted-foreground/40">•</span>
86
+ <span className="text-secondary">Widget</span>
87
+ </>
75
88
  )}
76
89
  </div>
77
- <div>
78
- <h3 className="font-semibold text-lg text-foreground">
79
- {tool.name}
80
- </h3>
81
- <span className={`badge ${hasWidget ? 'badge-secondary' : 'badge-primary'} text-xs mt-1`}>
82
- {hasWidget ? 'tool + widget' : 'tool'}
83
- </span>
84
- </div>
85
90
  </div>
86
91
  </div>
87
92
 
@@ -94,7 +99,7 @@ export function ToolCard({ tool, onExecute }: ToolCardProps) {
94
99
  {hasWidget && widgetUri && exampleData && (
95
100
  <div className="relative mb-4 rounded-lg overflow-hidden border border-border bg-muted/20">
96
101
  <div className="absolute top-2 left-2 z-10 flex items-center gap-1 bg-primary/90 backdrop-blur-sm text-black px-2 py-1 rounded-md text-xs font-semibold shadow-lg">
97
- <Sparkles className="w-3 h-3" />
102
+ <SparklesIcon className="h-3 w-3" />
98
103
  Widget Preview
99
104
  </div>
100
105
  <div className="h-64 relative">
@@ -107,31 +112,35 @@ export function ToolCard({ tool, onExecute }: ToolCardProps) {
107
112
  </div>
108
113
  )}
109
114
 
110
- {/* Action Buttons */}
111
- <div className="flex flex-wrap items-center gap-2" onClick={(e) => e.stopPropagation()}>
115
+ {/* Premium Action Toolbar */}
116
+ <div className="flex items-center gap-1.5 pt-4 border-t border-border/50" onClick={(e) => e.stopPropagation()}>
117
+ {/* Secondary: Enlarge Widget (Icon-only) */}
112
118
  {hasWidget && (
113
119
  <button
114
120
  onClick={handleEnlarge}
115
- className="btn btn-secondary flex-1 min-w-[90px] text-xs sm:text-sm gap-1.5 px-2.5 py-1.5 sm:px-4 sm:py-2"
121
+ className="group relative h-8 w-8 rounded hover:bg-muted transition-all duration-200 flex items-center justify-center"
122
+ title="View fullscreen"
116
123
  >
117
- <Maximize2 className="w-3.5 h-3.5 sm:w-4 sm:h-4 flex-shrink-0" />
118
- <span className="truncate">Enlarge</span>
124
+ <ArrowsPointingOutIcon className="h-4 w-4 text-muted-foreground group-hover:text-foreground group-hover:scale-110 transition-all" />
119
125
  </button>
120
126
  )}
121
- <button
122
- onClick={() => onExecute(tool)}
123
- className="btn btn-primary flex-1 min-w-[90px] text-xs sm:text-sm gap-1.5 px-2.5 py-1.5 sm:px-4 sm:py-2"
127
+
128
+ {/* Primary: Execute Tool (Prominent Button) */}
129
+ <button
130
+ onClick={() => onExecute(tool)}
131
+ className="group relative flex-1 h-8 rounded bg-primary hover:bg-primary/90 transition-all duration-200 flex items-center justify-center gap-1.5 shadow-sm hover:shadow-md"
124
132
  >
125
- <Play className="w-3.5 h-3.5 sm:w-4 sm:h-4 flex-shrink-0" />
126
- <span className="truncate">Execute</span>
133
+ <PlayIcon className="h-4 w-4 text-white" />
134
+ <span className="text-sm font-medium text-white">Execute</span>
127
135
  </button>
136
+
137
+ {/* Secondary: Use in Chat (Icon-only) */}
128
138
  <button
129
139
  onClick={handleUseInChat}
130
- className="btn btn-secondary flex-1 min-w-[90px] text-xs sm:text-sm gap-1.5 px-2.5 py-1.5 sm:px-4 sm:py-2"
131
- title="Use in Chat"
140
+ className="group relative h-8 w-8 rounded hover:bg-muted transition-all duration-200 flex items-center justify-center"
141
+ title="Use in AI chat"
132
142
  >
133
- <MessageSquare className="w-3.5 h-3.5 sm:w-4 sm:h-4 flex-shrink-0" />
134
- <span className="truncate">Chat</span>
143
+ <ChatBubbleLeftIcon className="h-4 w-4 text-muted-foreground group-hover:text-foreground group-hover:scale-110 transition-all" />
135
144
  </button>
136
145
  </div>
137
146
  </div>
@@ -0,0 +1,469 @@
1
+ 'use client';
2
+
3
+ import { useState, useEffect, useRef, useCallback } from 'react';
4
+ import {
5
+ MicrophoneIcon,
6
+ XMarkIcon,
7
+ Cog6ToothIcon,
8
+ ChatBubbleLeftRightIcon,
9
+ SpeakerWaveIcon
10
+ } from '@heroicons/react/24/outline';
11
+
12
// LLM State type
// Assistant lifecycle as driven by the parent: idle → listening → thinking → speaking.
type LLMState = 'idle' | 'listening' | 'thinking' | 'speaking';

interface VoiceOrbOverlayProps {
  // Whether the fullscreen voice overlay is visible.
  isOpen: boolean;
  // Invoked when the user ends voice mode (close button).
  onClose: () => void;
  // Called with the final recognized transcript to dispatch to the LLM.
  onSendMessage: (text: string) => void;
  // NOTE(review): not read anywhere in this component as visible here —
  // presumably TTS is performed by the parent; confirm before removing.
  elevenLabsApiKey: string;
  // Current assistant state; controls orb visuals, status text and mic availability.
  llmState: LLMState;
  // Text displayed while llmState === 'speaking'.
  spokenText?: string;
  // Fired once per open so the assistant can greet the user.
  onGreet?: () => void;
  // When provided, a settings button is rendered in the control bar.
  onSettingsClick?: () => void;
  // Overlay-only vs. overlay + chat transcript presentation.
  displayMode?: 'voice-only' | 'voice-chat';
  // Toggle handler for displayMode; renders the toggle button when provided.
  onDisplayModeChange?: (mode: 'voice-only' | 'voice-chat') => void;
  // Speech-recognition language tag (e.g. 'en-US'); defaults to 'en-US'.
  inputLanguage?: string;
  // Talk-to-interrupt: called when the user speaks while TTS is playing.
  onInterrupt?: () => void;
  voiceModeActive?: boolean; // Keep speech recognition active even when overlay closed
}
+ }
30
+
31
+ export function VoiceOrbOverlay({
32
+ isOpen,
33
+ onClose,
34
+ onSendMessage,
35
+ elevenLabsApiKey,
36
+ llmState,
37
+ spokenText,
38
+ onGreet,
39
+ onSettingsClick,
40
+ displayMode = 'voice-only',
41
+ onDisplayModeChange,
42
+ inputLanguage = 'en-US',
43
+ onInterrupt,
44
+ voiceModeActive = false
45
+ }: VoiceOrbOverlayProps) {
46
+ const [transcript, setTranscript] = useState('');
47
+ const [hasGreeted, setHasGreeted] = useState(false);
48
+
49
+ const recognitionRef = useRef<any>(null);
50
+ const silenceTimeoutRef = useRef<NodeJS.Timeout | null>(null);
51
+ const isListeningRef = useRef(false);
52
+
53
+ // Keep ref in sync
54
+ useEffect(() => {
55
+ isListeningRef.current = llmState === 'listening';
56
+ }, [llmState]);
57
+
58
+ // Handle sending message
59
+ const handleSend = useCallback((text: string) => {
60
+ if (!text.trim()) return;
61
+
62
+ if (recognitionRef.current) {
63
+ try {
64
+ recognitionRef.current.stop();
65
+ } catch (e) { }
66
+ }
67
+ setTranscript('');
68
+ onSendMessage(text.trim());
69
+ }, [onSendMessage]);
70
+
71
+ // Initialize Speech Recognition
72
+ useEffect(() => {
73
+ if (typeof window === 'undefined') return;
74
+
75
+ const SpeechRecognition = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
76
+ if (!SpeechRecognition) {
77
+ console.error('Speech Recognition not supported');
78
+ return;
79
+ }
80
+
81
+ const recognition = new SpeechRecognition();
82
+ recognition.continuous = false;
83
+ recognition.interimResults = true;
84
+ recognition.lang = inputLanguage; // Use configured input language
85
+
86
+ let currentTranscript = '';
87
+
88
+ recognition.onresult = (event: any) => {
89
+ let finalTranscript = '';
90
+ let interimTranscript = '';
91
+
92
+ for (let i = event.resultIndex; i < event.results.length; i++) {
93
+ const text = event.results[i][0].transcript;
94
+ if (event.results[i].isFinal) {
95
+ finalTranscript += text;
96
+ } else {
97
+ interimTranscript += text;
98
+ }
99
+ }
100
+
101
+ // Talk-to-interrupt: if user speaks during TTS, stop it immediately
102
+ if ((finalTranscript || interimTranscript) && llmState === 'speaking' && onInterrupt) {
103
+ console.log('🛑 User interrupted TTS');
104
+ onInterrupt();
105
+ }
106
+
107
+ currentTranscript = finalTranscript || interimTranscript;
108
+ setTranscript(currentTranscript);
109
+
110
+ if (finalTranscript.trim()) {
111
+ if (silenceTimeoutRef.current) {
112
+ clearTimeout(silenceTimeoutRef.current);
113
+ }
114
+
115
+ silenceTimeoutRef.current = setTimeout(() => {
116
+ if (currentTranscript.trim() && llmState === 'listening') {
117
+ handleSend(currentTranscript.trim());
118
+ currentTranscript = '';
119
+ }
120
+ }, 800);
121
+ }
122
+ };
123
+
124
+ recognition.onerror = (event: any) => {
125
+ console.error('Speech recognition error:', event.error);
126
+ };
127
+
128
+ recognition.onend = () => {
129
+ setTimeout(() => {
130
+ if (isListeningRef.current && llmState === 'listening') {
131
+ try {
132
+ recognition.start();
133
+ } catch (e) { }
134
+ }
135
+ }, 100);
136
+ };
137
+
138
+ recognitionRef.current = recognition;
139
+
140
+ return () => {
141
+ if (silenceTimeoutRef.current) {
142
+ clearTimeout(silenceTimeoutRef.current);
143
+ }
144
+ try {
145
+ recognition.stop();
146
+ } catch (e) { }
147
+ };
148
+ }, [handleSend, llmState]);
149
+
150
+ // Start listening
151
+ const startListening = useCallback(() => {
152
+ if (recognitionRef.current && llmState !== 'speaking' && llmState !== 'thinking') {
153
+ try {
154
+ recognitionRef.current.start();
155
+ } catch (e) {
156
+ console.error('Failed to start speech recognition:', e);
157
+ }
158
+ }
159
+ }, [llmState]);
160
+
161
+ // Stop listening
162
+ const stopListening = useCallback(() => {
163
+ if (recognitionRef.current) {
164
+ try {
165
+ recognitionRef.current.stop();
166
+ } catch (e) { }
167
+ }
168
+ if (silenceTimeoutRef.current) {
169
+ clearTimeout(silenceTimeoutRef.current);
170
+ }
171
+ }, []);
172
+
173
+ // Greet on open
174
+ useEffect(() => {
175
+ if (isOpen && !hasGreeted && onGreet) {
176
+ setHasGreeted(true);
177
+ onGreet();
178
+ }
179
+ if (!isOpen && !voiceModeActive) {
180
+ setHasGreeted(false);
181
+ setTranscript('');
182
+ }
183
+ }, [isOpen, hasGreeted, onGreet, voiceModeActive]);
184
+
185
+ // Start listening after greeting or when state becomes listening
186
+ // Also listen when voiceModeActive is true (voice+chat mode)
187
+ useEffect(() => {
188
+ const shouldListen = (isOpen || voiceModeActive) && llmState === 'listening';
189
+ if (shouldListen) {
190
+ startListening();
191
+ } else if (!isOpen && !voiceModeActive) {
192
+ stopListening();
193
+ }
194
+ }, [isOpen, voiceModeActive, llmState, startListening, stopListening]);
195
+
196
+ // Cleanup on close - only if voice mode is completely off
197
+ useEffect(() => {
198
+ if (!isOpen && !voiceModeActive) {
199
+ stopListening();
200
+ }
201
+ }, [isOpen, voiceModeActive, stopListening]);
202
+
203
+ // Handle mic click
204
+ const handleMicClick = () => {
205
+ if (llmState === 'listening' && transcript.trim()) {
206
+ handleSend(transcript.trim());
207
+ }
208
+ };
209
+
210
+ // Handle close
211
+ const handleClose = () => {
212
+ stopListening();
213
+ setHasGreeted(false);
214
+ onClose();
215
+ };
216
+
217
+ if (!isOpen) return null;
218
+
219
+ // Status text based on state
220
+ const getStatusText = () => {
221
+ switch (llmState) {
222
+ case 'speaking':
223
+ return spokenText || 'Speaking...';
224
+ case 'thinking':
225
+ return 'Processing your request...';
226
+ case 'listening':
227
+ return transcript || 'Listening...';
228
+ default:
229
+ return 'Ready';
230
+ }
231
+ };
232
+
233
+ return (
234
+ <div className="fixed inset-0 z-50 flex items-center justify-center bg-[#0a0a0a]/98 backdrop-blur-xl" style={{ left: 'var(--sidebar-width, 15rem)' }}>
235
+ {/* Main Container - Properly Centered */}
236
+ <div className="flex flex-col items-center justify-center gap-10 w-full max-w-lg px-6 h-full">
237
+
238
+ {/* Professional Orb */}
239
+ <div className="relative flex items-center justify-center">
240
+ {/* Ambient glow */}
241
+ <div
242
+ className={`absolute w-64 h-64 rounded-full transition-all duration-700 ${llmState === 'speaking'
243
+ ? 'bg-gradient-to-br from-blue-500/30 via-violet-500/20 to-cyan-500/30 scale-110 blur-3xl animate-pulse'
244
+ : llmState === 'thinking'
245
+ ? 'bg-gradient-to-br from-amber-500/20 via-orange-500/15 to-yellow-500/20 scale-100 blur-3xl animate-spin-slow'
246
+ : llmState === 'listening'
247
+ ? 'bg-gradient-to-br from-blue-500/25 via-cyan-500/20 to-blue-600/25 scale-105 blur-3xl animate-pulse'
248
+ : 'bg-gradient-to-br from-slate-500/15 via-slate-600/10 to-slate-500/15 scale-100 blur-3xl'
249
+ }`}
250
+ />
251
+
252
+ {/* Orb container */}
253
+ <div
254
+ className={`relative w-44 h-44 rounded-full transition-transform duration-500 ${llmState === 'speaking' ? 'scale-110'
255
+ : llmState === 'thinking' ? 'scale-95'
256
+ : llmState === 'listening' ? 'scale-105'
257
+ : 'scale-100'
258
+ }`}
259
+ >
260
+ {/* Rotating gradient ring */}
261
+ <div
262
+ className={`absolute inset-0 rounded-full ${llmState === 'thinking' ? 'animate-spin-slow' : ''
263
+ }`}
264
+ style={{
265
+ background: llmState === 'thinking'
266
+ ? 'conic-gradient(from 0deg, #f59e0b, #f97316, #ef4444, #f59e0b)'
267
+ : llmState === 'speaking'
268
+ ? 'conic-gradient(from 0deg, #3b82f6, #8b5cf6, #06b6d4, #3b82f6)'
269
+ : llmState === 'listening'
270
+ ? 'conic-gradient(from 0deg, #3b82f6, #60a5fa, #3b82f6)'
271
+ : 'conic-gradient(from 0deg, #475569, #64748b, #475569)',
272
+ padding: '3px',
273
+ borderRadius: '50%'
274
+ }}
275
+ >
276
+ {/* Inner orb */}
277
+ <div
278
+ className="w-full h-full rounded-full bg-[#0a0a0a] flex items-center justify-center"
279
+ style={{
280
+ boxShadow: llmState === 'speaking'
281
+ ? '0 0 60px 10px rgba(59, 130, 246, 0.3), inset 0 0 30px rgba(139, 92, 246, 0.2)'
282
+ : llmState === 'thinking'
283
+ ? '0 0 40px 5px rgba(245, 158, 11, 0.2), inset 0 0 20px rgba(249, 115, 22, 0.1)'
284
+ : llmState === 'listening'
285
+ ? '0 0 50px 8px rgba(59, 130, 246, 0.25), inset 0 0 25px rgba(96, 165, 250, 0.15)'
286
+ : '0 0 30px 5px rgba(71, 85, 105, 0.15)'
287
+ }}
288
+ >
289
+ {/* Center gradient */}
290
+ <div
291
+ className={`w-32 h-32 rounded-full transition-all duration-500 ${llmState === 'speaking' ? 'animate-pulse-fast'
292
+ : llmState === 'listening' ? 'animate-pulse'
293
+ : ''
294
+ }`}
295
+ style={{
296
+ background: llmState === 'thinking'
297
+ ? 'radial-gradient(circle, #f59e0b 0%, #0a0a0a 70%)'
298
+ : llmState === 'speaking'
299
+ ? 'radial-gradient(circle, #8b5cf6 0%, #3b82f6 40%, #0a0a0a 70%)'
300
+ : llmState === 'listening'
301
+ ? 'radial-gradient(circle, #60a5fa 0%, #3b82f6 40%, #0a0a0a 70%)'
302
+ : 'radial-gradient(circle, #64748b 0%, #0a0a0a 60%)'
303
+ }}
304
+ />
305
+ </div>
306
+ </div>
307
+ </div>
308
+ </div>
309
+
310
+ {/* Status Text */}
311
+ <div className="text-center max-w-md min-h-[80px] flex items-center justify-center">
312
+ <p
313
+ className={`text-lg font-light leading-relaxed transition-all duration-300 ${llmState === 'speaking'
314
+ ? 'text-white/90'
315
+ : llmState === 'thinking'
316
+ ? 'text-amber-400/80 animate-pulse'
317
+ : llmState === 'listening' && transcript
318
+ ? 'text-white/80'
319
+ : 'text-white/50'
320
+ }`}
321
+ >
322
+ {getStatusText()}
323
+ </p>
324
+ </div>
325
+
326
+ {/* Control Bar */}
327
+ <div className="flex items-center gap-3">
328
+ {/* Settings */}
329
+ {onSettingsClick && (
330
+ <button
331
+ onClick={onSettingsClick}
332
+ className="w-12 h-12 rounded-full bg-white/5 hover:bg-white/10 border border-white/10 flex items-center justify-center transition-all"
333
+ title="Voice Settings"
334
+ >
335
+ <Cog6ToothIcon className="w-5 h-5 text-white/60" />
336
+ </button>
337
+ )}
338
+
339
+ {/* Main mic button */}
340
+ <button
341
+ onClick={handleMicClick}
342
+ disabled={llmState === 'thinking' || llmState === 'speaking'}
343
+ className={`w-16 h-16 rounded-full flex items-center justify-center transition-all duration-300 ${llmState === 'listening'
344
+ ? 'bg-blue-500 text-white shadow-lg shadow-blue-500/30 scale-110'
345
+ : llmState === 'thinking'
346
+ ? 'bg-amber-500/20 text-amber-400 cursor-wait'
347
+ : llmState === 'speaking'
348
+ ? 'bg-violet-500/20 text-violet-400 cursor-not-allowed'
349
+ : 'bg-white/10 text-white/60 hover:bg-white/20'
350
+ }`}
351
+ >
352
+ {llmState === 'speaking' ? (
353
+ <SpeakerWaveIcon className="w-7 h-7 animate-pulse" />
354
+ ) : (
355
+ <MicrophoneIcon className="w-7 h-7" />
356
+ )}
357
+ </button>
358
+
359
+ {/* Display mode toggle */}
360
+ {onDisplayModeChange && (
361
+ <button
362
+ onClick={() => onDisplayModeChange(displayMode === 'voice-only' ? 'voice-chat' : 'voice-only')}
363
+ className={`w-12 h-12 rounded-full border border-white/10 flex items-center justify-center transition-all ${displayMode === 'voice-chat'
364
+ ? 'bg-blue-500/20 text-blue-400'
365
+ : 'bg-white/5 hover:bg-white/10 text-white/60'
366
+ }`}
367
+ title={displayMode === 'voice-only' ? 'Show Chat' : 'Voice Only'}
368
+ >
369
+ <ChatBubbleLeftRightIcon className="w-5 h-5" />
370
+ </button>
371
+ )}
372
+
373
+ {/* Close */}
374
+ <button
375
+ onClick={handleClose}
376
+ className="w-12 h-12 rounded-full bg-white/5 hover:bg-red-500/20 border border-white/10 hover:border-red-500/30 flex items-center justify-center transition-all group"
377
+ title="End Voice Mode"
378
+ >
379
+ <XMarkIcon className="w-5 h-5 text-white/60 group-hover:text-red-400" />
380
+ </button>
381
+ </div>
382
+
383
+ {/* State Indicator Pills */}
384
+ <div className="flex items-center gap-2 text-xs">
385
+ <div className={`px-3 py-1 rounded-full border transition-all ${llmState === 'listening'
386
+ ? 'bg-blue-500/20 border-blue-500/30 text-blue-400'
387
+ : 'bg-white/5 border-white/10 text-white/30'
388
+ }`}>
389
+ Listening
390
+ </div>
391
+ <div className={`px-3 py-1 rounded-full border transition-all ${llmState === 'thinking'
392
+ ? 'bg-amber-500/20 border-amber-500/30 text-amber-400'
393
+ : 'bg-white/5 border-white/10 text-white/30'
394
+ }`}>
395
+ Processing
396
+ </div>
397
+ <div className={`px-3 py-1 rounded-full border transition-all ${llmState === 'speaking'
398
+ ? 'bg-violet-500/20 border-violet-500/30 text-violet-400'
399
+ : 'bg-white/5 border-white/10 text-white/30'
400
+ }`}>
401
+ Speaking
402
+ </div>
403
+ </div>
404
+ </div>
405
+
406
+ {/* Custom CSS animations */}
407
+ <style jsx>{`
408
+ @keyframes spin-slow {
409
+ from { transform: rotate(0deg); }
410
+ to { transform: rotate(360deg); }
411
+ }
412
+ @keyframes pulse-fast {
413
+ 0%, 100% { opacity: 1; transform: scale(1); }
414
+ 50% { opacity: 0.8; transform: scale(1.05); }
415
+ }
416
+ .animate-spin-slow {
417
+ animation: spin-slow 3s linear infinite;
418
+ }
419
+ .animate-pulse-fast {
420
+ animation: pulse-fast 0.8s ease-in-out infinite;
421
+ }
422
+ `}</style>
423
+ </div>
424
+ );
425
+ }
426
+
427
+ // Header Voice Badge for Voice+Chat mode - shows orb + state text
428
+ export function MiniVoiceOrb({
429
+ llmState,
430
+ onClick
431
+ }: {
432
+ llmState: LLMState;
433
+ onClick?: () => void;
434
+ }) {
435
+ const getStateInfo = () => {
436
+ switch (llmState) {
437
+ case 'speaking':
438
+ return { text: 'Speaking', bgClass: 'bg-violet-500/10 border-violet-500/30', dotClass: 'bg-violet-500', textClass: 'text-violet-400' };
439
+ case 'thinking':
440
+ return { text: 'Processing', bgClass: 'bg-amber-500/10 border-amber-500/30', dotClass: 'bg-amber-500', textClass: 'text-amber-400' };
441
+ case 'listening':
442
+ return { text: 'Listening', bgClass: 'bg-blue-500/10 border-blue-500/30', dotClass: 'bg-blue-500', textClass: 'text-blue-400' };
443
+ default:
444
+ return { text: 'Ready', bgClass: 'bg-slate-500/10 border-slate-500/30', dotClass: 'bg-slate-500', textClass: 'text-slate-400' };
445
+ }
446
+ };
447
+
448
+ const stateInfo = getStateInfo();
449
+
450
+ return (
451
+ <button
452
+ onClick={onClick}
453
+ className={`flex items-center gap-2 px-3 py-1.5 rounded-full border transition-all hover:scale-105 ${stateInfo.bgClass}`}
454
+ title="Click to expand voice mode"
455
+ >
456
+ {/* Mini orb */}
457
+ <div className="relative">
458
+ <div className={`w-2.5 h-2.5 rounded-full ${stateInfo.dotClass} ${llmState === 'listening' || llmState === 'speaking' ? 'animate-pulse' : llmState === 'thinking' ? 'animate-spin' : ''}`} />
459
+ {(llmState === 'listening' || llmState === 'speaking') && (
460
+ <div className={`absolute inset-0 w-2.5 h-2.5 rounded-full ${stateInfo.dotClass} animate-ping opacity-75`} />
461
+ )}
462
+ </div>
463
+ {/* State text */}
464
+ <span className={`text-xs font-medium ${stateInfo.textClass}`}>
465
+ {stateInfo.text}
466
+ </span>
467
+ </button>
468
+ );
469
+ }
@@ -210,9 +210,11 @@ export function WidgetRenderer({ uri, data, className = '' }: WidgetRendererProp
210
210
  const { height } = event.data;
211
211
  console.log('📏 Received widget resize:', height);
212
212
  if (height && typeof height === 'number') {
213
- const newHeight = Math.min(height, 400);
213
+ const isExpanded = className?.includes('widget-expanded');
214
+ const max = isExpanded ? 800 : 400;
215
+ const newHeight = Math.min(height, max);
214
216
  console.log('📏 Setting content height to:', newHeight);
215
- setContentHeight(newHeight); // Cap at 400px max
217
+ setContentHeight(newHeight); // Cap at max height
216
218
  }
217
219
  }
218
220
  };
@@ -323,6 +325,9 @@ export function WidgetRenderer({ uri, data, className = '' }: WidgetRendererProp
323
325
  }, [uri, data, isDevMode]);
324
326
 
325
327
  const isInChat = className?.includes('widget-in-chat');
328
+ const isExpanded = className?.includes('widget-expanded');
329
+ const maxHeight = isExpanded ? 800 : 400;
330
+
326
331
  const finalHeight = isInChat ? `${contentHeight}px` : '100%';
327
332
 
328
333
  return (
@@ -333,7 +338,7 @@ export function WidgetRenderer({ uri, data, className = '' }: WidgetRendererProp
333
338
  style={{
334
339
  width: '100%',
335
340
  height: finalHeight,
336
- maxHeight: isInChat ? '400px' : '100%',
341
+ maxHeight: isInChat ? `${maxHeight}px` : '100%',
337
342
  border: 'none',
338
343
  background: 'transparent',
339
344
  overflow: 'hidden',