@mcp-b/embedded-agent 0.0.1

@@ -0,0 +1,1153 @@
+ import { AssistantRuntime, ThreadMessageLike, ToolCallMessagePartComponent } from "@assistant-ui/react";
+ import { UIDataTypes, UIMessage, UITools } from "ai";
+ import * as react0 from "react";
+ import { ComponentPropsWithRef, FC, HTMLAttributes, ReactNode } from "react";
+ import { ClassValue } from "clsx";
+ import * as react_jsx_runtime5 from "react/jsx-runtime";
+ import * as AvatarPrimitive from "@radix-ui/react-avatar";
+ import * as DialogPrimitive from "@radix-ui/react-dialog";
+ import * as TooltipPrimitive from "@radix-ui/react-tooltip";
+ import { VariantProps } from "class-variance-authority";
+ import { Client } from "@modelcontextprotocol/sdk/client/index.js";
+ import { Tool } from "@modelcontextprotocol/sdk/types.js";
+ import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area";
+ import * as SeparatorPrimitive from "@radix-ui/react-separator";
+ import * as class_variance_authority_types0 from "class-variance-authority/types";
+ import { CallToolResult, GetPromptResult, MCPConnectionState, MCPSource, MCPSourceConfig, Prompt, ReadResourceResult, Resource, ResourceTemplate, SamplingHandler, Tool as Tool$1, ToolWithSource } from "@mcp-b/cloud-mirror-types";
+ import { AgentChatState, PendingTool } from "@mcp-b/agents";
+ import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
+
+ //#region src/web-component.d.ts
+
+ /**
+ * Props for the EmbeddedAgent React component
+ *
+ * These props map to HTML attributes in kebab-case when used as a web component:
+ * - appId -> app-id
+ * - apiBase -> api-base
+ * - tokenEndpoint -> token-endpoint
+ * - autoConnectLocal -> auto-connect-local
+ */
+ interface EmbeddedAgentProps {
+ /** Your app/workspace ID */
+ appId: string;
+ /** API base URL for your WebMCP worker */
+ apiBase?: string;
+ /** Voice mode token endpoint (defaults to {apiBase}/api/realtime/session) */
+ tokenEndpoint?: string;
+ /** Auto-connect to local MCP source (default: true) */
+ autoConnectLocal?: boolean;
+ /** Callback when tools are updated */
+ onToolsChange?: (tools: unknown[]) => void;
+ /** Callback when voice mode errors */
+ onVoiceError?: (error: string) => void;
+ /** Callback when voice mode connects */
+ onVoiceConnect?: () => void;
+ /** Callback when voice mode disconnects */
+ onVoiceDisconnect?: (duration: number) => void;
+ }
+ /**
+ * EmbeddedAgent React Component
+ *
+ * A fully self-contained chat widget with MCP tool support and optional voice mode.
+ * Can be used as a React component or converted to a web component.
+ */
+ declare const EmbeddedAgent: FC<EmbeddedAgentProps>;
+ /**
+ * Web Component Definition
+ *
+ * Converts EmbeddedAgent to a custom element that can be used in any HTML page.
+ * Attributes are automatically converted from kebab-case to camelCase props.
+ */
+ declare const WebMCPAgentElement: CustomElementConstructor;
+ /**
+ * Register the custom element
+ *
+ * Call this function to register the <webmcp-agent> custom element.
+ * This is automatically called when importing the web-component entry point.
+ * Styles are automatically injected into the document head.
+ */
+ declare function registerWebMCPAgent(tagName?: string): void;
+ //#endregion
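A minimal usage sketch for the block above (not from the package): it assumes the main entry of `@mcp-b/embedded-agent` re-exports these names, as the export list at the end of the file suggests, and the app ID and worker URL are placeholders.

```tsx
// Sketch only: import path and URLs are assumptions, not taken from the package.
import { EmbeddedAgent, registerWebMCPAgent } from "@mcp-b/embedded-agent";

// React usage: props are camelCase.
export function SupportWidget() {
  return (
    <EmbeddedAgent
      appId="YOUR_APP_ID"
      apiBase="https://your-worker.workers.dev"
      autoConnectLocal
      onVoiceError={(error) => console.error("voice error:", error)}
      onVoiceDisconnect={(duration) => console.log(`voice session lasted ${duration}s`)}
    />
  );
}

// Web component usage: register once, then use kebab-case attributes in plain HTML:
//   <webmcp-agent app-id="YOUR_APP_ID" api-base="https://your-worker.workers.dev"></webmcp-agent>
registerWebMCPAgent();
```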
+ //#region src/components/assistant-modal.d.ts
+ declare const AssistantModal: FC;
+ //#endregion
+ //#region src/components/thread-with-voice.d.ts
+ declare const ThreadWithVoice: FC;
+ //#endregion
+ //#region src/components/Thread.d.ts
+ declare const Thread: FC;
+ //#endregion
+ //#region src/components/AssistantMessage.d.ts
+ declare const AssistantMessage: FC;
+ //#endregion
+ //#region src/components/UserMessage.d.ts
+ declare const UserMessage: FC;
+ //#endregion
+ //#region src/components/Composer.d.ts
+ declare const Composer: FC;
+ //#endregion
+ //#region src/components/markdown-text.d.ts
+ declare const MarkdownText: react0.MemoExoticComponent<() => react_jsx_runtime5.JSX.Element>;
+ //#endregion
+ //#region src/components/tool-fallback.d.ts
+ declare const ToolFallback: ToolCallMessagePartComponent;
+ //#endregion
+ //#region src/components/attachment.d.ts
+ declare const UserMessageAttachments: FC;
+ declare const ComposerAttachments: FC;
+ declare const ComposerAddAttachment: FC;
+ //#endregion
+ //#region src/services/realtime/types.d.ts
+ /**
+ * Types for OpenAI Realtime API integration
+ */
+ interface RealtimeConfig {
+ model?: string;
+ voice?: string;
+ apiUrl?: string;
+ tokenEndpoint?: string;
+ }
+ interface RealtimeSession {
+ pc: RTCPeerConnection;
+ dataChannel: RTCDataChannel;
+ audioElement: HTMLAudioElement;
+ localStream: MediaStream | null;
+ remoteStream: MediaStream | null;
+ cleanup: () => void;
+ }
+ type EventCallback = (data: unknown) => void;
+ interface AudioLevelData {
+ micLevel: number;
+ micFrequency: number[];
+ speakerLevel: number;
+ speakerFrequency: number[];
+ }
+ interface TranscriptData {
+ type: 'user' | 'assistant';
+ text: string;
+ isDone: boolean;
+ }
+ interface ToolCallData {
+ status: 'started' | 'completed';
+ toolName: string;
+ error?: string;
+ }
+ interface VoiceModeState {
+ isActive: boolean;
+ isConnecting: boolean;
+ isMuted: boolean;
+ isError: boolean;
+ connectionState: string;
+ audioLevel?: AudioLevelData;
+ transcript?: TranscriptData;
+ toolCall?: ToolCallData;
+ error?: string;
+ }
+ /**
+ * Session state event data from the realtime service
+ */
+ interface SessionStateEventData {
+ state: 'connecting' | 'connected' | 'disconnected' | 'error';
+ isActive: boolean;
+ isMuted: boolean;
+ durationSeconds?: number;
+ }
+ /**
+ * User transcript event data
+ */
+ interface UserTranscriptEventData {
+ text: string;
+ }
+ /**
+ * Assistant transcript streaming event data
+ */
+ interface AssistantTranscriptEventData {
+ delta?: string;
+ transcript?: string;
+ }
+ /**
+ * Tool call started event data
+ */
+ interface ToolCallStartedEventData {
+ name: string;
+ }
+ /**
+ * Tool call completed event data
+ */
+ interface ToolCallCompletedEventData {
+ name: string;
+ error?: string;
+ }
+ /**
+ * Error event data
+ */
+ interface ErrorEventData {
+ error?: string;
+ message?: string;
+ type?: string;
+ }
+ /**
+ * Type guard for SessionStateEventData
+ */
+ declare function isSessionStateEventData(data: unknown): data is SessionStateEventData;
+ /**
+ * Type guard for UserTranscriptEventData
+ */
+ declare function isUserTranscriptEventData(data: unknown): data is UserTranscriptEventData;
+ /**
+ * Type guard for AssistantTranscriptEventData
+ */
+ declare function isAssistantTranscriptEventData(data: unknown): data is AssistantTranscriptEventData;
+ /**
+ * Type guard for ToolCallStartedEventData
+ */
+ declare function isToolCallStartedEventData(data: unknown): data is ToolCallStartedEventData;
+ /**
+ * Type guard for ToolCallCompletedEventData
+ */
+ declare function isToolCallCompletedEventData(data: unknown): data is ToolCallCompletedEventData;
+ /**
+ * Type guard for ErrorEventData
+ */
+ declare function isErrorEventData(data: unknown): data is ErrorEventData;
+ /**
+ * Type guard for AudioLevelData
+ */
+ declare function isAudioLevelData(data: unknown): data is AudioLevelData;
+ //#endregion
+ //#region src/services/realtime/tool-manager.d.ts
+ interface RegisteredTool {
+ name: string;
+ description: string;
+ inputSchema?: Record<string, unknown>;
+ }
+ type ToolExecutor = (name: string, args: Record<string, unknown>) => Promise<unknown>;
+ //#endregion
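For orientation, a sketch of how these two shapes fit together: JSON-Schema-style tool metadata plus the executor that is called when the model invokes a tool. The weather tool, its schema, and its return value are hypothetical; only the type shapes come from the declarations above.

```ts
import type { RegisteredTool, ToolExecutor } from "@mcp-b/embedded-agent";

// Hypothetical tool metadata handed to the realtime service (see the next region).
export const voiceTools: RegisteredTool[] = [
  {
    name: "get_weather",
    description: "Look up the current weather for a city",
    inputSchema: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
];

// Routes a call by tool name; the returned promise resolves to the tool result.
export const voiceToolExecutor: ToolExecutor = async (name, args) => {
  if (name === "get_weather") {
    return { city: args.city, tempC: 21 }; // stubbed result
  }
  throw new Error(`Unknown tool: ${name}`);
};
```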
+ //#region src/services/realtime/openai-realtime-service.d.ts
+ /**
+ * OpenAI Realtime API Service
+ *
+ * Manages voice conversations with OpenAI's Realtime API,
+ * including WebRTC connections, tool integration, and message handling.
+ */
+ declare class OpenAIRealtimeService {
+ private session;
+ private webrtcManager;
+ private toolManager;
+ private messageHandler;
+ private eventEmitter;
+ private muted;
+ private lastState;
+ private localAnalyzer;
+ private remoteAnalyzer;
+ private visualizationInterval;
+ private sessionStartTime;
+ private onToolsChangedCallback;
+ constructor(tokenEndpoint: string);
+ /**
+ * Set the tools available for the voice session
+ */
+ setTools(tools: RegisteredTool[]): void;
+ /**
+ * Set the tool executor function
+ */
+ setToolExecutor(executor: ToolExecutor): void;
+ /**
+ * Register callback for when tools change
+ */
+ onToolsChanged(callback: () => void): void;
+ /**
+ * Start a new realtime session
+ */
+ startSession(config?: RealtimeConfig): Promise<RealtimeSession>;
+ /**
+ * Stop the current session
+ */
+ stopSession(): void;
+ /**
+ * Send a text message to the assistant
+ */
+ sendUserMessage(text: string): void;
+ /**
+ * Check if session is active
+ */
+ isSessionActive(): boolean;
+ /**
+ * Get the local audio stream
+ */
+ getLocalStream(): MediaStream | null;
+ /**
+ * Get the remote audio stream
+ */
+ getRemoteStream(): MediaStream | null;
+ /**
+ * Toggle audio mute
+ */
+ toggleMute(muted: boolean): void;
+ /**
+ * Event handling
+ */
+ on(event: string, callback: EventCallback): void;
+ off(event: string, callback: EventCallback): void;
+ getSessionStatus(): {
+ state: 'connecting' | 'connected' | 'disconnected' | 'error';
+ isActive: boolean;
+ isMuted: boolean;
+ };
+ /**
+ * Private methods
+ */
+ private setupDataChannel;
+ private setupPeerConnectionMonitoring;
+ private configureSession;
+ private updateSessionTools;
+ private getSessionInstructions;
+ private handleSessionError;
+ private initializeLocalAnalyzer;
+ private initializeRemoteAnalyzer;
+ private startAudioVisualization;
+ private emitSessionState;
+ }
+ //#endregion
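A sketch of driving the service directly, reusing the tool definitions sketched in the previous region. The token endpoint URL is a placeholder, and the event name passed to `on` is a guess: the declarations only type the callback payloads, so the type guards from the types region are used to narrow them.

```ts
import {
  OpenAIRealtimeService,
  isSessionStateEventData,
  REALTIME_DEFAULT_MODEL,
} from "@mcp-b/embedded-agent";
import { voiceTools, voiceToolExecutor } from "./voice-tools"; // the hypothetical module sketched above

const service = new OpenAIRealtimeService("https://your-worker.workers.dev/api/realtime/session");
service.setTools(voiceTools);
service.setToolExecutor(voiceToolExecutor);

// Event names are not spelled out in these declarations; "session_state" is a placeholder.
service.on("session_state", (data) => {
  if (isSessionStateEventData(data)) {
    console.log("voice state:", data.state, "muted:", data.isMuted);
  }
});

export async function startVoice() {
  const session = await service.startSession({ model: REALTIME_DEFAULT_MODEL });
  console.log("data channel state:", session.dataChannel.readyState);
}

export function stopVoice() {
  if (service.isSessionActive()) service.stopSession();
}
```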
+ //#region src/components/voice-indicator.d.ts
+ interface VoiceIndicatorProps {
+ isActive: boolean;
+ isConnecting: boolean;
+ isMuted: boolean;
+ audioLevel?: AudioLevelData;
+ toolCall?: ToolCallData;
+ onStart: () => void;
+ onStop: () => void;
+ onToggleMute: () => void;
+ className?: string;
+ }
+ declare function VoiceIndicator({
+ isActive,
+ isConnecting,
+ isMuted,
+ audioLevel,
+ toolCall,
+ onStart,
+ onStop,
+ onToggleMute,
+ className
+ }: VoiceIndicatorProps): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/live-waveform.d.ts
+ type LiveWaveformProps = HTMLAttributes<HTMLDivElement> & {
+ active?: boolean;
+ processing?: boolean;
+ barWidth?: number;
+ barGap?: number;
+ barRadius?: number;
+ barColor?: string;
+ fadeEdges?: boolean;
+ fadeWidth?: number;
+ height?: string | number;
+ sensitivity?: number;
+ smoothingTimeConstant?: number;
+ mode?: 'scrolling' | 'static';
+ /** Manual audio level (0-1) instead of using microphone */
+ manualAudioLevel?: number;
+ };
+ declare const LiveWaveform: ({
+ active,
+ processing,
+ barWidth,
+ barGap,
+ barRadius,
+ barColor,
+ fadeEdges,
+ fadeWidth,
+ height,
+ sensitivity,
+ mode,
+ manualAudioLevel,
+ className,
+ ...props
+ }: LiveWaveformProps) => react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/realtime-tool-card.d.ts
+ interface RealtimeToolCardProps {
+ toolCall: ToolCallData;
+ className?: string;
+ }
+ /**
+ * Displays a tool call status card for voice mode.
+ * Shows running, success, or error state with the tool name.
+ */
+ declare function RealtimeToolCard({
+ toolCall,
+ className
+ }: RealtimeToolCardProps): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/MCPToolRegistry.d.ts
+ /**
+ * Tool registry that registers all MCP tools with assistant-ui.
+ * Routes tool calls automatically based on _sourceId.
+ */
+ declare const MCPToolRegistry: FC;
+ //#endregion
+ //#region src/components/RemoteMCPSettings.d.ts
+ declare const RemoteMCPSettings: FC;
+ //#endregion
+ //#region src/components/button.d.ts
+ interface ButtonProps extends react0.ComponentProps<'button'>, VariantProps<typeof buttonVariants> {
+ asChild?: boolean;
+ }
+ declare const buttonVariants: (props?: ({
+ variant?: "default" | "destructive" | "outline" | "secondary" | "ghost" | "link" | null | undefined;
+ size?: "default" | "sm" | "lg" | "icon" | "icon-sm" | "icon-lg" | null | undefined;
+ } & class_variance_authority_types0.ClassProp) | undefined) => string;
+ declare function Button({
+ className,
+ variant,
+ size,
+ asChild,
+ ...props
+ }: react0.ComponentProps<'button'> & VariantProps<typeof buttonVariants> & {
+ asChild?: boolean;
+ }): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/avatar.d.ts
+ declare function Avatar({
+ className,
+ ...props
+ }: react0.ComponentProps<typeof AvatarPrimitive.Root>): react_jsx_runtime5.JSX.Element;
+ declare function AvatarImage({
+ className,
+ ...props
+ }: react0.ComponentProps<typeof AvatarPrimitive.Image>): react_jsx_runtime5.JSX.Element;
+ declare function AvatarFallback({
+ className,
+ ...props
+ }: react0.ComponentProps<typeof AvatarPrimitive.Fallback>): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/badge.d.ts
+ declare const badgeVariants: (props?: ({
+ variant?: "default" | "destructive" | "outline" | "secondary" | null | undefined;
+ } & class_variance_authority_types0.ClassProp) | undefined) => string;
+ interface BadgeProps extends react0.HTMLAttributes<HTMLDivElement>, VariantProps<typeof badgeVariants> {}
+ declare function Badge({
+ className,
+ variant,
+ ...props
+ }: BadgeProps): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/dialog.d.ts
+ declare function Dialog({
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Root>): react_jsx_runtime5.JSX.Element;
+ declare function DialogTrigger({
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Trigger>): react_jsx_runtime5.JSX.Element;
+ declare function DialogPortal({
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Portal>): react_jsx_runtime5.JSX.Element;
+ declare function DialogClose({
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Close>): react_jsx_runtime5.JSX.Element;
+ declare function DialogOverlay({
+ className,
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Overlay>): react_jsx_runtime5.JSX.Element;
+ declare function DialogContent({
+ className,
+ children,
+ showCloseButton,
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Content> & {
+ showCloseButton?: boolean;
+ }): react_jsx_runtime5.JSX.Element;
+ declare function DialogHeader({
+ className,
+ ...props
+ }: react0.ComponentProps<'div'>): react_jsx_runtime5.JSX.Element;
+ declare function DialogFooter({
+ className,
+ ...props
+ }: react0.ComponentProps<'div'>): react_jsx_runtime5.JSX.Element;
+ declare function DialogTitle({
+ className,
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Title>): react_jsx_runtime5.JSX.Element;
+ declare function DialogDescription({
+ className,
+ ...props
+ }: react0.ComponentProps<typeof DialogPrimitive.Description>): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/scroll-area.d.ts
+ declare function ScrollArea({
+ className,
+ children,
+ ...props
+ }: react0.ComponentProps<typeof ScrollAreaPrimitive.Root>): react_jsx_runtime5.JSX.Element;
+ declare function ScrollBar({
+ className,
+ orientation,
+ ...props
+ }: react0.ComponentProps<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/separator.d.ts
+ declare function Separator({
+ className,
+ orientation,
+ decorative,
+ ...props
+ }: react0.ComponentProps<typeof SeparatorPrimitive.Root>): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/tooltip.d.ts
+ declare function TooltipProvider({
+ delayDuration,
+ ...props
+ }: react0.ComponentProps<typeof TooltipPrimitive.Provider>): react_jsx_runtime5.JSX.Element;
+ declare function Tooltip({
+ ...props
+ }: react0.ComponentProps<typeof TooltipPrimitive.Root>): react_jsx_runtime5.JSX.Element;
+ declare function TooltipTrigger({
+ ...props
+ }: react0.ComponentProps<typeof TooltipPrimitive.Trigger>): react_jsx_runtime5.JSX.Element;
+ declare function TooltipContent({
+ className,
+ sideOffset,
+ children,
+ ...props
+ }: react0.ComponentProps<typeof TooltipPrimitive.Content>): react_jsx_runtime5.JSX.Element;
+ //#endregion
+ //#region src/components/tooltip-icon-button.d.ts
+ type TooltipIconButtonProps = ComponentPropsWithRef<typeof Button> & {
+ tooltip: string;
+ side?: 'top' | 'bottom' | 'left' | 'right';
+ };
+ declare const TooltipIconButton: react0.ForwardRefExoticComponent<Omit<TooltipIconButtonProps, "ref"> & react0.RefAttributes<HTMLButtonElement>>;
+ //#endregion
+ //#region src/providers/MCPToolsProvider.d.ts
+ /** Well-known source IDs */
+ declare const SOURCE_LOCAL = "local";
+ declare const SOURCE_REMOTE = "remote";
+ /**
+ * MCP Tools Context Value
+ */
+ interface MCPToolsContextValue {
+ /** All tools from all sources, tagged with _sourceId */
+ tools: ToolWithSource[];
+ /** All sources and their states */
+ sources: Map<string, MCPSource>;
+ /** Add a new source */
+ addSource: (id: string, config: MCPSourceConfig) => Promise<void>;
+ /** Remove a source */
+ removeSource: (id: string) => Promise<void>;
+ /** Get a specific source */
+ getSource: (id: string) => MCPSource | undefined;
+ /** Check if a source is connected */
+ isConnected: (id: string) => boolean;
+ /** Call a tool (auto-routes based on tool name lookup) */
+ callTool: (name: string, args: Record<string, unknown>) => Promise<CallToolResult>;
+ /** Call a tool on a specific source */
+ callToolOnSource: (sourceId: string, name: string, args: Record<string, unknown>) => Promise<CallToolResult>;
+ }
+ interface MCPToolsProviderProps {
+ children: ReactNode;
+ /** Auto-connect to same-tab source on mount (default: true) */
+ autoConnectLocal?: boolean;
+ /** Callback when tools change */
+ onToolsChange?: (tools: ToolWithSource[]) => void;
+ }
+ declare function MCPToolsProvider({
+ children,
+ autoConnectLocal,
+ onToolsChange
+ }: MCPToolsProviderProps): react_jsx_runtime5.JSX.Element;
+ /**
+ * Hook to access MCP Tools context
+ */
+ declare function useMCPTools(): MCPToolsContextValue;
+ /**
+ * Hook to optionally access MCP Tools context
+ */
+ declare function useOptionalMCPTools(): MCPToolsContextValue | null;
+ //#endregion
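A usage sketch for the provider and hook (not from the package): the remote source config shape follows the `createMCPTransport` examples further down, the URL and auth token are placeholders, and tools are assumed to carry an MCP-style `name` field.

```tsx
import { MCPToolsProvider, useMCPTools, SOURCE_REMOTE } from "@mcp-b/embedded-agent";

function ToolPanel() {
  const { tools, addSource, isConnected, callTool } = useMCPTools();

  // Config shape mirrors the createMCPTransport examples below (type: 'tab' | 'http').
  const connectRemote = () =>
    addSource(SOURCE_REMOTE, { type: "http", url: "https://example.com/mcp", authToken: "secret" });

  const runFirstTool = async () => {
    const first = tools[0];
    if (!first) return;
    console.log("tool result:", await callTool(first.name, {}));
  };

  return (
    <div>
      <p>
        {tools.length} tools; remote connected: {String(isConnected(SOURCE_REMOTE))}
      </p>
      <button onClick={connectRemote}>Connect remote</button>
      <button onClick={runFirstTool}>Run first tool</button>
    </div>
  );
}

export function App() {
  return (
    <MCPToolsProvider autoConnectLocal onToolsChange={(t) => console.log("tools changed:", t.length)}>
      <ToolPanel />
    </MCPToolsProvider>
  );
}
```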
+ //#region src/hooks/useVoiceMode.d.ts
+ interface UseVoiceModeOptions {
+ /** Endpoint to get ephemeral tokens from */
+ tokenEndpoint: string;
+ /** Tools available for the voice session */
+ tools?: RegisteredTool[];
+ /** Tool executor function */
+ toolExecutor?: ToolExecutor;
+ /** Callback when session connects */
+ onConnect?: () => void;
+ /** Callback when session disconnects */
+ onDisconnect?: (durationSeconds: number) => void;
+ /** Callback when error occurs */
+ onError?: (error: string) => void;
+ /** Callback when user transcript is complete */
+ onUserTranscript?: (text: string) => void;
+ /** Callback when assistant transcript is complete */
+ onAssistantTranscript?: (text: string) => void;
+ }
+ interface UseVoiceModeReturn extends VoiceModeState {
+ /** Start voice session */
+ startSession: (config?: RealtimeConfig) => Promise<void>;
+ /** Stop voice session */
+ stopSession: () => void;
+ /** Toggle microphone mute */
+ toggleMute: (muted?: boolean) => void;
+ /** Send text message while in voice mode */
+ sendMessage: (text: string) => void;
+ }
+ declare function useVoiceMode(options: UseVoiceModeOptions): UseVoiceModeReturn;
+ //#endregion
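A minimal component sketch wiring the hook's state and controls to buttons; the token endpoint URL is a placeholder.

```tsx
import { useVoiceMode } from "@mcp-b/embedded-agent";

export function VoiceButton() {
  const voice = useVoiceMode({
    tokenEndpoint: "https://your-worker.workers.dev/api/realtime/session",
    onConnect: () => console.log("voice connected"),
    onDisconnect: (seconds) => console.log(`voice session ended after ${seconds}s`),
    onError: (error) => console.error("voice error:", error),
  });

  if (voice.isConnecting) return <span>Connecting…</span>;

  return voice.isActive ? (
    <>
      <button onClick={() => voice.toggleMute(!voice.isMuted)}>
        {voice.isMuted ? "Unmute" : "Mute"}
      </button>
      <button onClick={voice.stopSession}>Stop voice</button>
    </>
  ) : (
    <button onClick={() => voice.startSession()}>Start voice</button>
  );
}
```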
+ //#region src/providers/VoiceModeProvider.d.ts
+ /**
+ * Voice Mode Provider
+ *
+ * Provides voice mode state and controls to the component tree.
+ */
+ interface VoiceModeContextValue extends UseVoiceModeReturn {
+ isSupported: boolean;
+ }
+ interface VoiceModeProviderProps {
+ children: ReactNode;
+ /** Backend endpoint for ephemeral tokens */
+ tokenEndpoint: string;
+ /** Tools available for voice mode */
+ tools?: RegisteredTool[];
+ /** Tool executor function */
+ toolExecutor?: ToolExecutor;
+ /** Callback when session connects */
+ onConnect?: () => void;
+ /** Callback when session disconnects */
+ onDisconnect?: (durationSeconds: number) => void;
+ /** Callback when error occurs */
+ onError?: (error: string) => void;
+ /** Callback when user transcript is complete */
+ onUserTranscript?: (text: string) => void;
+ /** Callback when assistant transcript is complete */
+ onAssistantTranscript?: (text: string) => void;
+ }
+ declare function VoiceModeProvider({
+ children,
+ tokenEndpoint,
+ tools,
+ toolExecutor,
+ onConnect,
+ onDisconnect,
+ onError,
+ onUserTranscript,
+ onAssistantTranscript
+ }: VoiceModeProviderProps): react_jsx_runtime5.JSX.Element;
+ /**
+ * Hook to access voice mode context
+ */
+ declare function useVoiceModeContext(): VoiceModeContextValue;
+ /**
+ * Hook to optionally access voice mode context (returns null if not in provider)
+ */
+ declare function useOptionalVoiceModeContext(): VoiceModeContextValue | null;
+ //#endregion
+ //#region src/providers/ChatRuntimeProvider.d.ts
+ interface ChatRuntimeProviderProps {
+ children: ReactNode;
+ }
+ /**
+ * Provider that sets up the Assistant UI runtime with Cloudflare Workers backend.
+ *
+ * This component integrates the Cloudflare Agents SDK with Assistant UI,
+ * handling message conversion, tool calls, and HITL (Human-in-the-Loop) approvals.
+ *
+ * @example
+ * ```tsx
+ * import { ChatRuntimeProvider, Thread } from '@mcp-b/webmcp-client';
+ *
+ * function App() {
+ * return (
+ * <ChatRuntimeProvider>
+ * <Thread />
+ * </ChatRuntimeProvider>
+ * );
+ * }
+ * ```
+ */
+ declare const ChatRuntimeProvider: FC<ChatRuntimeProviderProps>;
+ //#endregion
+ //#region src/hooks/useMCPSource.d.ts
+ /**
+ * Options for the useMCPSource hook
+ */
+ interface UseMCPSourceOptions {
+ /** Auto-connect on mount (default: true) */
+ autoConnect?: boolean;
+ /** Callback when tools are updated */
+ onToolsUpdate?: (tools: Tool$1[]) => void;
+ /** Callback when resources are updated */
+ onResourcesUpdate?: (resources: Resource[], templates: ResourceTemplate[]) => void;
+ /** Callback when prompts are updated */
+ onPromptsUpdate?: (prompts: Prompt[]) => void;
+ /** Callback when connection state changes */
+ onStateChange?: (state: MCPConnectionState) => void;
+ /** Callback on connection error */
+ onError?: (error: Error) => void;
+ /**
+ * Handler for sampling requests from the server.
+ * If provided, enables the sampling capability.
+ * The handler should call an LLM and return the response.
+ */
+ samplingHandler?: SamplingHandler;
+ }
+ /**
+ * Return value from the useMCPSource hook
+ */
+ interface UseMCPSourceReturn {
+ /** MCP client instance */
+ client: Client | null;
+ /** Available tools from this source */
+ tools: Tool$1[];
+ /** Available resources from this source */
+ resources: Resource[];
+ /** Available resource templates from this source */
+ resourceTemplates: ResourceTemplate[];
+ /** Available prompts from this source */
+ prompts: Prompt[];
+ /** Connection state */
+ state: MCPConnectionState;
+ /** Whether connected and ready */
+ isConnected: boolean;
+ /** Connection error (if any) */
+ error: Error | null;
+ /** Connect to the MCP server */
+ connect: () => Promise<void>;
+ /** Disconnect from the MCP server */
+ disconnect: () => Promise<void>;
+ /** Call a tool on this source */
+ callTool: (name: string, args: Record<string, unknown>) => Promise<CallToolResult>;
+ /** Read a resource by URI */
+ readResource: (uri: string) => Promise<ReadResourceResult>;
+ /** Get a prompt with optional arguments */
+ getPrompt: (name: string, args?: Record<string, string>) => Promise<GetPromptResult>;
+ /** Refresh the list of tools */
+ refreshTools: () => Promise<void>;
+ /** Refresh the list of resources */
+ refreshResources: () => Promise<void>;
+ /** Refresh the list of prompts */
+ refreshPrompts: () => Promise<void>;
+ }
+ /**
+ * Unified hook for connecting to MCP sources.
+ * Works with both tab-based (same-tab, iframe) and HTTP/SSE-based (remote) sources.
+ * Supports the full MCP specification including tools, resources, prompts, and sampling.
+ */
+ declare function useMCPSource(id: string, config: MCPSourceConfig, options?: UseMCPSourceOptions): UseMCPSourceReturn;
+ //#endregion
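A sketch of the hook against a remote HTTP source: the URL is illustrative, the config shape follows the `createMCPTransport` examples below, and tools are assumed to expose an MCP-style `name`.

```tsx
import { useMCPSource } from "@mcp-b/embedded-agent";

export function RemoteToolList() {
  const source = useMCPSource(
    "docs-server",
    { type: "http", url: "https://example.com/mcp" },
    {
      autoConnect: true,
      onError: (error) => console.error("MCP connection failed:", error),
    }
  );

  if (!source.isConnected) return <p>Connecting to MCP source…</p>;

  return (
    <ul>
      {source.tools.map((tool) => (
        <li key={tool.name}>
          <button onClick={() => source.callTool(tool.name, {})}>{tool.name}</button>
        </li>
      ))}
    </ul>
  );
}
```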
+ //#region src/hooks/useAgentChat.d.ts
+ /**
+ * Tool definition for frontend execution.
+ * Tools with execute function auto-execute.
+ * Tools without execute require manual confirmation via addToolResult.
+ */
+ interface FrontendTool<TInput = unknown, TOutput = unknown> {
+ /** Optional description for UI */
+ description?: string;
+ /** If provided, tool auto-executes. If omitted, requires manual confirmation. */
+ execute?: (input: TInput) => Promise<TOutput> | TOutput;
+ }
+ /**
+ * Map of tool names to tool definitions.
+ */
+ type FrontendTools = Record<string, FrontendTool>;
+ /**
+ * Agent connection interface (WebSocket-like).
+ */
+ interface AgentConnection {
+ send: (data: string) => void;
+ addEventListener: (event: 'message', handler: (event: MessageEvent) => void) => void;
+ removeEventListener: (event: 'message', handler: (event: MessageEvent) => void) => void;
+ readyState?: number;
+ }
+ /**
+ * Options for useAgentChat hook.
+ */
+ interface UseAgentChatOptions {
+ /** WebSocket connection to the agent */
+ agent: AgentConnection;
+ /** Frontend tools for execution */
+ tools?: FrontendTools;
+ /** Initial messages (optional, typically restored from sync) */
+ initialMessages?: UIMessage[];
+ /** Callback when tools need confirmation */
+ onToolsNeedConfirmation?: (tools: PendingTool[]) => void;
+ /** Callback when agent state changes */
+ onStateChange?: (state: AgentChatState) => void;
+ }
+ /**
+ * Return value from useAgentChat hook.
+ */
+ interface UseAgentChatReturn {
+ /** Current messages */
+ messages: UIMessage[];
+ /** Set messages manually */
+ setMessages: (messages: UIMessage[]) => void;
+ /** Current input value */
+ input: string;
+ /** Set input value */
+ setInput: (input: string) => void;
+ /** Handle input submission */
+ handleSubmit: (e?: React.FormEvent) => void;
+ /** Send a message programmatically */
+ sendMessage: (content: string) => void;
+ /** Add tool result (for confirmation flow) */
+ addToolResult: (toolCallId: string, toolName: string, output: unknown) => void;
+ /** Current agent state */
+ agentState: AgentChatState;
+ /** Whether currently streaming */
+ isStreaming: boolean;
+ /** Tools awaiting confirmation */
+ pendingConfirmationTools: PendingTool[];
+ /** Clear chat history */
+ clearHistory: () => void;
+ }
+ /**
+ * React hook for DO-first streaming chat.
+ *
+ * Handles:
+ * - Message sending and receiving
+ * - State synchronization on reconnect
+ * - Automatic tool execution
+ * - Confirmation-required tool flow
+ */
+ declare function useAgentChat(options: UseAgentChatOptions): UseAgentChatReturn;
+ //#endregion
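A sketch of the confirmation flow (WebSocket URL, tool names, and the approval payload are illustrative): a plain WebSocket structurally satisfies `AgentConnection`, tools with `execute` auto-run, and tools without it surface in `pendingConfirmationTools` until `addToolResult` is called. `PendingTool` is assumed to carry `toolCallId`/`toolName`, matching the `onToolsNeedConfirmation` shape in the next region.

```tsx
import { useAgentChat, type FrontendTools } from "@mcp-b/embedded-agent";

// A raw WebSocket provides send/addEventListener/removeEventListener/readyState.
const agent = new WebSocket("wss://your-worker.workers.dev/agents/chat");

const tools: FrontendTools = {
  getTime: { description: "Current time", execute: () => new Date().toISOString() },
  deleteRecord: { description: "Delete a record (needs user approval)" }, // no execute -> confirmation
};

export function Chat() {
  const chat = useAgentChat({ agent, tools });

  return (
    <form onSubmit={chat.handleSubmit}>
      <p>{chat.isStreaming ? "assistant is responding…" : `${chat.messages.length} messages`}</p>
      {chat.pendingConfirmationTools.map((tool) => (
        <button
          key={tool.toolCallId}
          type="button"
          onClick={() => chat.addToolResult(tool.toolCallId, tool.toolName, { approved: true })}
        >
          Approve {tool.toolName}
        </button>
      ))}
      <input value={chat.input} onChange={(e) => chat.setInput(e.target.value)} />
      <button type="submit" disabled={chat.isStreaming}>
        Send
      </button>
    </form>
  );
}
```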
+ //#region src/hooks/useCloudflareRuntime.d.ts
+ /**
+ * Options for the useCloudflareRuntime hook.
+ */
+ interface UseCloudflareRuntimeOptions {
+ /**
+ * The host for the Cloudflare Agent WebSocket connection.
+ * @default 'localhost:8787'
+ */
+ host?: string;
+ /**
+ * The agent binding name to connect to.
+ * @default 'TOOL_HUB'
+ */
+ agent?: string;
+ /**
+ * Frontend tools that can be executed by the client.
+ * Tools with an `execute` function will auto-execute.
+ * Tools without `execute` require manual confirmation via the UI.
+ */
+ tools?: FrontendTools;
+ /**
+ * Callback when tools need user confirmation.
+ * Called with the list of tools awaiting approval.
+ */
+ onToolsNeedConfirmation?: (tools: Array<{
+ toolCallId: string;
+ toolName: string;
+ input: unknown;
+ }>) => void;
+ }
+ /**
+ * Hook that provides Assistant UI runtime integration with Cloudflare Workers backend.
+ *
+ * This hook orchestrates the connection between the Assistant UI component library and
+ * a Cloudflare Workers agent backend with DO-first streaming. It handles:
+ *
+ * 1. **Agent Connection**: Establishes WebSocket connection to the Cloudflare Agent
+ * running on the local development server or Cloudflare Workers backend.
+ *
+ * 2. **DO-First Streaming**: Messages are accumulated on the Durable Object, making
+ * streaming refresh-safe. On reconnection, the client receives a sync message
+ * with the full state including any in-progress streaming message.
+ *
+ * 3. **Frontend Tool Execution**: When the LLM requests tool calls, the agent sends
+ * a tools_needed message. Tools with `execute` functions auto-run, while others
+ * require manual confirmation through the UI.
+ *
+ * 4. **User Input Handling**: Processes new user messages from the Assistant UI composer
+ * and sends them to the Cloudflare Agent for processing.
+ *
+ * 5. **Message Conversion**: Converts messages between the Vercel AI SDK format
+ * (used by Cloudflare Agents) and the Assistant UI format using the converter utility.
+ *
+ * @param options - Configuration options for the runtime
+ * @returns Assistant UI runtime configured with Cloudflare Workers backend integration
+ *
+ * @example
+ * ```tsx
+ * function ChatApp() {
+ * const runtime = useCloudflareRuntime({
+ * host: 'localhost:8787',
+ * tools: {
+ * getWeather: {
+ * execute: async (args) => {
+ * const data = await fetchWeather(args.city);
+ * return data;
+ * }
+ * }
+ * }
+ * });
+ * return <Thread runtime={runtime} />;
+ * }
+ * ```
+ */
+ declare function useCloudflareRuntime(options?: UseCloudflareRuntimeOptions): AssistantRuntime;
+ //#endregion
+ //#region src/hooks/useAssistantMCP.d.ts
+ interface UseAssistantMCPOptions {
+ /** MCP tools to register */
+ tools: Tool[];
+ /** MCP client for executing tools */
+ client: Client;
+ /** Optional system prompt to inject */
+ systemPrompt?: string;
+ }
+ /**
+ * Registers MCP tools with the Assistant UI runtime so they can be invoked by the
+ * assistant without any additional indirection.
+ *
+ * Tools are registered as frontend tools with execute functions that call the MCP client.
+ *
+ * @param options - Configuration options
+ *
+ * @example
+ * ```tsx
+ * function ChatThread() {
+ * const { client, tools } = useMcpClient();
+ *
+ * useAssistantMCP({
+ * tools,
+ * client,
+ * systemPrompt: 'You are a helpful assistant.',
+ * });
+ *
+ * return <Thread />;
+ * }
+ * ```
+ */
+ declare function useAssistantMCP(options: UseAssistantMCPOptions): void;
+ //#endregion
+ //#region src/lib/utils.d.ts
+ /**
+ * Merges Tailwind CSS classes with proper precedence handling.
+ * Combines clsx for conditional classes and tailwind-merge for deduplication.
+ */
+ declare function cn(...inputs: ClassValue[]): string;
+ //#endregion
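A small illustration of the merge behavior described above: clsx semantics for the conditional value, tailwind-merge semantics for the conflicting padding classes.

```ts
import { cn } from "@mcp-b/embedded-agent";

const isActive = true;

// Falsy values are dropped; the later px-4 wins over the earlier px-2.
const className = cn("px-2 py-1 text-sm", isActive && "bg-primary", "px-4");
// -> "py-1 text-sm bg-primary px-4"
```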
939
+ //#region src/lib/converter.d.ts
940
+ /**
941
+ * Converts an array of UI messages to the Assistant UI format used by the thread component.
942
+ *
943
+ * This function transforms messages from the Vercel/Cloudflare AI format to the format
944
+ * expected by Assistant UI. Key transformations include:
945
+ * - Maps the message role to the corresponding Assistant UI role
946
+ * - Converts content parts, preserving text parts as-is
947
+ * - Transforms tool invocations from Vercel's combined call/result format to Assistant UI's
948
+ * distinct tool-call format, separating the execution state (PENDING vs COMPLETED)
949
+ *
950
+ * @param messages - Array of UI messages to convert
951
+ * @returns Array of messages formatted for use in the Assistant UI thread component
952
+ *
953
+ * @example
954
+ * ```ts
955
+ * const messages = [{ role: 'user', parts: [{ type: 'text', text: 'Hello' }] }];
956
+ * const threadMessages = convertToAssistantUiMessages(messages);
957
+ * ```
958
+ */
959
+ declare const convertToAssistantUiMessages: (messages: UIMessage[]) => ThreadMessageLike[];
960
+ /**
961
+ * Converts a single UI message to the Assistant UI format.
962
+ *
963
+ * This function performs the same transformation as convertToAssistantUiMessages but operates
964
+ * on a single message. It's useful for converting individual messages from the Vercel/Cloudflare
965
+ * AI format to the Assistant UI format, particularly for generic UI message types that need
966
+ * advanced type parameters.
967
+ *
968
+ * @param msg - Single UI message to convert
969
+ * @returns Message formatted for use in the Assistant UI thread component
970
+ *
971
+ * @example
972
+ * ```ts
973
+ * const msg: UIMessage = { role: 'assistant', parts: [{ type: 'text', text: 'Hello' }] };
974
+ * const threadMsg = convertAssistantUIMessage(msg);
975
+ * ```
976
+ */
977
+ declare const convertAssistantUIMessage: (msg: UIMessage<unknown, UIDataTypes, UITools>) => ThreadMessageLike;
978
+ //#endregion
979
+ //#region src/lib/mcp-transport.d.ts
980
+ /**
981
+ * Transport type options for HTTP-based connections
982
+ */
983
+ type TransportType = 'http' | 'sse';
984
+ /**
985
+ * Creates the appropriate MCP transport based on source configuration.
986
+ *
987
+ * @param config - The MCP source configuration
988
+ * @param transportType - For HTTP sources, specifies whether to use HTTP or SSE transport
989
+ * @returns A configured MCP Transport instance
990
+ *
991
+ * @example Tab-based transport (same-tab or iframe)
992
+ * ```ts
993
+ * const transport = createMCPTransport({ type: 'tab' })
994
+ * ```
995
+ *
996
+ * @example HTTP transport with auth
997
+ * ```ts
998
+ * const transport = createMCPTransport({
999
+ * type: 'http',
1000
+ * url: 'https://example.com/mcp',
1001
+ * authToken: 'secret'
1002
+ * }, 'http')
1003
+ * ```
1004
+ */
1005
+ declare function createMCPTransport(config: MCPSourceConfig, transportType?: TransportType): Transport;
1006
+ /**
1007
+ * Check if an error indicates we should try SSE fallback.
1008
+ *
1009
+ * This handles cases where HTTP streaming isn't supported (404/405) or
1010
+ * when the connection fails due to CORS/network issues.
1011
+ *
1012
+ * @param error - The error from the failed HTTP connection
1013
+ * @returns true if the error suggests trying SSE transport instead
1014
+ */
1015
+ declare function shouldFallbackToSSE(error: Error): boolean;
1016
+ /**
1017
+ * Calculate exponential backoff delay for reconnection attempts.
1018
+ *
1019
+ * @param attempt - The current attempt number (0-based)
1020
+ * @param baseDelay - Base delay in milliseconds
1021
+ * @param maxDelay - Maximum delay cap in milliseconds
1022
+ * @returns The calculated delay in milliseconds
1023
+ */
1024
+ declare function calculateReconnectDelay(attempt: number, baseDelay: number, maxDelay: number): number;
1025
+ //#endregion
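These three helpers compose naturally into a connect-with-retry loop. The sketch below is an assumed usage, not code from the package: it pairs them with the reconnect constants declared in the next region and the MCP SDK `Client` already imported at the top of the file; the URL and client name are placeholders.

```ts
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import {
  createMCPTransport,
  shouldFallbackToSSE,
  calculateReconnectDelay,
  MCP_MAX_RECONNECT_ATTEMPTS,
  MCP_BASE_RECONNECT_DELAY_MS,
  MCP_MAX_RECONNECT_DELAY_MS,
} from "@mcp-b/embedded-agent";

// Try streamable HTTP first, fall back to SSE when the error suggests it,
// and back off exponentially between attempts.
export async function connectWithFallback(url: string): Promise<Client> {
  for (let attempt = 0; attempt < MCP_MAX_RECONNECT_ATTEMPTS; attempt++) {
    const client = new Client({ name: "embedded-agent-example", version: "0.0.1" });
    try {
      await client.connect(createMCPTransport({ type: "http", url }, "http"));
      return client;
    } catch (error) {
      if (error instanceof Error && shouldFallbackToSSE(error)) {
        const sseClient = new Client({ name: "embedded-agent-example", version: "0.0.1" });
        await sseClient.connect(createMCPTransport({ type: "http", url }, "sse"));
        return sseClient;
      }
      const delay = calculateReconnectDelay(attempt, MCP_BASE_RECONNECT_DELAY_MS, MCP_MAX_RECONNECT_DELAY_MS);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  throw new Error(`Could not connect to MCP server at ${url}`);
}
```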
+ //#region src/lib/constants.d.ts
+ /**
+ * Shared Constants
+ *
+ * Centralized location for all magic numbers, timeouts, and configuration values.
+ * Having these in one place makes the codebase more maintainable and easier to tune.
+ */
+ /** Maximum number of reconnection attempts before giving up */
+ declare const MCP_MAX_RECONNECT_ATTEMPTS = 5;
+ /** Base delay in milliseconds for exponential backoff reconnection */
+ declare const MCP_BASE_RECONNECT_DELAY_MS = 1000;
+ /** Maximum reconnect delay in milliseconds (caps exponential backoff) */
+ declare const MCP_MAX_RECONNECT_DELAY_MS = 30000;
+ /** Delay before auto-connecting to tab-based MCP sources (allows server initialization) */
+ declare const MCP_TAB_CONNECT_DELAY_MS = 100;
+ /** Number of frequency bins for audio visualization */
+ declare const AUDIO_FREQUENCY_BINS = 32;
+ /** Default OpenAI Realtime model */
+ declare const REALTIME_DEFAULT_MODEL = "gpt-4o-realtime-preview-2024-12-17";
+ /** Default voice for OpenAI Realtime */
+ declare const REALTIME_DEFAULT_VOICE = "verse";
+ /** Default OpenAI Realtime API URL */
+ declare const REALTIME_DEFAULT_API_URL = "https://api.openai.com/v1/realtime";
+ /**
+ * Enable debug logging for development.
+ * Can be enabled by setting window.__WEBMCP_DEBUG__ = true in the browser console.
+ * In production builds, this defaults to false.
+ */
+ declare const DEBUG_LOGGING_ENABLED: boolean;
+ /**
+ * Conditional debug logger that only logs when DEBUG_LOGGING_ENABLED is true.
+ * Can be enabled at runtime by setting window.__WEBMCP_DEBUG__ = true
+ *
+ * @param component - The component or service name for the log prefix
+ * @param message - The log message
+ * @param data - Optional data to include in the log
+ */
+ declare function debugLog(component: string, message: string, data?: unknown): void;
+ //#endregion
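A quick sketch of the logger in use; the component tag and payload are arbitrary.

```ts
import { debugLog, DEBUG_LOGGING_ENABLED } from "@mcp-b/embedded-agent";

// No-op unless debug logging is enabled, e.g. via: window.__WEBMCP_DEBUG__ = true
debugLog("MCPToolsProvider", "source connected", { sourceId: "local" });
console.log("debug logging enabled:", DEBUG_LOGGING_ENABLED);
```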
+ //#region src/embed.d.ts
+ /**
+ * WebMCP Embedded Agent - Intercom-style Installation
+ *
+ * @example Basic Installation (paste before </body>)
+ * ```html
+ * <script>
+ * window.webmcpSettings = {
+ * app_id: "YOUR_APP_ID",
+ * api_base: "https://your-worker.workers.dev"
+ * };
+ * </script>
+ * <script>
+ * (function(){var w=window;var wm=w.WebMCP;if(typeof wm==="function"){wm('reattach');}else{var q=function(){q.c(arguments);};q.q=[];q.c=function(args){q.q.push(args);};w.WebMCP=q;var l=function(){var s=document.createElement('script');s.type='text/javascript';s.async=true;s.src='https://your-cdn.com/embed.iife.js';var x=document.getElementsByTagName('script')[0];x.parentNode.insertBefore(s,x);};if(document.readyState==='complete'){l();}else{w.addEventListener('load',l,false);}}})();
+ * </script>
+ * ```
+ *
+ * @example Programmatic Control
+ * ```javascript
+ * // Boot with user data
+ * window.WebMCP('boot', {
+ * app_id: 'YOUR_APP_ID',
+ * user_id: 'user_123',
+ * email: 'user@example.com'
+ * });
+ *
+ * // Show/hide the messenger
+ * window.WebMCP('show');
+ * window.WebMCP('hide');
+ *
+ * // Update settings
+ * window.WebMCP('update', { custom_data: { plan: 'pro' } });
+ *
+ * // Shutdown (clear session)
+ * window.WebMCP('shutdown');
+ * ```
+ */
+ /**
+ * Configuration options (similar to intercomSettings)
+ */
+ interface WebMCPSettings {
+ /** Your app/workspace ID */
+ app_id: string;
+ /** API base URL for your WebMCP worker */
+ api_base?: string;
+ /** Voice mode token endpoint */
+ token_endpoint?: string;
+ /** User ID for identified users */
+ user_id?: string;
+ /** User email */
+ email?: string;
+ /** User name */
+ name?: string;
+ /** Unix timestamp of user creation */
+ created_at?: number;
+ /** Custom user attributes */
+ custom_data?: Record<string, unknown>;
+ /** Auto-connect to local MCP source */
+ auto_connect_local?: boolean;
+ /** Custom container element ID */
+ container_id?: string;
+ }
+ /**
+ * Boot options (can override settings)
+ */
+ interface WebMCPBootOptions extends Partial<WebMCPSettings> {}
+ /**
+ * Handle method calls (Intercom-style API)
+ */
+ type WebMCPMethod = 'boot' | 'shutdown' | 'update' | 'show' | 'hide' | 'showNewMessage' | 'getVisitorId' | 'trackEvent' | 'reattach';
+ /**
+ * Main API function (Intercom-style)
+ */
+ interface WebMCPFunction {
+ (method: WebMCPMethod, ...args: unknown[]): unknown;
+ q?: unknown[][];
+ c?: (args: unknown[]) => void;
+ booted: boolean;
+ visible: boolean;
+ }
+ declare global {
+ interface Window {
+ WebMCP: WebMCPFunction;
+ webmcpSettings?: WebMCPSettings;
+ }
+ }
+ //#endregion
+ export { AUDIO_FREQUENCY_BINS, AssistantMessage, AssistantModal, type AssistantTranscriptEventData, type AudioLevelData, Avatar, AvatarFallback, AvatarImage, Badge, type BadgeProps, Button, type ButtonProps, ChatRuntimeProvider, type ChatRuntimeProviderProps, Composer, ComposerAddAttachment, ComposerAttachments, DEBUG_LOGGING_ENABLED, Dialog, DialogClose, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogOverlay, DialogPortal, DialogTitle, DialogTrigger, EmbeddedAgent, type EmbeddedAgentProps, type ErrorEventData, LiveWaveform, MCPToolRegistry, type MCPToolsContextValue, MCPToolsProvider, type MCPToolsProviderProps, MCP_BASE_RECONNECT_DELAY_MS, MCP_MAX_RECONNECT_ATTEMPTS, MCP_MAX_RECONNECT_DELAY_MS, MCP_TAB_CONNECT_DELAY_MS, MarkdownText, OpenAIRealtimeService, REALTIME_DEFAULT_API_URL, REALTIME_DEFAULT_MODEL, REALTIME_DEFAULT_VOICE, type RealtimeConfig, type RealtimeSession, RealtimeToolCard, type RegisteredTool, RemoteMCPSettings, SOURCE_LOCAL, SOURCE_REMOTE, ScrollArea, ScrollBar, Separator, type SessionStateEventData, Thread, ThreadWithVoice, type ToolCallCompletedEventData, type ToolCallData, type ToolCallStartedEventData, type ToolExecutor, ToolFallback, Tooltip, TooltipContent, TooltipIconButton, TooltipProvider, TooltipTrigger, type TranscriptData, type TransportType, type UseAgentChatOptions, type UseAgentChatReturn, type UseMCPSourceOptions, type UseMCPSourceReturn, type UseVoiceModeOptions, type UseVoiceModeReturn, UserMessage, UserMessageAttachments, type UserTranscriptEventData, VoiceIndicator, VoiceModeProvider, type VoiceModeProviderProps, type VoiceModeState, WebMCPAgentElement, type WebMCPBootOptions, type WebMCPFunction, type WebMCPMethod, type WebMCPSettings, badgeVariants, buttonVariants, calculateReconnectDelay, cn, convertAssistantUIMessage, convertToAssistantUiMessages, createMCPTransport, debugLog, isAssistantTranscriptEventData, isAudioLevelData, isErrorEventData, isSessionStateEventData, isToolCallCompletedEventData, isToolCallStartedEventData, isUserTranscriptEventData, registerWebMCPAgent, shouldFallbackToSSE, useAgentChat, useAssistantMCP, useCloudflareRuntime, useMCPSource, useMCPTools, useOptionalMCPTools, useOptionalVoiceModeContext, useVoiceMode, useVoiceModeContext };
+ //# sourceMappingURL=index.d.ts.map