@yourgpt/copilot-sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1824 @@
1
+ import * as react_jsx_runtime from 'react/jsx-runtime';
2
+ import React__default from 'react';
3
+ import { M as MessageAttachment, A as ToolCall, f as Source, D as LLMConfig, T as ToolDefinition, af as PermissionLevel, i as ToolInputSchema, G as CopilotConfig, s as ToolsConfig, y as Message, O as ActionDefinition, v as CapturedContext, t as ToolConsentRequest, r as ToolType, aa as ToolExecution$1, u as ToolConsentResponse, S as ScreenshotOptions, C as ConsoleLogOptions, N as NetworkRequestOptions, I as IntentDetectionResult, k as ToolContext, h as ToolResponse$1, a4 as ToolRenderProps, a6 as ToolSet, a7 as UnifiedToolCall, Y as InternalKnowledgeBaseConfig, _ as InternalKnowledgeBaseSearchResponse, Z as InternalKnowledgeBaseResult, ah as PermissionStorageConfig, ai as PermissionStorageAdapter } from '../thread-C2FjuGLb.js';
4
+ export { K as ActionParameter, ab as AgentLoopConfig, E as CloudConfig, a1 as PersistenceConfig, $ as Thread, a0 as ThreadData, a2 as ThreadStorageAdapter, g as ToolExecutionStatus, ag as ToolPermission, o as generateSuggestionReason } from '../thread-C2FjuGLb.js';
5
+
6
+ /**
7
+ * Message Types
8
+ *
9
+ * Pure type definitions for chat messages.
10
+ * No logic, no side effects - just types.
11
+ */
12
+
13
+ /**
14
+ * Chat message roles
15
+ */
16
+ type MessageRole = "user" | "assistant" | "system" | "tool";
17
+ /**
18
+ * UIMessage - The source of truth for UI state
19
+ *
20
+ * Inspired by Vercel AI SDK's UIMessage pattern.
21
+ * This is what your UI renders and what gets persisted.
22
+ */
23
+ interface UIMessage {
24
+ /** Unique message ID */
25
+ id: string;
26
+ /** Message role */
27
+ role: MessageRole;
28
+ /** Message content */
29
+ content: string;
30
+ /** Thinking/reasoning content (for extended thinking models) */
31
+ thinking?: string;
32
+ /** Message attachments (images, PDFs, etc) */
33
+ attachments?: MessageAttachment[];
34
+ /** Tool calls made by assistant */
35
+ toolCalls?: ToolCall[];
36
+ /** Tool call ID (for tool result messages) */
37
+ toolCallId?: string;
38
+ /** Sources from knowledge base */
39
+ sources?: Source[];
40
+ /** Creation timestamp */
41
+ createdAt: Date;
42
+ /** Additional metadata */
43
+ metadata?: Record<string, unknown>;
44
+ }
45
+
46
+ /**
47
+ * ChatTransport Interface
48
+ *
49
+ * Contract for different transport implementations.
50
+ * HTTP, WebSocket, or mock for testing.
51
+ */
52
+ /**
53
+ * Chat request to send
54
+ */
55
+ interface ChatRequest {
56
+ /** Messages to send */
57
+ messages: Array<{
58
+ role: string;
59
+ content: string | null;
60
+ tool_calls?: unknown[];
61
+ tool_call_id?: string;
62
+ attachments?: unknown[];
63
+ }>;
64
+ /** Thread ID */
65
+ threadId?: string;
66
+ /** System prompt */
67
+ systemPrompt?: string;
68
+ /** LLM config */
69
+ llm?: Record<string, unknown>;
70
+ /** Tool definitions */
71
+ tools?: unknown[];
72
+ /** Action definitions */
73
+ actions?: unknown[];
74
+ /** Additional body properties */
75
+ body?: Record<string, unknown>;
76
+ }
77
+ /**
78
+ * Chat response (non-streaming)
79
+ */
80
+ interface ChatResponse {
81
+ /** Response messages */
82
+ messages: Array<{
83
+ role: string;
84
+ content: string | null;
85
+ tool_calls?: unknown[];
86
+ }>;
87
+ /** Whether client needs to execute tools */
88
+ requiresAction?: boolean;
89
+ }
90
+ /**
91
+ * Stream chunk types
92
+ */
93
+ type StreamChunk = {
94
+ type: "message:start";
95
+ id: string;
96
+ } | {
97
+ type: "message:delta";
98
+ content: string;
99
+ } | {
100
+ type: "message:end";
101
+ } | {
102
+ type: "thinking:delta";
103
+ content: string;
104
+ } | {
105
+ type: "tool_calls";
106
+ toolCalls: unknown[];
107
+ assistantMessage: unknown;
108
+ } | {
109
+ type: "source:add";
110
+ source: unknown;
111
+ } | {
112
+ type: "error";
113
+ message: string;
114
+ } | {
115
+ type: "done";
116
+ messages?: unknown[];
117
+ requiresAction?: boolean;
118
+ };
119
+ /**
120
+ * ChatTransport interface
121
+ *
122
+ * Allows different transport implementations:
123
+ * - HTTP (default) - uses fetch with SSE
124
+ * - WebSocket - for real-time connections
125
+ * - Mock - for testing
126
+ *
127
+ * @example
128
+ * ```typescript
129
+ * const transport = new HttpTransport({
130
+ * url: '/api/chat',
131
+ * headers: { ... }
132
+ * });
133
+ *
134
+ * const stream = await transport.send(request);
135
+ * for await (const chunk of stream) {
136
+ * console.log(chunk);
137
+ * }
138
+ * ```
139
+ */
140
+ interface ChatTransport {
141
+ /**
142
+ * Send a chat request
143
+ *
144
+ * @param request - The chat request
145
+ * @returns AsyncIterable of stream chunks, or ChatResponse for non-streaming
146
+ */
147
+ send(request: ChatRequest): Promise<AsyncIterable<StreamChunk> | ChatResponse>;
148
+ /**
149
+ * Abort the current request
150
+ */
151
+ abort(): void;
152
+ /**
153
+ * Check if currently streaming
154
+ */
155
+ isStreaming(): boolean;
156
+ }
157
+
158
+ /**
159
+ * ChatState Interface
160
+ *
161
+ * Contract for framework-specific state implementations.
162
+ * React, Vue, Svelte each implement this differently.
163
+ */
164
+
165
+ /**
166
+ * ChatState interface - Framework adapters implement this
167
+ *
168
+ * This is the key abstraction that enables framework-agnostic code.
169
+ * The AbstractChat class uses this interface, and each framework
170
+ * provides its own implementation.
171
+ *
172
+ * @example React implementation
173
+ * ```typescript
174
+ * class ReactChatState implements ChatState<UIMessage> {
175
+ * #messages: UIMessage[] = [];
176
+ * #callbacks = new Set<() => void>();
177
+ *
178
+ * get messages() { return this.#messages; }
179
+ * set messages(m) {
180
+ * this.#messages = m;
181
+ * this.#callbacks.forEach(cb => cb()); // Trigger re-render
182
+ * }
183
+ *
184
+ * subscribe(cb: () => void) {
185
+ * this.#callbacks.add(cb);
186
+ * return () => this.#callbacks.delete(cb);
187
+ * }
188
+ * }
189
+ * ```
190
+ *
191
+ * @example Vue implementation
192
+ * ```typescript
193
+ * class VueChatState implements ChatState<UIMessage> {
194
+ * messages = ref<UIMessage[]>([]);
195
+ * status = ref<ChatStatus>('ready');
196
+ * // Vue refs are automatically reactive
197
+ * }
198
+ * ```
199
+ */
200
+ interface ChatState$1<T extends UIMessage = UIMessage> {
201
+ /** All messages in the conversation */
202
+ messages: T[];
203
+ /** Current chat status */
204
+ status: ChatStatus$1;
205
+ /** Current error if any */
206
+ error: Error | undefined;
207
+ /**
208
+ * Add a message to the end
209
+ */
210
+ pushMessage(message: T): void;
211
+ /**
212
+ * Remove the last message
213
+ */
214
+ popMessage(): void;
215
+ /**
216
+ * Replace a message at index
217
+ */
218
+ replaceMessage(index: number, message: T): void;
219
+ /**
220
+ * Update the last message (common during streaming)
221
+ */
222
+ updateLastMessage(updater: (message: T) => T): void;
223
+ /**
224
+ * Set all messages (replace entire array)
225
+ */
226
+ setMessages(messages: T[]): void;
227
+ /**
228
+ * Clear all messages
229
+ */
230
+ clearMessages(): void;
231
+ /**
232
+ * Subscribe to state changes
233
+ * Returns unsubscribe function
234
+ *
235
+ * This is used by React's useSyncExternalStore.
236
+ * Vue/Svelte may not need this (they use refs/stores).
237
+ */
238
+ subscribe?(callback: () => void): () => void;
239
+ /**
240
+ * Get immutable snapshot of messages
241
+ * Used by useSyncExternalStore's getSnapshot
242
+ */
243
+ getMessagesSnapshot?(): T[];
244
+ /**
245
+ * Get status snapshot
246
+ */
247
+ getStatusSnapshot?(): ChatStatus$1;
248
+ /**
249
+ * Get error snapshot
250
+ */
251
+ getErrorSnapshot?(): Error | undefined;
252
+ }
253
+
254
+ /**
255
+ * Chat status
256
+ */
257
+ type ChatStatus$1 = "ready" | "submitted" | "streaming" | "error";
258
+ /**
259
+ * Chat configuration
260
+ */
261
+ interface ChatConfig {
262
+ /** Runtime API endpoint */
263
+ runtimeUrl: string;
264
+ /** LLM configuration */
265
+ llm?: Partial<LLMConfig>;
266
+ /** System prompt */
267
+ systemPrompt?: string;
268
+ /** Enable streaming (default: true) */
269
+ streaming?: boolean;
270
+ /** Request headers */
271
+ headers?: Record<string, string>;
272
+ /** Thread ID for conversation persistence */
273
+ threadId?: string;
274
+ /** Debug mode */
275
+ debug?: boolean;
276
+ /** Available tools (passed to LLM) */
277
+ tools?: ToolDefinition[];
278
+ }
279
+ /**
280
+ * Chat callbacks for state updates
281
+ */
282
+ interface ChatCallbacks<T extends UIMessage = UIMessage> {
283
+ /** Called when messages change */
284
+ onMessagesChange?: (messages: T[]) => void;
285
+ /** Called when status changes */
286
+ onStatusChange?: (status: ChatStatus$1) => void;
287
+ /** Called when an error occurs */
288
+ onError?: (error: Error | null) => void;
289
+ /** Called when a message starts streaming */
290
+ onMessageStart?: (messageId: string) => void;
291
+ /** Called when message content is streamed */
292
+ onMessageDelta?: (messageId: string, delta: string) => void;
293
+ /** Called when a message finishes */
294
+ onMessageFinish?: (message: T) => void;
295
+ /** Called when tool calls are received */
296
+ onToolCalls?: (toolCalls: T["toolCalls"]) => void;
297
+ /** Called when generation is complete */
298
+ onFinish?: (messages: T[]) => void;
299
+ }
300
+ /**
301
+ * Chat initialization options
302
+ */
303
+ interface ChatInit<T extends UIMessage = UIMessage> extends ChatConfig {
304
+ /** Initial messages */
305
+ initialMessages?: T[];
306
+ /** State implementation (injected by framework adapter) */
307
+ state?: ChatState$1<T>;
308
+ /** Transport implementation */
309
+ transport?: ChatTransport;
310
+ /** Callbacks */
311
+ callbacks?: ChatCallbacks<T>;
312
+ }
313
+
314
+ /**
315
+ * Tool Types
316
+ *
317
+ * Types for tool execution and agent loop.
318
+ */
319
+
320
+ /**
321
+ * Tool execution status
322
+ */
323
+ type ToolExecutionStatus = "pending" | "executing" | "completed" | "failed" | "rejected";
324
+ /**
325
+ * Tool approval status
326
+ */
327
+ type ToolApprovalStatus = "none" | "required" | "approved" | "rejected";
328
+ /**
329
+ * Tool execution record
330
+ */
331
+ interface ToolExecution {
332
+ id: string;
333
+ toolCallId: string;
334
+ name: string;
335
+ args: Record<string, unknown>;
336
+ status: ToolExecutionStatus;
337
+ approvalStatus: ToolApprovalStatus;
338
+ result?: unknown;
339
+ error?: string;
340
+ startedAt?: Date;
341
+ completedAt?: Date;
342
+ }
343
+ /**
344
+ * Tool response
345
+ */
346
+ interface ToolResponse {
347
+ toolCallId: string;
348
+ success: boolean;
349
+ result?: unknown;
350
+ error?: string;
351
+ }
352
+ /**
353
+ * Agent loop configuration
354
+ */
355
+ interface AgentLoopConfig {
356
+ /** Maximum iterations */
357
+ maxIterations?: number;
358
+ /** Auto-approve all tools */
359
+ autoApprove?: boolean;
360
+ /** Initial tools */
361
+ tools?: ToolDefinition[];
362
+ /** Max tool executions to keep in memory (default: 100). Oldest are pruned. */
363
+ maxExecutionHistory?: number;
364
+ }
365
+ /**
366
+ * Agent loop callbacks
367
+ */
368
+ interface AgentLoopCallbacks {
369
+ /** Called when tool executions change */
370
+ onExecutionsChange?: (executions: ToolExecution[]) => void;
371
+ /** Called when a tool needs approval */
372
+ onApprovalRequired?: (execution: ToolExecution) => void;
373
+ /** Called when a tool starts */
374
+ onToolStart?: (execution: ToolExecution) => void;
375
+ /** Called when a tool completes */
376
+ onToolComplete?: (execution: ToolExecution) => void;
377
+ /** Called to continue chat with tool results */
378
+ onContinue?: (results: ToolResponse[]) => Promise<void>;
379
+ /** Called when max iterations reached */
380
+ onMaxIterationsReached?: () => void;
381
+ }
382
+ /**
383
+ * Agent loop state
384
+ */
385
+ interface AgentLoopState$1 {
386
+ toolExecutions: ToolExecution[];
387
+ iteration: number;
388
+ maxIterations: number;
389
+ maxIterationsReached: boolean;
390
+ isProcessing: boolean;
391
+ }
392
+ /**
393
+ * Agent loop actions interface
394
+ */
395
+ interface AgentLoopActions {
396
+ approveToolExecution: (executionId: string, permissionLevel?: PermissionLevel) => void;
397
+ rejectToolExecution: (executionId: string, reason?: string, permissionLevel?: PermissionLevel) => void;
398
+ clearToolExecutions: () => void;
399
+ }
400
+ /**
401
+ * Initial agent loop state
402
+ */
403
+ declare const initialAgentLoopState: AgentLoopState$1;
404
+
405
+ /**
406
+ * Event types emitted by AbstractChat
407
+ */
408
+ type ChatEvent = {
409
+ type: "toolCalls";
410
+ toolCalls: UIMessage["toolCalls"];
411
+ } | {
412
+ type: "done";
413
+ } | {
414
+ type: "error";
415
+ error: Error;
416
+ };
417
+ /**
418
+ * Event handler type
419
+ */
420
+ type ChatEventHandler<T extends ChatEvent["type"]> = (event: Extract<ChatEvent, {
421
+ type: T;
422
+ }>) => void;
423
+ /**
424
+ * AbstractChat - Core chat functionality
425
+ *
426
+ * @example
427
+ * ```typescript
428
+ * // With React state
429
+ * class ReactChat extends AbstractChat {
430
+ * constructor(config: ChatInit) {
431
+ * const state = new ReactChatState();
432
+ * super({ ...config, state });
433
+ * }
434
+ * }
435
+ *
436
+ * // Usage
437
+ * const chat = new ReactChat({ runtimeUrl: '/api/chat' });
438
+ * await chat.sendMessage('Hello!');
439
+ * ```
440
+ */
441
+ declare class AbstractChat<T extends UIMessage = UIMessage> {
442
+ protected state: ChatState$1<T>;
443
+ protected transport: ChatTransport;
444
+ protected config: ChatConfig;
445
+ protected callbacks: ChatCallbacks<T>;
446
+ private eventHandlers;
447
+ private streamState;
448
+ constructor(init: ChatInit<T>);
449
+ get messages(): T[];
450
+ get status(): ChatStatus$1;
451
+ get error(): Error | undefined;
452
+ get isStreaming(): boolean;
453
+ /**
454
+ * Send a message
455
+ */
456
+ sendMessage(content: string, attachments?: MessageAttachment[]): Promise<void>;
457
+ /**
458
+ * Continue with tool results
459
+ *
460
+ * Automatically handles `addAsUserMessage` flag in results (e.g., screenshots).
461
+ * When a tool result has this flag, the attachment is extracted and sent as
462
+ * a user message so the AI can see it (e.g., for vision analysis).
463
+ */
464
+ continueWithToolResults(toolResults: Array<{
465
+ toolCallId: string;
466
+ result: unknown;
467
+ }>): Promise<void>;
468
+ /**
469
+ * Stop generation
470
+ */
471
+ stop(): void;
472
+ /**
473
+ * Clear all messages
474
+ */
475
+ clearMessages(): void;
476
+ /**
477
+ * Set messages directly
478
+ */
479
+ setMessages(messages: T[]): void;
480
+ /**
481
+ * Regenerate last response
482
+ */
483
+ regenerate(messageId?: string): Promise<void>;
484
+ /**
485
+ * Subscribe to events
486
+ */
487
+ on<E extends ChatEvent["type"]>(event: E, handler: ChatEventHandler<E>): () => void;
488
+ /**
489
+ * Emit an event
490
+ */
491
+ protected emit<E extends ChatEvent["type"]>(type: E, data: Omit<Extract<ChatEvent, {
492
+ type: E;
493
+ }>, "type">): void;
494
+ /**
495
+ * Process a chat request
496
+ */
497
+ protected processRequest(): Promise<void>;
498
+ /**
499
+ * Set tools available for the LLM
500
+ */
501
+ setTools(tools: ToolDefinition[]): void;
502
+ /**
503
+ * Dynamic context from useAIContext hook
504
+ */
505
+ protected dynamicContext: string;
506
+ /**
507
+ * Set dynamic context (appended to system prompt)
508
+ */
509
+ setContext(context: string): void;
510
+ /**
511
+ * Build the request payload
512
+ */
513
+ protected buildRequest(): {
514
+ messages: ({
515
+ role: "tool";
516
+ content: string;
517
+ tool_call_id: string;
518
+ tool_calls?: undefined;
519
+ attachments?: undefined;
520
+ } | {
521
+ role: MessageRole;
522
+ content: string;
523
+ tool_calls: ToolCall[] | undefined;
524
+ tool_call_id: string | undefined;
525
+ attachments: MessageAttachment[] | undefined;
526
+ })[];
527
+ threadId: string | undefined;
528
+ systemPrompt: string | undefined;
529
+ llm: Partial<LLMConfig> | undefined;
530
+ tools: {
531
+ name: string;
532
+ description: string;
533
+ inputSchema: ToolInputSchema;
534
+ }[] | undefined;
535
+ };
536
+ /**
537
+ * Handle streaming response
538
+ */
539
+ protected handleStreamResponse(stream: AsyncIterable<StreamChunk>): Promise<void>;
540
+ /**
541
+ * Handle JSON (non-streaming) response
542
+ */
543
+ protected handleJsonResponse(response: ChatResponse): void;
544
+ /**
545
+ * Handle errors
546
+ */
547
+ protected handleError(error: Error): void;
548
+ /**
549
+ * Debug logging
550
+ */
551
+ protected debug(action: string, data?: unknown): void;
552
+ /**
553
+ * Type guard for async iterable
554
+ */
555
+ private isAsyncIterable;
556
+ /**
557
+ * Dispose and cleanup
558
+ */
559
+ dispose(): void;
560
+ }
561
+
562
+ /**
563
+ * AbstractAgentLoop - Framework-agnostic agent loop for tool execution
564
+ *
565
+ * Handles the agentic loop where the LLM can call tools, receive results,
566
+ * and continue processing until completion.
567
+ */
568
+
569
+ /**
570
+ * Tool call info from LLM response
571
+ */
572
+ interface ToolCallInfo {
573
+ id: string;
574
+ name: string;
575
+ args: Record<string, unknown>;
576
+ }
577
+ /**
578
+ * AbstractAgentLoop - Core agent loop functionality
579
+ *
580
+ * @example
581
+ * ```typescript
582
+ * const agentLoop = new AbstractAgentLoop(config, {
583
+ *   onExecutionsChange: setToolExecutions,
584
+ *   onApprovalRequired: handleApproval,
585
+ * });
586
+ *
587
+ * // Register tools
588
+ * agentLoop.registerTool(myTool);
589
+ *
590
+ * // Execute tool calls from LLM
591
+ * await agentLoop.executeToolCalls(toolCalls);
592
+ * ```
593
+ */
594
+ declare class AbstractAgentLoop implements AgentLoopActions {
595
+ private _toolExecutions;
596
+ private _iteration;
597
+ private _maxIterations;
598
+ private _maxIterationsReached;
599
+ private _isProcessing;
600
+ private registeredTools;
601
+ private pendingApprovals;
602
+ private config;
603
+ private callbacks;
604
+ private _maxExecutionHistory;
605
+ constructor(config?: AgentLoopConfig, callbacks?: AgentLoopCallbacks);
606
+ get toolExecutions(): ToolExecution[];
607
+ get iteration(): number;
608
+ get maxIterations(): number;
609
+ get maxIterationsReached(): boolean;
610
+ get isProcessing(): boolean;
611
+ get state(): AgentLoopState$1;
612
+ get pendingApprovalExecutions(): ToolExecution[];
613
+ get tools(): ToolDefinition[];
614
+ private setToolExecutions;
615
+ private setIteration;
616
+ private setProcessing;
617
+ private addToolExecution;
618
+ private updateToolExecution;
619
+ /**
620
+ * Register a tool
621
+ */
622
+ registerTool(tool: ToolDefinition): void;
623
+ /**
624
+ * Unregister a tool
625
+ */
626
+ unregisterTool(name: string): void;
627
+ /**
628
+ * Get a registered tool
629
+ */
630
+ getTool(name: string): ToolDefinition | undefined;
631
+ /**
632
+ * Execute tool calls from LLM response
633
+ * Returns tool results for sending back to LLM
634
+ */
635
+ executeToolCalls(toolCalls: ToolCallInfo[]): Promise<ToolResponse[]>;
636
+ /**
637
+ * Execute a single tool
638
+ */
639
+ private executeSingleTool;
640
+ /**
641
+ * Wait for user approval
642
+ */
643
+ private waitForApproval;
644
+ /**
645
+ * Approve a tool execution
646
+ */
647
+ approveToolExecution(executionId: string, _permissionLevel?: PermissionLevel): void;
648
+ /**
649
+ * Reject a tool execution
650
+ */
651
+ rejectToolExecution(executionId: string, reason?: string, _permissionLevel?: PermissionLevel): void;
652
+ /**
653
+ * Clear all tool executions
654
+ */
655
+ clearToolExecutions(): void;
656
+ /**
657
+ * Reset the agent loop for a new conversation
658
+ */
659
+ reset(): void;
660
+ /**
661
+ * Update configuration
662
+ */
663
+ updateConfig(config: Partial<AgentLoopConfig>): void;
664
+ /**
665
+ * Update callbacks
666
+ */
667
+ updateCallbacks(callbacks: Partial<AgentLoopCallbacks>): void;
668
+ /**
669
+ * Dispose of resources
670
+ */
671
+ dispose(): void;
672
+ }
673
+
674
+ interface CopilotProviderProps {
675
+ children: React__default.ReactNode;
676
+ runtimeUrl: string;
677
+ config?: CopilotConfig["config"];
678
+ cloud?: CopilotConfig["cloud"];
679
+ systemPrompt?: string;
680
+ /** @deprecated Use useTools() hook instead */
681
+ tools?: ToolsConfig;
682
+ threadId?: string;
683
+ initialMessages?: Message[];
684
+ onMessagesChange?: (messages: Message[]) => void;
685
+ onError?: (error: Error) => void;
686
+ /** Enable/disable streaming (default: true) */
687
+ streaming?: boolean;
688
+ debug?: boolean;
689
+ }
690
+ interface CopilotContextValue {
691
+ messages: UIMessage[];
692
+ status: "ready" | "submitted" | "streaming" | "error";
693
+ error: Error | null;
694
+ isLoading: boolean;
695
+ sendMessage: (content: string, attachments?: MessageAttachment[]) => Promise<void>;
696
+ stop: () => void;
697
+ clearMessages: () => void;
698
+ regenerate: (messageId?: string) => Promise<void>;
699
+ registerTool: (tool: ToolDefinition) => void;
700
+ unregisterTool: (name: string) => void;
701
+ registeredTools: ToolDefinition[];
702
+ toolExecutions: ToolExecution[];
703
+ pendingApprovals: ToolExecution[];
704
+ approveToolExecution: (id: string, permissionLevel?: PermissionLevel) => void;
705
+ rejectToolExecution: (id: string, reason?: string, permissionLevel?: PermissionLevel) => void;
706
+ registerAction: (action: ActionDefinition) => void;
707
+ unregisterAction: (name: string) => void;
708
+ registeredActions: ActionDefinition[];
709
+ addContext: (context: string, parentId?: string) => string;
710
+ removeContext: (id: string) => void;
711
+ threadId?: string;
712
+ runtimeUrl: string;
713
+ toolsConfig?: ToolsConfig;
714
+ }
715
+ declare function useCopilot(): CopilotContextValue;
716
+ declare function CopilotProvider({ children, runtimeUrl, config, cloud, systemPrompt, tools: toolsConfig, threadId, initialMessages, onMessagesChange, onError, streaming, debug, }: CopilotProviderProps): react_jsx_runtime.JSX.Element;
717
+
718
+ /**
719
+ * Chat UI state interface (UI-only state, not message data)
720
+ * Message data is stored in ThreadsState as the single source of truth
721
+ */
722
+ interface ChatState {
723
+ /** Whether a response is being generated */
724
+ isLoading: boolean;
725
+ /** Current error if any */
726
+ error: Error | null;
727
+ }
728
+ /**
729
+ * Combined chat state for context consumers
730
+ * Includes derived data from threads for convenience
731
+ */
732
+ interface CombinedChatState {
733
+ /** All messages in the conversation (from active thread) */
734
+ messages: Message[];
735
+ /** Whether a response is being generated */
736
+ isLoading: boolean;
737
+ /** Current error if any */
738
+ error: Error | null;
739
+ /** Thread/conversation ID (from active thread) */
740
+ threadId: string | null;
741
+ /** Sources from knowledge base (from active thread) */
742
+ sources: Source[];
743
+ }
744
+ /**
745
+ * Tools state interface (Smart Context tools)
746
+ */
747
+ interface ToolsState {
748
+ /** Whether tools are enabled */
749
+ isEnabled: boolean;
750
+ /** Pending consent request */
751
+ pendingConsent: ToolConsentRequest | null;
752
+ /** Last captured context */
753
+ lastContext: CapturedContext | null;
754
+ /** Currently capturing */
755
+ isCapturing: boolean;
756
+ }
757
+ /**
758
+ * Agent loop state interface (Agentic tools)
759
+ */
760
+ interface AgentLoopState {
761
+ /** Current tool executions */
762
+ toolExecutions: ToolExecution$1[];
763
+ /** Current loop iteration */
764
+ iteration: number;
765
+ /** Maximum iterations */
766
+ maxIterations: number;
767
+ /** Whether max iterations was reached */
768
+ maxIterationsReached: boolean;
769
+ /** Whether waiting for server response after tool completion */
770
+ isProcessing: boolean;
771
+ }
772
+ /**
773
+ * Chat actions interface
774
+ */
775
+ interface ChatActions {
776
+ /** Send a message (with optional attachments) */
777
+ sendMessage: (content: string, attachments?: MessageAttachment[]) => Promise<void>;
778
+ /** Send a message with context */
779
+ sendMessageWithContext: (content: string, context: CapturedContext) => Promise<void>;
780
+ /** Stop generation */
781
+ stopGeneration: () => void;
782
+ /** Clear all messages */
783
+ clearMessages: () => void;
784
+ /** Regenerate last response */
785
+ regenerate: (messageId?: string) => Promise<void>;
786
+ /** Set messages directly */
787
+ setMessages: (messages: Message[]) => void;
788
+ /**
789
+ * Process file to MessageAttachment
790
+ * - Premium: uploads to cloud storage, returns URL-based attachment
791
+ * - Free: converts to base64
792
+ */
793
+ processAttachment: (file: File) => Promise<MessageAttachment>;
794
+ }
795
+ /**
796
+ * Tools actions interface
797
+ */
798
+ interface ToolsActions {
799
+ /** Request consent for tools */
800
+ requestConsent: (tools: ToolType[], reason?: string) => void;
801
+ /** Respond to consent request */
802
+ respondToConsent: (approved: ToolType[], remember?: boolean) => void;
803
+ /** Capture context */
804
+ captureContext: (tools: ToolType[]) => Promise<CapturedContext>;
805
+ /** Clear pending consent */
806
+ clearConsent: () => void;
807
+ }
808
+
809
+ /**
810
+ * Hook to register multiple AI actions/tools
811
+ *
812
+ * @example
813
+ * ```tsx
814
+ * useAIActions([
815
+ * {
816
+ * name: 'getWeather',
817
+ * description: 'Get weather for a location',
818
+ * parameters: {
819
+ * location: { type: 'string', required: true, description: 'City name' },
820
+ * },
821
+ * handler: async ({ location }) => {
822
+ * const weather = await fetchWeather(location);
823
+ * return weather;
824
+ * },
825
+ * },
826
+ * ]);
827
+ * ```
828
+ */
829
+ declare function useAIActions(actions: ActionDefinition[]): void;
830
+ /**
831
+ * Hook to register a single AI action/tool
832
+ *
833
+ * @example
834
+ * ```tsx
835
+ * useAIAction({
836
+ * name: 'searchProducts',
837
+ * description: 'Search for products',
838
+ * parameters: {
839
+ * query: { type: 'string', required: true },
840
+ * },
841
+ * handler: async ({ query }) => {
842
+ * return await searchProducts(query);
843
+ * },
844
+ * });
845
+ * ```
846
+ */
847
+ declare function useAIAction(action: ActionDefinition): void;
848
+
849
+ /**
850
+ * Context item for AI
851
+ */
852
+ interface AIContextItem {
853
+ /** Unique key for this context */
854
+ key: string;
855
+ /** Data to provide to AI (will be JSON stringified) */
856
+ data: unknown;
857
+ /** Optional description to help AI understand the context */
858
+ description?: string;
859
+ /** Parent context ID for hierarchical/nested contexts */
860
+ parentId?: string;
861
+ }
862
+ /**
863
+ * Hook to provide app state/context to the AI
864
+ *
865
+ * This hook allows you to inject React state into the AI's context,
866
+ * so it can understand and reference your app's current state.
867
+ *
868
+ * @returns Context ID that can be used as `parentId` for nested contexts
869
+ *
870
+ * @example Basic usage
871
+ * ```tsx
872
+ * function CartPage() {
873
+ * const [cart, setCart] = useState([]);
874
+ *
875
+ * // Provide cart data to AI
876
+ * useAIContext({
877
+ * key: 'cart',
878
+ * data: cart,
879
+ * description: 'User shopping cart items',
880
+ * });
881
+ *
882
+ * return <CartUI cart={cart} />;
883
+ * }
884
+ * ```
885
+ *
886
+ * @example Nested/hierarchical contexts
887
+ * ```tsx
888
+ * function EmployeeList({ employees }) {
889
+ * // Parent context - returns ID for nesting
890
+ * const listId = useAIContext({
891
+ * key: 'employees',
892
+ * data: { count: employees.length },
893
+ * description: 'Employee list',
894
+ * });
895
+ *
896
+ * return employees.map(emp => (
897
+ * <Employee key={emp.id} employee={emp} parentContextId={listId} />
898
+ * ));
899
+ * }
900
+ *
901
+ * function Employee({ employee, parentContextId }) {
902
+ * // Child context - nested under parent
903
+ * useAIContext({
904
+ * key: `employee-${employee.id}`,
905
+ * data: employee,
906
+ * description: employee.name,
907
+ * parentId: parentContextId, // Links to parent context
908
+ * });
909
+ *
910
+ * return <div>{employee.name}</div>;
911
+ * }
912
+ * ```
913
+ */
914
+ declare function useAIContext(item: AIContextItem): string | undefined;
915
+ /**
916
+ * Hook to provide multiple context items at once
917
+ *
918
+ * @example
919
+ * ```tsx
920
+ * useAIContexts([
921
+ * { key: 'user', data: currentUser },
922
+ * { key: 'cart', data: cartItems },
923
+ * { key: 'page', data: { route: '/checkout', step: 2 } },
924
+ * ]);
925
+ * ```
926
+ */
927
+ declare function useAIContexts(items: AIContextItem[]): void;
928
+
929
+ /**
930
+ * useAITools options
931
+ */
932
+ interface UseAIToolsOptions extends ToolsConfig {
933
+ /** Callback when consent is requested */
934
+ onConsentRequest?: (request: ToolConsentRequest) => Promise<ToolConsentResponse>;
935
+ /** Auto-start capturing when enabled */
936
+ autoStart?: boolean;
937
+ }
938
+ /**
939
+ * useAITools return type
940
+ */
941
+ interface UseAIToolsReturn {
942
+ /** Whether tools are enabled */
943
+ isEnabled: boolean;
944
+ /** Currently active captures */
945
+ activeCaptures: {
946
+ console: boolean;
947
+ network: boolean;
948
+ };
949
+ /** Capture screenshot */
950
+ captureScreenshot: (options?: ScreenshotOptions) => Promise<CapturedContext["screenshot"]>;
951
+ /** Get console logs */
952
+ getConsoleLogs: (options?: ConsoleLogOptions) => CapturedContext["consoleLogs"];
953
+ /** Get network requests */
954
+ getNetworkRequests: (options?: NetworkRequestOptions) => CapturedContext["networkRequests"];
955
+ /** Capture all enabled context */
956
+ captureContext: (tools?: ToolType[]) => Promise<CapturedContext>;
957
+ /** Detect intent from message */
958
+ detectIntent: (message: string) => IntentDetectionResult;
959
+ /** Request consent for tools */
960
+ requestConsent: (tools: ToolType[], reason?: string) => Promise<ToolConsentResponse>;
961
+ /** Start capturing */
962
+ startCapturing: () => void;
963
+ /** Stop capturing */
964
+ stopCapturing: () => void;
965
+ /** Clear captured data */
966
+ clearCaptured: () => void;
967
+ /** Format captured context for AI */
968
+ formatForAI: (context: CapturedContext) => string;
969
+ /** Pending consent request (for UI) */
970
+ pendingConsent: ToolConsentRequest | null;
971
+ /** Respond to consent request */
972
+ respondToConsent: (response: ToolConsentResponse) => void;
973
+ }
974
+ /**
975
+ * Hook for AI Smart Context Tools
976
+ *
977
+ * Provides React integration for screenshot, console, and network capture
978
+ * with consent-based UX and intent detection.
979
+ *
980
+ * @example
981
+ * ```tsx
982
+ * const {
983
+ * captureScreenshot,
984
+ * getConsoleLogs,
985
+ *   detectIntent, requestConsent, captureContext,
986
+ * pendingConsent,
987
+ * respondToConsent,
988
+ * } = useAITools({
989
+ * screenshot: true,
990
+ * console: true,
991
+ * network: true,
992
+ * requireConsent: true,
993
+ * });
994
+ *
995
+ * // Detect if tools are needed based on user message
996
+ * const handleMessage = async (message: string) => {
997
+ * const intent = detectIntent(message);
998
+ *
999
+ * if (intent.suggestedTools.length > 0) {
1000
+ * const consent = await requestConsent(
1001
+ * intent.suggestedTools,
1002
+ * generateSuggestionReason(intent)
1003
+ * );
1004
+ *
1005
+ * if (consent.approved.length > 0) {
1006
+ * const context = await captureContext(consent.approved);
1007
+ * // Include context with message
1008
+ * }
1009
+ * }
1010
+ * };
1011
+ * ```
1012
+ */
1013
+ declare function useAITools(options?: UseAIToolsOptions): UseAIToolsReturn;
1014
+
1015
/**
 * Configuration for registering a tool (legacy format).
 *
 * Prefer the `ToolSet`-based `useTools` for new code; this shape is kept
 * for backward compatibility with `useTool`.
 */
interface UseToolConfig<TParams = Record<string, unknown>> {
    /** Unique tool name */
    name: string;
    /** Tool description for LLM */
    description: string;
    /** JSON Schema for input parameters (always an object schema) */
    inputSchema: {
        type: "object";
        properties: Record<string, unknown>;
        required?: string[];
    };
    /** Handler function; may be sync or async */
    handler: (params: TParams, context?: ToolContext) => Promise<ToolResponse$1> | ToolResponse$1;
    /** Optional render function for UI */
    render?: (props: ToolRenderProps<TParams>) => React.ReactNode;
    /** Whether the tool is available */
    available?: boolean;
    /** Require user approval before the handler runs */
    needsApproval?: boolean;
    /** Custom approval message shown to the user */
    approvalMessage?: string;
}
1040
/**
 * Register a client-side tool.
 *
 * This hook registers a tool that can be called by the AI during a conversation.
 * The tool will execute on the client side.
 *
 * @param config - Tool definition (name, description, schema, handler).
 * @param dependencies - Re-registration dependency list, React-hook style
 *   (presumably re-registers when these change — verify against implementation).
 *
 * @example
 * ```tsx
 * useTool({
 *   name: "navigate_to_page",
 *   description: "Navigate to a specific page in the app",
 *   inputSchema: {
 *     type: "object",
 *     properties: {
 *       path: { type: "string", description: "The path to navigate to" },
 *     },
 *     required: ["path"],
 *   },
 *   handler: async ({ path }) => {
 *     router.push(path);
 *     return { success: true, message: `Navigated to ${path}` };
 *   },
 * });
 * ```
 */
declare function useTool<TParams = Record<string, unknown>>(config: UseToolConfig<TParams>, dependencies?: unknown[]): void;
1066
/**
 * Register multiple tools using a ToolSet (Vercel AI SDK pattern).
 *
 * This is the recommended way to register tools as it follows
 * the Vercel AI SDK pattern with explicit tool definitions.
 *
 * @param tools - Map of tool name to tool definition.
 *
 * @example
 * ```tsx
 * import { useTools } from '@yourgpt/copilot-sdk-react';
 * import { builtinTools, tool, success } from '../core';
 *
 * function MyApp() {
 *   // Register built-in tools
 *   useTools({
 *     capture_screenshot: builtinTools.capture_screenshot,
 *     get_console_logs: builtinTools.get_console_logs,
 *   });
 *
 *   // Or create custom tools
 *   useTools({
 *     get_weather: tool({
 *       description: 'Get weather for a location',
 *       inputSchema: {
 *         type: 'object',
 *         properties: {
 *           location: { type: 'string' },
 *         },
 *         required: ['location'],
 *       },
 *       handler: async ({ location }) => {
 *         const weather = await fetchWeather(location);
 *         return success(weather);
 *       },
 *     }),
 *   });
 *
 *   return <CopilotChat />;
 * }
 * ```
 */
declare function useTools(tools: ToolSet): void;
1107
+
1108
/**
 * Zod schema type (minimal structural interface).
 *
 * Declared here so the SDK does not take a hard dependency on zod;
 * any real `z.object(...)` satisfies this shape.
 * NOTE(review): `_output` / `_def.shape` are zod internals — this matches
 * classic zod v3 internals; confirm compatibility with newer zod versions.
 */
interface ZodObjectSchema {
    /** Inferred output type of the schema (zod's `_output`) */
    _output: Record<string, unknown>;
    _def: {
        /** Lazy accessor for the object's property schemas */
        shape: () => Record<string, unknown>;
    };
}
/**
 * Configuration for registering a tool with Zod schema.
 */
interface UseToolWithSchemaConfig<TSchema extends ZodObjectSchema> {
    /** Unique tool name */
    name: string;
    /** Tool description for LLM */
    description: string;
    /** Zod schema for input parameters */
    schema: TSchema;
    /** Handler function; params are typed from the schema's output */
    handler: (params: TSchema["_output"], context?: ToolContext) => Promise<ToolResponse$1> | ToolResponse$1;
    /** Optional render function for UI */
    render?: (props: ToolRenderProps<TSchema["_output"]>) => React.ReactNode;
    /** Whether the tool is available */
    available?: boolean;
}
1134
/**
 * Register a client-side tool using a Zod schema.
 *
 * This hook provides type-safe tool registration using Zod schemas.
 * The Zod schema is automatically converted to JSON Schema for the LLM.
 *
 * @param config - Tool definition with a Zod object schema.
 * @param dependencies - Re-registration dependency list, React-hook style.
 *
 * @example
 * ```tsx
 * import { z } from "zod";
 *
 * useToolWithSchema({
 *   name: "navigate_to_page",
 *   description: "Navigate to a specific page in the app",
 *   schema: z.object({
 *     path: z.string().describe("The path to navigate to"),
 *   }),
 *   handler: async ({ path }) => {
 *     // TypeScript knows `path` is a string!
 *     router.push(path);
 *     return { success: true, message: `Navigated to ${path}` };
 *   },
 * });
 * ```
 */
declare function useToolWithSchema<TSchema extends ZodObjectSchema>(config: UseToolWithSchemaConfig<TSchema>, dependencies?: unknown[]): void;
/**
 * Register multiple client-side tools using Zod schemas.
 *
 * Note: all entries share the single type parameter `TSchema`, so the
 * strongest typing is obtained when tools are registered individually
 * via `useToolWithSchema`.
 *
 * @param tools - Array of tool configurations.
 * @param dependencies - Re-registration dependency list, React-hook style.
 *
 * @example
 * ```tsx
 * import { z } from "zod";
 *
 * useToolsWithSchema([
 *   {
 *     name: "navigate",
 *     description: "Navigate to page",
 *     schema: z.object({ path: z.string() }),
 *     handler: async ({ path }) => { ... },
 *   },
 *   {
 *     name: "open_modal",
 *     description: "Open a modal",
 *     schema: z.object({ modalId: z.string() }),
 *     handler: async ({ modalId }) => { ... },
 *   },
 * ]);
 * ```
 */
declare function useToolsWithSchema<TSchema extends ZodObjectSchema>(tools: UseToolWithSchemaConfig<TSchema>[], dependencies?: unknown[]): void;
1183
+
1184
/**
 * Tool executor return type.
 */
interface UseToolExecutorReturn {
    /**
     * Execute a tool by name with given arguments
     */
    executeTool: (toolCall: UnifiedToolCall) => Promise<ToolResponse$1>;
    /**
     * Send tool result back to server
     */
    sendToolResult: (toolCallId: string, result: ToolResponse$1) => Promise<void>;
    /**
     * Get a registered tool by name (undefined when not registered)
     */
    getTool: (name: string) => ToolDefinition | undefined;
    /**
     * Check if a tool is registered
     */
    hasTool: (name: string) => boolean;
}
/**
 * Internal hook for executing client-side tools.
 *
 * This hook is used internally by the CopilotProvider to execute
 * tools when the server requests them via SSE events.
 *
 * It can also be used for custom implementations where you need
 * direct control over tool execution.
 *
 * @returns Tool execution helpers (see `UseToolExecutorReturn`).
 *
 * @example
 * ```tsx
 * const { executeTool, sendToolResult } = useToolExecutor();
 *
 * // When receiving a tool:execute event from server
 * const handleToolExecute = async (event: ToolExecuteEvent) => {
 *   const result = await executeTool({
 *     id: event.id,
 *     name: event.name,
 *     input: event.args,
 *   });
 *
 *   // Send result back to server
 *   await sendToolResult(event.id, result);
 * };
 * ```
 */
declare function useToolExecutor(): UseToolExecutorReturn;
1232
+
1233
/**
 * Suggestion item.
 */
interface Suggestion {
    /** Suggestion text */
    text: string;
    /** Optional icon */
    icon?: string;
}
/**
 * useSuggestions options.
 */
interface UseSuggestionsOptions {
    /** Number of suggestions to show */
    count?: number;
    /** Context for generating suggestions */
    context?: string;
    /** Static suggestions (if not using AI-generated); plain strings are accepted */
    suggestions?: Suggestion[] | string[];
    /** Auto-refresh on conversation change */
    autoRefresh?: boolean;
}
/**
 * useSuggestions return type.
 */
interface UseSuggestionsReturn {
    /** Current suggestions */
    suggestions: Suggestion[];
    /** Whether suggestions are loading */
    isLoading: boolean;
    /** Refresh suggestions */
    refresh: () => Promise<void>;
    /** Select a suggestion (sends as message) */
    select: (suggestion: Suggestion | string) => void;
}
/**
 * Hook for chat suggestions.
 *
 * @param options - Suggestion count / context / static list configuration.
 * @returns Current suggestions plus refresh/select actions.
 *
 * @example
 * ```tsx
 * const { suggestions, select } = useSuggestions({
 *   count: 3,
 *   context: 'Help users with product questions',
 * });
 *
 * return (
 *   <div>
 *     {suggestions.map((s, i) => (
 *       <button key={i} onClick={() => select(s)}>{s.text}</button>
 *     ))}
 *   </div>
 * );
 * ```
 */
declare function useSuggestions(options?: UseSuggestionsOptions): UseSuggestionsReturn;
1288
+
1289
/**
 * useAgent options.
 */
interface UseAgentOptions<TState = Record<string, unknown>> {
    /** Agent name */
    name: string;
    /** Initial state */
    initialState?: TState;
    /** Called when agent state changes */
    onStateChange?: (state: TState) => void;
}
/**
 * useAgent return type.
 */
interface UseAgentReturn<TState = Record<string, unknown>> {
    /** Current agent state */
    state: TState;
    /** Whether agent is running */
    isRunning: boolean;
    /** Current node name (for graph-based agents); null when not applicable */
    nodeName: string | null;
    /** Start the agent with an optional text or structured input */
    start: (input?: string | Record<string, unknown>) => Promise<void>;
    /** Stop the agent */
    stop: () => void;
    /** Update agent state (partial merge) */
    setState: (state: Partial<TState>) => void;
    /** Error if any */
    error: Error | null;
}
/**
 * Hook for connecting to agents (LangGraph, etc.).
 *
 * @typeParam TState - Shape of the agent's shared state object.
 * @param options - Agent name, initial state, and change callback.
 * @returns Agent state and lifecycle controls.
 *
 * @example
 * ```tsx
 * const { state, isRunning, start } = useAgent<{ city: string }>({
 *   name: 'weather-agent',
 *   initialState: { city: '' },
 * });
 *
 * return (
 *   <div>
 *     <p>City: {state.city}</p>
 *     <button onClick={() => start('What is the weather?')}>
 *       {isRunning ? 'Running...' : 'Start'}
 *     </button>
 *   </div>
 * );
 * ```
 */
declare function useAgent<TState = Record<string, unknown>>(options: UseAgentOptions<TState>): UseAgentReturn<TState>;
1340
+
1341
/**
 * Knowledge Base Search Utility
 *
 * Integrates with managed cloud knowledge base API to search indexed documents.
 */

/** Public alias for the internal knowledge-base result type */
type KnowledgeBaseResult = InternalKnowledgeBaseResult;
/** Public alias for the internal knowledge-base configuration type */
type KnowledgeBaseConfig = InternalKnowledgeBaseConfig;
interface KnowledgeBaseSearchResponse extends InternalKnowledgeBaseSearchResponse {
    /** Result page number, when the API paginates */
    page?: number;
}
/**
 * Search the knowledge base.
 *
 * @param query - Search query string
 * @param config - Knowledge base configuration
 * @returns Search results
 */
declare function searchKnowledgeBase(query: string, config: KnowledgeBaseConfig): Promise<KnowledgeBaseSearchResponse>;
/**
 * Format knowledge base results for AI context.
 *
 * @param results - Results to serialize into a prompt-friendly string.
 * @returns Text suitable for inclusion in the model context.
 */
declare function formatKnowledgeResultsForAI(results: KnowledgeBaseResult[]): string;
1364
+
1365
/**
 * Hook configuration for knowledge base.
 */
interface UseKnowledgeBaseConfig {
    /** Project UID for the knowledge base */
    projectUid: string;
    /** Auth token for API calls */
    token: string;
    /** App ID (default: "1") */
    appId?: string;
    /** Results limit (default: 5) */
    limit?: number;
    /** Whether to enable the tool (default: true) */
    enabled?: boolean;
}
/**
 * Hook to integrate knowledge base search as a tool.
 *
 * Registers a `search_knowledge` tool that the AI can use to search
 * the knowledge base for relevant information.
 *
 * @param config - Project, auth, and limit configuration.
 *
 * @example
 * ```tsx
 * function MyComponent() {
 *   useKnowledgeBase({
 *     projectUid: "your-project-uid",
 *     token: "your-auth-token",
 *   });
 *
 *   return <CopilotChat />;
 * }
 * ```
 */
declare function useKnowledgeBase(config: UseKnowledgeBaseConfig): void;
1399
+
1400
/**
 * Provider capabilities for UI feature flags.
 *
 * Describes what the active LLM provider/model supports, so UI surfaces
 * (upload buttons, streaming indicators, etc.) can be conditionally shown.
 */
interface ProviderCapabilities {
    /** Supports image inputs */
    supportsVision: boolean;
    /** Supports tool/function calling */
    supportsTools: boolean;
    /** Supports extended thinking (Claude, DeepSeek) */
    supportsThinking: boolean;
    /** Supports streaming responses */
    supportsStreaming: boolean;
    /** Supports PDF document inputs */
    supportsPDF: boolean;
    /** Supports audio inputs */
    supportsAudio: boolean;
    /** Supports video inputs */
    supportsVideo: boolean;
    /** Maximum context tokens */
    maxTokens: number;
    /** Supported image MIME types */
    supportedImageTypes: string[];
    /** Supported audio MIME types */
    supportedAudioTypes?: string[];
    /** Supported video MIME types */
    supportedVideoTypes?: string[];
    /** Supports JSON mode / structured output */
    supportsJsonMode?: boolean;
    /** Supports system messages */
    supportsSystemMessages?: boolean;
}
/**
 * Capabilities response from the server.
 */
interface CapabilitiesResponse {
    /** Provider name */
    provider: string;
    /** Current model ID */
    model: string;
    /** Model capabilities */
    capabilities: ProviderCapabilities;
    /** List of supported models for this provider */
    supportedModels: string[];
}
1444
/**
 * Hook to access model capabilities from the runtime.
 *
 * @returns Capabilities state and actions
 *
 * @example
 * ```tsx
 * function ChatInput() {
 *   const { capabilities, isLoading } = useCapabilities();
 *
 *   return (
 *     <div>
 *       {capabilities.supportsVision && (
 *         <ImageUploadButton />
 *       )}
 *       {capabilities.supportsAudio && (
 *         <AudioRecordButton />
 *       )}
 *     </div>
 *   );
 * }
 * ```
 */
declare function useCapabilities(): {
    /** Current model capabilities */
    capabilities: ProviderCapabilities;
    /** Current provider name */
    provider: string;
    /** Current model ID */
    model: string;
    /** List of supported models for current provider */
    supportedModels: string[];
    /** Whether capabilities are being loaded */
    isLoading: boolean;
    /** Error if fetch failed */
    error: Error | null;
    /** Refetch capabilities */
    refetch: () => Promise<void>;
};
/**
 * Hook to check if a specific feature is supported.
 *
 * @param feature - The capability flag to check (e.g. 'supportsVision',
 *   'supportsAudio'); constrained to the boolean flags of `ProviderCapabilities`.
 * @returns Whether the feature is supported
 *
 * @example
 * ```tsx
 * function ImageButton() {
 *   const supportsVision = useFeatureSupport('supportsVision');
 *
 *   if (!supportsVision) return null;
 *   return <button>Upload Image</button>;
 * }
 * ```
 */
declare function useFeatureSupport(feature: keyof Pick<ProviderCapabilities, "supportsVision" | "supportsTools" | "supportsThinking" | "supportsStreaming" | "supportsPDF" | "supportsAudio" | "supportsVideo" | "supportsJsonMode" | "supportsSystemMessages">): boolean;
/**
 * Hook to get supported media types.
 *
 * @returns Object with supported media types
 *
 * @example
 * ```tsx
 * function MediaUpload() {
 *   const { imageTypes, audioTypes, videoTypes } = useSupportedMediaTypes();
 *
 *   return (
 *     <input
 *       type="file"
 *       accept={imageTypes.join(',')}
 *     />
 *   );
 * }
 * ```
 */
declare function useSupportedMediaTypes(): {
    /** Supported image MIME types */
    imageTypes: string[];
    /** Supported audio MIME types */
    audioTypes: string[];
    /** Supported video MIME types */
    videoTypes: string[];
    /** Whether any image types are supported */
    hasImageSupport: boolean;
    /** Whether any audio types are supported */
    hasAudioSupport: boolean;
    /** Whether any video types are supported */
    hasVideoSupport: boolean;
};
1533
+
1534
/**
 * SDK State for DevLogger.
 * This type is compatible with DevLoggerState in @yourgpt/copilot-sdk-ui.
 *
 * Aggregated diagnostic snapshot of the SDK: chat status, tool activity,
 * agent-loop progress, registered tools/actions, stored permissions, and
 * runtime configuration.
 */
interface DevLoggerState {
    /** Chat session status */
    chat: {
        isLoading: boolean;
        messageCount: number;
        threadId: string;
        error: string | null;
    };
    /** Context-capture tool status */
    tools: {
        isEnabled: boolean;
        isCapturing: boolean;
        pendingConsent: boolean;
    };
    /** Agent-loop progress and tool execution tracking */
    agentLoop: {
        toolExecutions: Array<{
            id: string;
            name: string;
            status: string;
            approvalStatus: string;
        }>;
        pendingApprovals: number;
        iteration: number;
        maxIterations: number;
    };
    /** What has been registered with the provider */
    registered: {
        tools: Array<{
            name: string;
            location: string;
        }>;
        actions: Array<{
            name: string;
        }>;
        contextCount: number;
    };
    /** Persisted tool permissions */
    permissions: {
        stored: Array<{
            toolName: string;
            level: string;
        }>;
        loaded: boolean;
    };
    /** Active runtime configuration */
    config: {
        provider: string;
        model: string;
        runtimeUrl: string;
    };
}
/**
 * Hook to build DevLogger state from SDK context.
 *
 * Used internally by CopilotProvider when showLogger is true.
 *
 * @returns A snapshot of SDK state for the developer logger UI.
 */
declare function useDevLogger(): DevLoggerState;
1590
+
1591
/**
 * Create a permission storage adapter based on config.
 *
 * @param config - Storage backend selection/options.
 * @returns An adapter implementing persistent permission reads/writes.
 */
declare function createPermissionStorage(config: PermissionStorageConfig): PermissionStorageAdapter;
/**
 * Create a session-only permission cache.
 * Used for "session" permission level (in-memory, cleared on page close).
 *
 * @returns A plain Map keyed by tool name holding the granted level.
 */
declare function createSessionPermissionCache(): Map<string, PermissionLevel>;
1600
+
1601
/**
 * ReactChatState - React-specific implementation of ChatState
 *
 * This class implements the ChatState interface with callback-based
 * reactivity for use with React's useSyncExternalStore.
 *
 * Pattern inspired by Vercel AI SDK's useSyncExternalStore pattern.
 */

/**
 * ReactChatState implements ChatState with callback-based reactivity.
 *
 * @example
 * ```tsx
 * const state = new ReactChatState<UIMessage>();
 *
 * // Subscribe to changes (for useSyncExternalStore)
 * const unsubscribe = state.subscribe(() => {
 *   console.log('State changed');
 * });
 *
 * // Get snapshot (for useSyncExternalStore)
 * const messages = state.messages;
 * ```
 */
declare class ReactChatState<T extends UIMessage = UIMessage> implements ChatState$1<T> {
    // Backing stores for the reactive getters/setters below
    private _messages;
    private _status;
    private _error;
    // Registered change callbacks, notified via notify()
    private subscribers;
    constructor(initialMessages?: T[]);
    /** Current message list snapshot */
    get messages(): T[];
    /** Current chat status */
    get status(): ChatStatus$1;
    /** Last error, if any */
    get error(): Error | undefined;
    set messages(value: T[]);
    set status(value: ChatStatus$1);
    set error(value: Error | undefined);
    /** Append a message to the end of the list */
    pushMessage(message: T): void;
    /** Remove the last message */
    popMessage(): void;
    /** Replace the message at the given index */
    replaceMessage(index: number, message: T): void;
    /** Replace the last message via an updater function (used for streaming) */
    updateLastMessage(updater: (message: T) => T): void;
    /** Replace the whole message list */
    setMessages(messages: T[]): void;
    /** Remove all messages */
    clearMessages(): void;
    /**
     * Subscribe to state changes.
     * Returns an unsubscribe function.
     *
     * @example
     * ```tsx
     * const messages = useSyncExternalStore(
     *   state.subscribe,
     *   () => state.messages
     * );
     * ```
     */
    subscribe: (callback: () => void) => (() => void);
    // Invokes every registered subscriber after a mutation
    private notify;
    /**
     * Cleanup subscriptions
     */
    dispose(): void;
}
/**
 * Create a ReactChatState instance.
 *
 * @param initialMessages - Messages to seed the state with.
 */
declare function createReactChatState<T extends UIMessage = UIMessage>(initialMessages?: T[]): ReactChatState<T>;
1667
+
1668
/**
 * ReactChat - React-specific wrapper for AbstractChat
 *
 * This class extends the new AbstractChat from @yourgpt/copilot-sdk-chat
 * and injects ReactChatState for React-specific state management.
 *
 * Pattern inspired by Vercel AI SDK's Chat class.
 */

/**
 * Chat status for UI state.
 */
type ChatStatus = "ready" | "submitted" | "streaming" | "error";
/**
 * ReactChat configuration.
 */
interface ReactChatConfig {
    /** Runtime API endpoint */
    runtimeUrl: string;
    /** System prompt */
    systemPrompt?: string;
    /** LLM configuration (same shape as AbstractChat's config) */
    llm?: ChatConfig["llm"];
    /** Thread ID */
    threadId?: string;
    /** Enable streaming (default: true) */
    streaming?: boolean;
    /** Request headers */
    headers?: Record<string, string>;
    /** Initial messages */
    initialMessages?: UIMessage[];
    /** Debug mode */
    debug?: boolean;
    /** Lifecycle callbacks */
    callbacks?: ChatCallbacks<UIMessage>;
}
1704
/**
 * ReactChat extends AbstractChat with React-specific state management.
 *
 * Uses ReactChatState which implements ChatState interface with
 * callback-based reactivity for useSyncExternalStore.
 *
 * @example
 * ```tsx
 * const chatRef = useRef(new ReactChat(config));
 *
 * const messages = useSyncExternalStore(
 *   chatRef.current.subscribe,
 *   () => chatRef.current.messages
 * );
 * ```
 */
declare class ReactChat extends AbstractChat<UIMessage> {
    // The injected ReactChatState backing this chat's reactive state
    private reactState;
    constructor(config: ReactChatConfig);
    /**
     * Subscribe to state changes.
     * Returns an unsubscribe function.
     *
     * @example
     * ```tsx
     * const messages = useSyncExternalStore(
     *   chat.subscribe,
     *   () => chat.messages
     * );
     * ```
     */
    subscribe: (callback: () => void) => (() => void);
    /**
     * Subscribe to tool calls events.
     * @returns Unsubscribe function.
     */
    onToolCalls(handler: ChatEventHandler<"toolCalls">): () => void;
    /**
     * Subscribe to done events.
     * @returns Unsubscribe function.
     */
    onDone(handler: ChatEventHandler<"done">): () => void;
    /**
     * Subscribe to error events.
     * @returns Unsubscribe function.
     */
    onError(handler: ChatEventHandler<"error">): () => void;
    /** Release subscriptions and internal state */
    dispose(): void;
}
/**
 * Create a ReactChat instance.
 *
 * @param config - Runtime endpoint and chat options.
 */
declare function createReactChat(config: ReactChatConfig): ReactChat;
1754
+
1755
/**
 * Hook configuration.
 *
 * Same as `ReactChatConfig` but with flat callback props instead of the
 * `callbacks` object.
 */
interface UseChatConfig extends Omit<ReactChatConfig, "callbacks"> {
    /** Callback when messages change */
    onMessagesChange?: (messages: UIMessage[]) => void;
    /** Callback when error occurs (null clears a previous error) */
    onError?: (error: Error | null) => void;
    /** Callback when generation finishes */
    onFinish?: (messages: UIMessage[]) => void;
    /** Callback when tool calls are received */
    onToolCalls?: (toolCalls: UIMessage["toolCalls"]) => void;
}
/**
 * Hook return type.
 */
interface UseChatReturn {
    /** All messages */
    messages: UIMessage[];
    /** Current status */
    status: ChatStatus$1;
    /** Current error */
    error: Error | undefined;
    /** Whether loading */
    isLoading: boolean;
    /** Current input value */
    input: string;
    /** Set input value */
    setInput: (input: string) => void;
    /** Send a message */
    sendMessage: (content: string, attachments?: MessageAttachment[]) => Promise<void>;
    /** Stop generation */
    stop: () => void;
    /** Clear all messages */
    clearMessages: () => void;
    /** Set messages directly */
    setMessages: (messages: UIMessage[]) => void;
    /** Regenerate last response (or a specific message when an ID is given) */
    regenerate: (messageId?: string) => Promise<void>;
    /** Continue the conversation after supplying tool results */
    continueWithToolResults: (toolResults: Array<{
        toolCallId: string;
        result: unknown;
    }>) => Promise<void>;
    /** Reference to the ReactChat instance */
    chatRef: React.RefObject<ReactChat | null>;
}
1802
/**
 * useChat - Thin React wrapper using useSyncExternalStore.
 *
 * This hook is designed to be minimal (~100 lines like Vercel AI SDK).
 * All business logic lives in ReactChat/AbstractChat.
 *
 * @param config - Runtime endpoint, chat options, and flat callbacks.
 * @returns Messages, status, and chat actions (see `UseChatReturn`).
 *
 * @example
 * ```tsx
 * const { messages, sendMessage, status } = useChat({
 *   runtimeUrl: "/api/chat",
 * });
 *
 * return (
 *   <div>
 *     {messages.map(m => <Message key={m.id} message={m} />)}
 *     <button onClick={() => sendMessage("Hello!")}>Send</button>
 *   </div>
 * );
 * ```
 */
declare function useChat(config: UseChatConfig): UseChatReturn;
1823
+
1824
+ export { type AIContextItem, AbstractAgentLoop, AbstractChat, ActionDefinition, type AgentLoopActions, type AgentLoopCallbacks, type AgentLoopState, type CapabilitiesResponse, CapturedContext, type ChatActions, type ChatCallbacks, type ChatConfig, type ChatState, type ChatStatus, type ToolExecution as ChatToolExecution, type ToolResponse as ChatToolResponse, type CombinedChatState, CopilotConfig, type CopilotContextValue, CopilotProvider, type CopilotProviderProps, type AgentLoopState$1 as CoreAgentLoopState, type ChatState$1 as CoreChatState, type DevLoggerState, IntentDetectionResult, type KnowledgeBaseConfig, type KnowledgeBaseResult, type KnowledgeBaseSearchResponse, LLMConfig, Message, PermissionLevel, PermissionStorageAdapter, PermissionStorageConfig, type ProviderCapabilities, ReactChat, type ReactChatConfig, ReactChatState, Source, type Suggestion, ToolConsentRequest, ToolConsentResponse, ToolContext, ToolDefinition, ToolExecution$1 as ToolExecution, ToolResponse$1 as ToolResponse, ToolType, type ToolsActions, ToolsConfig, type ToolsState, type UIMessage, UnifiedToolCall, type UseAIToolsOptions, type UseAIToolsReturn, type UseAgentOptions, type UseAgentReturn, type UseChatConfig, type UseChatReturn, type UseKnowledgeBaseConfig, type UseSuggestionsOptions, type UseSuggestionsReturn, type UseToolConfig, type UseToolExecutorReturn, type UseToolWithSchemaConfig, createPermissionStorage, createReactChat, createReactChatState, createSessionPermissionCache, formatKnowledgeResultsForAI, initialAgentLoopState, searchKnowledgeBase, useAIAction, useAIActions, useAIContext, useAIContexts, useAITools, useAgent, useCapabilities, useChat, useCopilot, useDevLogger, useFeatureSupport, useKnowledgeBase, useSuggestions, useSupportedMediaTypes, useTool, useToolExecutor, useToolWithSchema, useTools, useToolsWithSchema };