ai-site-pilot 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,295 @@
1
+ # ai-site-pilot
2
+
3
+ AI chat widget that can **control and navigate your website**. Unlike typical chatbots that just answer questions, Site Pilot can take actions—scroll to sections, open modals, filter content, and more.
4
+
5
+ ## Features
6
+
7
+ - 🎯 **Tool System** - Define custom actions the AI can take on your site
8
+ - 🌊 **Streaming** - Real-time streaming responses with SSE
9
+ - 🎤 **Speech** - Voice input and text-to-speech output
10
+ - 🎨 **Themeable** - CSS variables for easy customization
11
+ - 📱 **Responsive** - Works on all screen sizes
12
+ - ⚡ **Vercel AI SDK** - Works with any LLM provider
13
+
14
+ ## Installation
15
+
16
+ ```bash
17
+ npm install ai-site-pilot
18
+ ```
19
+
20
+ ## Quick Start
21
+
22
+ ### 1. Create the API Route
23
+
24
+ ```typescript
25
+ // app/api/chat/route.ts
26
+ import { createChatHandler } from 'ai-site-pilot/api';
27
+ import { defineTool } from 'ai-site-pilot/tools';
28
+ import { google } from '@ai-sdk/google';
29
+
30
+ const navigateTool = defineTool({
31
+ name: 'navigate',
32
+ description: 'Navigate to a section of the page',
33
+ parameters: {
34
+ type: 'object',
35
+ properties: {
36
+ section: {
37
+ type: 'string',
38
+ description: 'Section to navigate to',
39
+ enum: ['home', 'products', 'about', 'contact'],
40
+ },
41
+ },
42
+ required: ['section'],
43
+ },
44
+ });
45
+
46
+ export const POST = createChatHandler({
47
+ model: google('gemini-2.0-flash'),
48
+ systemPrompt: `You are a helpful assistant for our website.
49
+ You can navigate users to different sections using the navigate tool.`,
50
+ tools: [navigateTool],
51
+ });
52
+ ```
53
+
54
+ ### 2. Add the Component
55
+
56
+ ```tsx
57
+ // app/layout.tsx or components/ChatWidget.tsx
58
+ 'use client';
59
+
60
+ import { SitePilot } from 'ai-site-pilot';
61
+ import 'ai-site-pilot/styles.css';
62
+
63
+ export function ChatWidget() {
64
+ return (
65
+ <SitePilot
66
+ apiEndpoint="/api/chat"
67
+ suggestions={[
68
+ { text: 'Show me products', icon: '🛍️' },
69
+ { text: 'Take me to contact', icon: '📧' },
70
+ ]}
71
+ onToolCall={(name, args) => {
72
+ if (name === 'navigate') {
73
+ document.getElementById(args.section as string)?.scrollIntoView({
74
+ behavior: 'smooth',
75
+ });
76
+ }
77
+ }}
78
+ />
79
+ );
80
+ }
81
+ ```
82
+
83
+ ## API Reference
84
+
85
+ ### `<SitePilot />`
86
+
87
+ Main chat widget component.
88
+
89
+ | Prop | Type | Default | Description |
90
+ |------|------|---------|-------------|
91
+ | `apiEndpoint` | `string` | required | API endpoint for chat |
92
+ | `theme` | `SitePilotTheme` | `{}` | Theme configuration |
93
+ | `suggestions` | `Suggestion[]` | `[]` | Suggestion prompts |
94
+ | `features` | `SitePilotFeatures` | `{}` | Feature toggles |
95
+ | `onToolCall` | `(name, args) => void` | - | Tool call handler |
96
+ | `defaultOpen` | `boolean` | `false` | Initial open state |
97
+ | `placeholder` | `string` | `'Type a message...'` | Input placeholder |
98
+ | `welcomeMessage` | `string` | `"Hi! I'm here to help..."` | Welcome message |
99
+
100
+ #### Theme Options
101
+
102
+ ```typescript
103
+ interface SitePilotTheme {
104
+ accent?: string; // 'amber', 'blue', or CSS color
105
+ position?: 'bottom-right' | 'bottom-left' | 'top-right' | 'top-left';
106
+ borderRadius?: number;
107
+ }
108
+ ```
109
+
110
+ #### Feature Toggles
111
+
112
+ ```typescript
113
+ interface SitePilotFeatures {
114
+ speech?: boolean; // Voice input (default: true)
115
+ tts?: boolean; // Text-to-speech (default: true)
116
+ fullscreen?: boolean; // Fullscreen mode (default: true)
117
+ suggestions?: boolean; // Show suggestions (default: true)
118
+ }
119
+ ```
120
+
121
+ ### `createChatHandler()`
122
+
123
+ Factory for creating Next.js API route handlers.
124
+
125
+ ```typescript
126
+ import { createChatHandler } from 'ai-site-pilot/api';
127
+
128
+ export const POST = createChatHandler({
129
+ model: google('gemini-2.0-flash'), // Any Vercel AI SDK model
130
+ systemPrompt: 'You are a helpful assistant...',
131
+ tools: [myTool1, myTool2],
132
+ temperature: 0.7,
133
+ maxTokens: 1000,
134
+ });
135
+ ```
136
+
137
+ ### `defineTool()`
138
+
139
+ Helper for defining tools with type safety.
140
+
141
+ ```typescript
142
+ import { defineTool } from 'ai-site-pilot/tools';
143
+
144
+ const searchTool = defineTool({
145
+ name: 'search_products',
146
+ description: 'Search for products by query',
147
+ parameters: {
148
+ type: 'object',
149
+ properties: {
150
+ query: { type: 'string', description: 'Search terms' },
151
+ category: { type: 'string', enum: ['electronics', 'clothing'] },
152
+ },
153
+ required: ['query'],
154
+ },
155
+ handler: async ({ query, category }) => {
156
+ // Client-side handler (optional)
157
+ const results = await searchProducts(query, category);
158
+ displayResults(results);
159
+ },
160
+ });
161
+ ```
162
+
163
+ ### `useChat()`
164
+
165
+ Hook for custom chat implementations.
166
+
167
+ ```typescript
168
+ import { useChat } from 'ai-site-pilot/hooks';
169
+
170
+ function MyCustomChat() {
171
+ const {
172
+ messages,
173
+ input,
174
+ setInput,
175
+ isLoading,
176
+ sendMessage,
177
+ clearMessages,
178
+ } = useChat({
179
+ apiEndpoint: '/api/chat',
180
+ onToolCall: (name, args) => {
181
+ // Handle tool calls
182
+ },
183
+ });
184
+
185
+ return (
186
+ // Your custom UI
187
+ );
188
+ }
189
+ ```
190
+
191
+ ## Styling
192
+
193
+ ### CSS Variables
194
+
195
+ Override these variables to customize the appearance:
196
+
197
+ ```css
198
+ .pilot-container {
199
+ --pilot-accent-h: 38; /* Hue */
200
+ --pilot-accent-s: 92%; /* Saturation */
201
+ --pilot-accent-l: 50%; /* Lightness */
202
+ --pilot-bg: #0F0720; /* Background */
203
+ --pilot-text: #ffffff; /* Text color */
204
+ --pilot-text-muted: #a1a1aa; /* Muted text */
205
+ --pilot-border: rgba(255, 255, 255, 0.1);
206
+ --pilot-radius: 24px;
207
+ }
208
+ ```
209
+
210
+ ### Tailwind Integration
211
+
212
+ If using Tailwind, you can extend your config:
213
+
214
+ ```javascript
215
+ // tailwind.config.js
216
+ module.exports = {
217
+ theme: {
218
+ extend: {
219
+ colors: {
220
+ pilot: {
221
+ accent: 'hsl(var(--pilot-accent-h), var(--pilot-accent-s), var(--pilot-accent-l))',
222
+ },
223
+ },
224
+ },
225
+ },
226
+ };
227
+ ```
228
+
229
+ ## Use Cases
230
+
231
+ ### E-commerce
232
+
233
+ ```typescript
234
+ const tools = [
235
+ defineTool({
236
+ name: 'search_products',
237
+ description: 'Search product catalog',
238
+ parameters: { /* ... */ },
239
+ }),
240
+ defineTool({
241
+ name: 'add_to_cart',
242
+ description: 'Add item to shopping cart',
243
+ parameters: { /* ... */ },
244
+ }),
245
+ defineTool({
246
+ name: 'show_category',
247
+ description: 'Filter products by category',
248
+ parameters: { /* ... */ },
249
+ }),
250
+ ];
251
+ ```
252
+
253
+ ### Documentation Sites
254
+
255
+ ```typescript
256
+ const tools = [
257
+ defineTool({
258
+ name: 'search_docs',
259
+ description: 'Search documentation',
260
+ parameters: { /* ... */ },
261
+ }),
262
+ defineTool({
263
+ name: 'navigate_to_page',
264
+ description: 'Go to a documentation page',
265
+ parameters: { /* ... */ },
266
+ }),
267
+ ];
268
+ ```
269
+
270
+ ### Portfolio Sites
271
+
272
+ ```typescript
273
+ const tools = [
274
+ defineTool({
275
+ name: 'open_project',
276
+ description: 'Open project details modal',
277
+ parameters: { /* ... */ },
278
+ }),
279
+ defineTool({
280
+ name: 'filter_by_category',
281
+ description: 'Filter projects by category',
282
+ parameters: { /* ... */ },
283
+ }),
284
+ ];
285
+ ```
286
+
287
+ ## Requirements
288
+
289
+ - React 18+ or React 19
290
+ - Next.js 13+ (for API routes)
291
+ - A Vercel AI SDK compatible model
292
+
293
+ ## License
294
+
295
+ MIT
@@ -0,0 +1,78 @@
1
import { LanguageModel } from 'ai';
import { T as ToolDefinition } from '../types--7jDyUM6.mjs';
import { S as StreamEvent } from '../types-DAvVRuXd.mjs';

/**
 * Factory for creating Next.js API route handlers
 */

interface ChatHandlerConfig {
  /** The AI model to use (from Vercel AI SDK) */
  model: LanguageModel;
  /** System prompt for the AI */
  systemPrompt: string;
  /** Tool definitions for the AI */
  tools?: ToolDefinition[];
  /** Temperature for response generation (0-1) */
  temperature?: number;
  /** Maximum tokens in response */
  maxTokens?: number;
}
/**
 * Create a Next.js API route handler for chat
 *
 * Works with any Vercel AI SDK compatible model including:
 * - Google Gemini (@ai-sdk/google)
 * - OpenAI (@ai-sdk/openai)
 * - Anthropic (@ai-sdk/anthropic)
 * - And more...
 *
 * @example
 * ```ts
 * // app/api/chat/route.ts
 * import { createChatHandler } from 'ai-site-pilot/api';
 * import { google } from '@ai-sdk/google';
 *
 * export const POST = createChatHandler({
 *   model: google('gemini-2.0-flash'),
 *   systemPrompt: 'You are a helpful assistant...',
 *   tools: myTools,
 * });
 * ```
 *
 * @example Using OpenAI
 * ```ts
 * import { openai } from '@ai-sdk/openai';
 *
 * export const POST = createChatHandler({
 *   model: openai('gpt-4o'),
 *   systemPrompt: 'You are a helpful assistant...',
 * });
 * ```
 */
declare function createChatHandler(config: ChatHandlerConfig): (req: Request) => Promise<Response>;

/**
 * SSE streaming utilities
 */

/**
 * Create an SSE encoder for streaming responses
 */
declare function createSSEEncoder(): {
  /** Serialize an arbitrary stream event as one SSE `data:` frame. */
  encode(event: StreamEvent): Uint8Array;
  /** Encode a `{ type: 'text', content }` frame for a text chunk. */
  encodeText(content: string): Uint8Array;
  /** Encode a `{ type: 'tool', name, args }` frame for a tool call. */
  encodeTool(name: string, args: Record<string, unknown>): Uint8Array;
  /** Encode the terminal `{ type: 'done' }` frame. */
  encodeDone(): Uint8Array;
  /** Encode a `{ type: 'error', message }` frame. */
  encodeError(message: string): Uint8Array;
};
/**
 * Create SSE response headers
 */
declare function getSSEHeaders(): HeadersInit;
/**
 * Parse SSE events from a ReadableStream
 */
declare function parseSSEStream(reader: ReadableStreamDefaultReader<Uint8Array>): AsyncGenerator<StreamEvent>;

export { type ChatHandlerConfig, createChatHandler, createSSEEncoder, getSSEHeaders, parseSSEStream };
@@ -0,0 +1,78 @@
1
import { LanguageModel } from 'ai';
import { T as ToolDefinition } from '../types--7jDyUM6.js';
import { S as StreamEvent } from '../types-DAvVRuXd.js';

/**
 * Factory for creating Next.js API route handlers
 */

interface ChatHandlerConfig {
  /** The AI model to use (from Vercel AI SDK) */
  model: LanguageModel;
  /** System prompt for the AI */
  systemPrompt: string;
  /** Tool definitions for the AI */
  tools?: ToolDefinition[];
  /** Temperature for response generation (0-1) */
  temperature?: number;
  /** Maximum tokens in response */
  maxTokens?: number;
}
/**
 * Create a Next.js API route handler for chat
 *
 * Works with any Vercel AI SDK compatible model including:
 * - Google Gemini (@ai-sdk/google)
 * - OpenAI (@ai-sdk/openai)
 * - Anthropic (@ai-sdk/anthropic)
 * - And more...
 *
 * @example
 * ```ts
 * // app/api/chat/route.ts
 * import { createChatHandler } from 'ai-site-pilot/api';
 * import { google } from '@ai-sdk/google';
 *
 * export const POST = createChatHandler({
 *   model: google('gemini-2.0-flash'),
 *   systemPrompt: 'You are a helpful assistant...',
 *   tools: myTools,
 * });
 * ```
 *
 * @example Using OpenAI
 * ```ts
 * import { openai } from '@ai-sdk/openai';
 *
 * export const POST = createChatHandler({
 *   model: openai('gpt-4o'),
 *   systemPrompt: 'You are a helpful assistant...',
 * });
 * ```
 */
declare function createChatHandler(config: ChatHandlerConfig): (req: Request) => Promise<Response>;

/**
 * SSE streaming utilities
 */

/**
 * Create an SSE encoder for streaming responses
 */
declare function createSSEEncoder(): {
  /** Serialize an arbitrary stream event as one SSE `data:` frame. */
  encode(event: StreamEvent): Uint8Array;
  /** Encode a `{ type: 'text', content }` frame for a text chunk. */
  encodeText(content: string): Uint8Array;
  /** Encode a `{ type: 'tool', name, args }` frame for a tool call. */
  encodeTool(name: string, args: Record<string, unknown>): Uint8Array;
  /** Encode the terminal `{ type: 'done' }` frame. */
  encodeDone(): Uint8Array;
  /** Encode a `{ type: 'error', message }` frame. */
  encodeError(message: string): Uint8Array;
};
/**
 * Create SSE response headers
 */
declare function getSSEHeaders(): HeadersInit;
/**
 * Parse SSE events from a ReadableStream
 */
declare function parseSSEStream(reader: ReadableStreamDefaultReader<Uint8Array>): AsyncGenerator<StreamEvent>;

export { type ChatHandlerConfig, createChatHandler, createSSEEncoder, getSSEHeaders, parseSSEStream };
@@ -0,0 +1,142 @@
1
+ 'use strict';
2
+
3
+ var ai = require('ai');
4
+
5
+ // src/api/createChatHandler.ts
6
+
7
+ // src/api/streaming.ts
8
// src/api/streaming.ts

/**
 * Build an SSE encoder whose methods serialize stream events as
 * `data: <json>\n\n` frames, returned as UTF-8 encoded Uint8Arrays.
 */
function createSSEEncoder() {
  const textEncoder = new TextEncoder();
  // Serialize any payload object into a single SSE `data:` frame.
  const frame = (payload) => textEncoder.encode(`data: ${JSON.stringify(payload)}\n\n`);
  return {
    encode(event) {
      return frame(event);
    },
    encodeText(content) {
      return frame({ type: "text", content });
    },
    encodeTool(name, args) {
      return frame({ type: "tool", name, args });
    },
    encodeDone() {
      return frame({ type: "done" });
    },
    encodeError(message) {
      return frame({ type: "error", message });
    }
  };
}
38
/**
 * Response headers required to serve a Server-Sent Events stream.
 */
function getSSEHeaders() {
  return Object.fromEntries([
    ["Content-Type", "text/event-stream"],
    ["Cache-Control", "no-cache"],
    ["Connection", "keep-alive"]
  ]);
}
45
/**
 * Decode `data: <json>` SSE frames from a byte-stream reader, yielding
 * each successfully parsed event object. Partial lines are buffered
 * across reads; frames whose payload is not valid JSON are skipped.
 */
async function* parseSSEStream(reader) {
  const decoder = new TextDecoder();
  let pending = "";
  for (;;) {
    const chunk = await reader.read();
    if (chunk.done) break;
    pending += decoder.decode(chunk.value, { stream: true });
    const pieces = pending.split("\n");
    // Keep the trailing (possibly incomplete) line for the next read.
    pending = pieces.pop() || "";
    for (const piece of pieces) {
      if (!piece.startsWith("data: ")) continue;
      try {
        yield JSON.parse(piece.slice(6));
      } catch {
        // Ignore malformed JSON payloads.
      }
    }
  }
}
65
+
66
+ // src/api/createChatHandler.ts
67
/**
 * Map tool definitions onto the `{ name: { description, parameters } }`
 * record shape that the Vercel AI SDK `tools` option expects, wrapping
 * each tool's properties/required in a JSON-schema object node.
 */
function convertToolsToAISDK(tools) {
  return tools.reduce((sdkTools, t) => {
    sdkTools[t.name] = {
      description: t.description,
      parameters: {
        type: "object",
        properties: t.parameters.properties,
        required: t.parameters.required
      }
    };
    return sdkTools;
  }, {});
}
81
/**
 * Build a POST route handler that streams a chat completion as SSE.
 *
 * config: { model, systemPrompt, tools? (default []), temperature? (default 0.7), maxTokens? }
 * Returns an async (req) => Response whose body is a ReadableStream of
 * SSE frames: text chunks first, then any tool calls, then a "done" frame.
 * Failures before streaming starts yield a 500 JSON response; failures
 * mid-stream emit an "error" frame and close the stream.
 */
function createChatHandler(config) {
  const { model, systemPrompt, tools = [], temperature = 0.7, maxTokens } = config;
  return async function POST(req) {
    try {
      // NOTE(review): body is assumed to be { messages: [{ role, content }] } —
      // parsed without validation; a malformed body falls into the catch below.
      const body = await req.json();
      const { messages } = body;
      const coreMessages = messages.map((m) => ({
        role: m.role,
        content: m.content
      }));
      const sse = createSSEEncoder();
      const stream = new ReadableStream({
        async start(controller) {
          try {
            const result = ai.streamText({
              model,
              system: systemPrompt,
              messages: coreMessages,
              temperature,
              maxTokens,
              // Omit the tools option entirely when no tools are configured.
              tools: tools.length > 0 ? convertToolsToAISDK(tools) : void 0
            });
            // Forward each text chunk to the client as it arrives.
            for await (const chunk of (await result).textStream) {
              if (chunk) {
                controller.enqueue(sse.encodeText(chunk));
              }
            }
            // After the text stream finishes, emit any tool calls the model made.
            const finalResult = await result;
            const toolCalls = await finalResult.toolCalls || [];
            for (const toolCall of toolCalls) {
              controller.enqueue(
                sse.encodeTool(toolCall.toolName, toolCall.args)
              );
            }
            controller.enqueue(sse.encodeDone());
            controller.close();
          } catch (error) {
            // Mid-stream failure: tell the client via an error frame, then close.
            console.error("Streaming error:", error);
            controller.enqueue(sse.encodeError("An error occurred during streaming"));
            controller.close();
          }
        }
      });
      return new Response(stream, {
        headers: getSSEHeaders()
      });
    } catch (error) {
      // Pre-stream failure (e.g. body parsing): plain JSON 500.
      console.error("Chat API error:", error);
      return new Response(JSON.stringify({ error: "Internal server error" }), {
        status: 500,
        headers: { "Content-Type": "application/json" }
      });
    }
  };
}
136
+
137
// Public CommonJS API of ai-site-pilot/api.
exports.createChatHandler = createChatHandler;
exports.createSSEEncoder = createSSEEncoder;
exports.getSSEHeaders = getSSEHeaders;
exports.parseSSEStream = parseSSEStream;
// Fix: the sourceMappingURL comment was emitted twice; it must appear
// exactly once, as the final line of the file.
//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/api/streaming.ts","../../src/api/createChatHandler.ts"],"names":["streamText"],"mappings":";;;;;;;AASO,SAAS,gBAAA,GAAmB;AACjC,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAEhC,EAAA,OAAO;AAAA,IACL,OAAO,KAAA,EAAgC;AACrC,MAAA,OAAO,QAAQ,MAAA,CAAO,CAAA,MAAA,EAAS,IAAA,CAAK,SAAA,CAAU,KAAK,CAAC;;AAAA,CAAM,CAAA;AAAA,IAC5D,CAAA;AAAA,IAEA,WAAW,OAAA,EAA6B;AACtC,MAAA,OAAO,OAAA,CAAQ,MAAA,CAAO,CAAA,MAAA,EAAS,IAAA,CAAK,SAAA,CAAU,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,CAAC;;AAAA,CAAM,CAAA;AAAA,IAChF,CAAA;AAAA,IAEA,UAAA,CAAW,MAAc,IAAA,EAA2C;AAClE,MAAA,OAAO,OAAA,CAAQ,MAAA,CAAO,CAAA,MAAA,EAAS,IAAA,CAAK,SAAA,CAAU,EAAE,IAAA,EAAM,MAAA,EAAQ,IAAA,EAAM,IAAA,EAAM,CAAC;;AAAA,CAAM,CAAA;AAAA,IACnF,CAAA;AAAA,IAEA,UAAA,GAAyB;AACvB,MAAA,OAAO,OAAA,CAAQ,OAAO,CAAA,MAAA,EAAS,IAAA,CAAK,UAAU,EAAE,IAAA,EAAM,MAAA,EAAQ,CAAC;;AAAA,CAAM,CAAA;AAAA,IACvE,CAAA;AAAA,IAEA,YAAY,OAAA,EAA6B;AACvC,MAAA,OAAO,OAAA,CAAQ,MAAA,CAAO,CAAA,MAAA,EAAS,IAAA,CAAK,SAAA,CAAU,EAAE,IAAA,EAAM,OAAA,EAAS,OAAA,EAAS,CAAC;;AAAA,CAAM,CAAA;AAAA,IACjF;AAAA,GACF;AACF;AAKO,SAAS,aAAA,GAA6B;AAC3C,EAAA,OAAO;AAAA,IACL,cAAA,EAAgB,mBAAA;AAAA,IAChB,eAAA,EAAiB,UAAA;AAAA,IACjB,YAAA,EAAc;AAAA,GAChB;AACF;AAKA,gBAAuB,eACrB,MAAA,EAC6B;AAC7B,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,OAAO,IAAA,EAAM;AACX,IAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAC1C,IAAA,IAAI,IAAA,EAAM;AAEV,IAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,IAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAC/B,IAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,IAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,MAAA,IAAI,IAAA,CAAK,UAAA,CAAW,QAAQ,CAAA,EAAG;AAC7B,QAAA,IAAI;AACF,UAAA,MAAM,OAAO,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AACrC,UAAA,MAAM,IAAA;AAAA,QACR,CAAA,CAAA,MAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AC9CA,SAAS,oBAAoB,KAAA,EAAyB;AACpD,EAAA,MAAM,SAAuE,EAAC;AAE9E,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAA,CAAO,IAAA,CAAK,IAAI,CAAA,GAAI;AAAA,MAClB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,UAA
A,EAAY;AAAA,QACV,IAAA,EAAM,QAAA;AAAA,QACN,UAAA,EAAY,KAAK,UAAA,CAAW,UAAA;AAAA,QAC5B,QAAA,EAAU,KAAK,UAAA,CAAW;AAAA;AAC5B,KACF;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAkCO,SAAS,kBAAkB,MAAA,EAA2B;AAC3D,EAAA,MAAM,EAAE,OAAO,YAAA,EAAc,KAAA,GAAQ,EAAC,EAAG,WAAA,GAAc,GAAA,EAAK,SAAA,EAAU,GAAI,MAAA;AAE1E,EAAA,OAAO,eAAe,KAAK,GAAA,EAAiC;AAC1D,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAQ,MAAM,GAAA,CAAI,IAAA,EAAK;AAC7B,MAAA,MAAM,EAAE,UAAS,GAAI,IAAA;AAGrB,MAAA,MAAM,YAAA,GAA8B,QAAA,CAAS,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,QACvD,MAAM,CAAA,CAAE,IAAA;AAAA,QACR,SAAS,CAAA,CAAE;AAAA,OACb,CAAE,CAAA;AAGF,MAAA,MAAM,MAAM,gBAAA,EAAiB;AAG7B,MAAA,MAAM,MAAA,GAAS,IAAI,cAAA,CAAe;AAAA,QAChC,MAAM,MAAM,UAAA,EAAY;AACtB,UAAA,IAAI;AACF,YAAA,MAAM,SAASA,aAAA,CAAW;AAAA,cACxB,KAAA;AAAA,cACA,MAAA,EAAQ,YAAA;AAAA,cACR,QAAA,EAAU,YAAA;AAAA,cACV,WAAA;AAAA,cACA,SAAA;AAAA,cACA,OAAO,KAAA,CAAM,MAAA,GAAS,CAAA,GAAI,mBAAA,CAAoB,KAAK,CAAA,GAAI,KAAA;AAAA,aACxD,CAAA;AAGD,YAAA,WAAA,MAAiB,KAAA,IAAA,CAAU,MAAM,MAAA,EAAQ,UAAA,EAAY;AACnD,cAAA,IAAI,KAAA,EAAO;AACT,gBAAA,UAAA,CAAW,OAAA,CAAQ,GAAA,CAAI,UAAA,CAAW,KAAK,CAAC,CAAA;AAAA,cAC1C;AAAA,YACF;AAGA,YAAA,MAAM,cAAc,MAAM,MAAA;AAC1B,YAAA,MAAM,SAAA,GAAa,MAAM,WAAA,CAAY,SAAA,IAAc,EAAC;AAEpD,YAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,cAAA,UAAA,CAAW,OAAA;AAAA,gBACT,GAAA,CAAI,UAAA,CAAW,QAAA,CAAS,QAAA,EAAU,SAAS,IAA+B;AAAA,eAC5E;AAAA,YACF;AAEA,YAAA,UAAA,CAAW,OAAA,CAAQ,GAAA,CAAI,UAAA,EAAY,CAAA;AACnC,YAAA,UAAA,CAAW,KAAA,EAAM;AAAA,UACnB,SAAS,KAAA,EAAO;AACd,YAAA,OAAA,CAAQ,KAAA,CAAM,oBAAoB,KAAK,CAAA;AACvC,YAAA,UAAA,CAAW,OAAA,CAAQ,GAAA,CAAI,WAAA,CAAY,oCAAoC,CAAC,CAAA;AACxE,YAAA,UAAA,CAAW,KAAA,EAAM;AAAA,UACnB;AAAA,QACF;AAAA,OACD,CAAA;AAED,MAAA,OAAO,IAAI,SAAS,MAAA,EAAQ;AAAA,QAC1B,SAAS,aAAA;AAAc,OACxB,CAAA;AAAA,IACH,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,mBAAmB,KAAK,CAAA;AACtC,MAAA,OAAO,IAAI,SAAS,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,uBAAA,EAAyB,CAAA,EAAG;AAAA,QACtE,MAAA,EAAQ,GAAA;AAAA,QACR,OAAA,EAAS,EAAE,cAAA,EAAgB,kBAAA;AAAmB,OAC/C,CAAA;AAAA,IACH;AAAA,EACF,CAAA;AACF","file":"index.js","sourcesContent":["/**\n * SSE 
streaming utilities\n */\n\nimport type { StreamEvent } from '../types';\n\n/**\n * Create an SSE encoder for streaming responses\n */\nexport function createSSEEncoder() {\n const encoder = new TextEncoder();\n\n return {\n encode(event: StreamEvent): Uint8Array {\n return encoder.encode(`data: ${JSON.stringify(event)}\\n\\n`);\n },\n\n encodeText(content: string): Uint8Array {\n return encoder.encode(`data: ${JSON.stringify({ type: 'text', content })}\\n\\n`);\n },\n\n encodeTool(name: string, args: Record<string, unknown>): Uint8Array {\n return encoder.encode(`data: ${JSON.stringify({ type: 'tool', name, args })}\\n\\n`);\n },\n\n encodeDone(): Uint8Array {\n return encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\\n\\n`);\n },\n\n encodeError(message: string): Uint8Array {\n return encoder.encode(`data: ${JSON.stringify({ type: 'error', message })}\\n\\n`);\n },\n };\n}\n\n/**\n * Create SSE response headers\n */\nexport function getSSEHeaders(): HeadersInit {\n return {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n };\n}\n\n/**\n * Parse SSE events from a ReadableStream\n */\nexport async function* parseSSEStream(\n reader: ReadableStreamDefaultReader<Uint8Array>\n): AsyncGenerator<StreamEvent> {\n const decoder = new TextDecoder();\n let buffer = '';\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n try {\n const data = JSON.parse(line.slice(6)) as StreamEvent;\n yield data;\n } catch {\n // Skip malformed JSON\n }\n }\n }\n }\n}\n","/**\n * Factory for creating Next.js API route handlers\n */\n\nimport { streamText, type CoreMessage, type LanguageModel } from 'ai';\nimport type { ToolDefinition } from '../tools/types';\nimport { createSSEEncoder, getSSEHeaders } from 
'./streaming';\n\nexport interface ChatHandlerConfig {\n /** The AI model to use (from Vercel AI SDK) */\n model: LanguageModel;\n /** System prompt for the AI */\n systemPrompt: string;\n /** Tool definitions for the AI */\n tools?: ToolDefinition[];\n /** Temperature for response generation (0-1) */\n temperature?: number;\n /** Maximum tokens in response */\n maxTokens?: number;\n}\n\ninterface RequestBody {\n messages: Array<{ role: 'user' | 'assistant'; content: string }>;\n}\n\n/**\n * Convert tool definitions to Vercel AI SDK format\n */\nfunction convertToolsToAISDK(tools: ToolDefinition[]) {\n const result: Record<string, { description: string; parameters: unknown }> = {};\n\n for (const tool of tools) {\n result[tool.name] = {\n description: tool.description,\n parameters: {\n type: 'object',\n properties: tool.parameters.properties,\n required: tool.parameters.required,\n },\n };\n }\n\n return result;\n}\n\n/**\n * Create a Next.js API route handler for chat\n *\n * Works with any Vercel AI SDK compatible model including:\n * - Google Gemini (@ai-sdk/google)\n * - OpenAI (@ai-sdk/openai)\n * - Anthropic (@ai-sdk/anthropic)\n * - And more...\n *\n * @example\n * ```ts\n * // app/api/chat/route.ts\n * import { createChatHandler } from 'ai-site-pilot/api';\n * import { google } from '@ai-sdk/google';\n *\n * export const POST = createChatHandler({\n * model: google('gemini-2.0-flash'),\n * systemPrompt: 'You are a helpful assistant...',\n * tools: myTools,\n * });\n * ```\n *\n * @example Using OpenAI\n * ```ts\n * import { openai } from '@ai-sdk/openai';\n *\n * export const POST = createChatHandler({\n * model: openai('gpt-4o'),\n * systemPrompt: 'You are a helpful assistant...',\n * });\n * ```\n */\nexport function createChatHandler(config: ChatHandlerConfig) {\n const { model, systemPrompt, tools = [], temperature = 0.7, maxTokens } = config;\n\n return async function POST(req: Request): Promise<Response> {\n try {\n const body = (await req.json()) as 
RequestBody;\n const { messages } = body;\n\n // Convert messages to CoreMessage format\n const coreMessages: CoreMessage[] = messages.map((m) => ({\n role: m.role,\n content: m.content,\n }));\n\n // Create the SSE encoder\n const sse = createSSEEncoder();\n\n // Create a readable stream for SSE\n const stream = new ReadableStream({\n async start(controller) {\n try {\n const result = streamText({\n model,\n system: systemPrompt,\n messages: coreMessages,\n temperature,\n maxTokens,\n tools: tools.length > 0 ? convertToolsToAISDK(tools) : undefined,\n });\n\n // Stream text chunks\n for await (const chunk of (await result).textStream) {\n if (chunk) {\n controller.enqueue(sse.encodeText(chunk));\n }\n }\n\n // Get tool calls from the result\n const finalResult = await result;\n const toolCalls = (await finalResult.toolCalls) || [];\n\n for (const toolCall of toolCalls) {\n controller.enqueue(\n sse.encodeTool(toolCall.toolName, toolCall.args as Record<string, unknown>)\n );\n }\n\n controller.enqueue(sse.encodeDone());\n controller.close();\n } catch (error) {\n console.error('Streaming error:', error);\n controller.enqueue(sse.encodeError('An error occurred during streaming'));\n controller.close();\n }\n },\n });\n\n return new Response(stream, {\n headers: getSSEHeaders(),\n });\n } catch (error) {\n console.error('Chat API error:', error);\n return new Response(JSON.stringify({ error: 'Internal server error' }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n });\n }\n };\n}\n"]}