@float.js/core 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,103 @@
1
/**
 * Float.js AI Module
 * Native AI integration with streaming support
 */
/**
 * Contract implemented by every chat backend (OpenAI, Anthropic, ...).
 * Providers are registered on FloatAI under their `name`.
 */
interface AIProvider {
    /** Unique registry key, e.g. "openai" or "anthropic". */
    name: string;
    /** One-shot completion. */
    chat(options: ChatOptions): Promise<AIResponse>;
    /** Incremental completion; yields text deltas as they arrive. */
    stream(options: ChatOptions): AsyncIterable<string>;
}
/** Options accepted by chat() / stream(). */
interface ChatOptions {
    /** Provider-specific model id; each provider supplies its own default. */
    model?: string;
    /** Conversation history, oldest first. */
    messages: Message[];
    temperature?: number;
    maxTokens?: number;
    /** System prompt; how it is delivered is provider-specific. */
    system?: string;
}
/** A single conversation turn. */
interface Message {
    role: 'user' | 'assistant' | 'system';
    content: string;
}
/** Normalized completion result returned by all providers. */
interface AIResponse {
    /** Generated assistant text. */
    content: string;
    /** Model id reported by the backend. */
    model: string;
    /** Token accounting; absent when the backend omits usage data. */
    usage?: {
        promptTokens: number;
        completionTokens: number;
        totalTokens: number;
    };
}
30
/**
 * OpenAI Provider
 *
 * Chat-completions client. When no `apiKey` option is supplied the
 * implementation falls back to the OPENAI_API_KEY environment variable;
 * `baseUrl` defaults to the public OpenAI endpoint.
 */
declare class OpenAIProvider implements AIProvider {
    /** Registry key: "openai". */
    name: string;
    private apiKey;
    private baseUrl;
    constructor(options?: {
        apiKey?: string;
        baseUrl?: string;
    });
    /** One-shot completion via POST {baseUrl}/chat/completions. */
    chat(options: ChatOptions): Promise<AIResponse>;
    /** Streaming completion; yields text deltas parsed from the SSE feed. */
    stream(options: ChatOptions): AsyncIterable<string>;
}
44
/**
 * Anthropic Provider
 *
 * Messages-API client. When no `apiKey` option is supplied the
 * implementation falls back to the ANTHROPIC_API_KEY environment variable;
 * `baseUrl` defaults to the public Anthropic endpoint. System-role messages
 * in the history are hoisted into the request's top-level `system` field.
 */
declare class AnthropicProvider implements AIProvider {
    /** Registry key: "anthropic". */
    name: string;
    private apiKey;
    private baseUrl;
    constructor(options?: {
        apiKey?: string;
        baseUrl?: string;
    });
    /** One-shot completion via POST {baseUrl}/messages. */
    chat(options: ChatOptions): Promise<AIResponse>;
    /** Streaming completion; yields text from content_block_delta events. */
    stream(options: ChatOptions): AsyncIterable<string>;
}
58
/**
 * AI Instance - Main entry point
 *
 * Registry of AIProvider implementations with a selectable default.
 * The implementation auto-registers OpenAI/Anthropic based on which of
 * OPENAI_API_KEY / ANTHROPIC_API_KEY are present; OpenAI wins the default
 * slot when both are set.
 */
declare class FloatAI {
    private providers;
    private defaultProvider;
    constructor();
    /** Add (or replace) a provider under its `name`. */
    register(provider: AIProvider): void;
    /** Select the default provider by name; throws when unregistered. */
    use(name: string): this;
    private getProvider;
    /**
     * Simple chat completion: wraps `prompt` in a single user message
     * and returns just the generated text.
     */
    chat(prompt: string, options?: Partial<ChatOptions>): Promise<string>;
    /**
     * Chat with message history; returns the full AIResponse.
     */
    complete(options: ChatOptions): Promise<AIResponse>;
    /**
     * Stream chat completion for a single prompt.
     */
    stream(prompt: string, options?: Partial<ChatOptions>): AsyncIterable<string>;
    /**
     * Stream with message history.
     */
    streamChat(options: ChatOptions): AsyncIterable<string>;
}
/** Shared singleton instance used throughout the framework. */
declare const ai: FloatAI;
86
/**
 * Create a streaming response for API routes
 *
 * Wraps an async iterable of text chunks as a chunked text/plain Response;
 * extra headers are merged over the defaults.
 */
declare function streamResponse(iterable: AsyncIterable<string>, options?: {
    headers?: Record<string, string>;
}): Response;
/**
 * Create a Server-Sent Events response
 *
 * Each chunk becomes a JSON-encoded `data:` line; the stream ends with a
 * `data: [DONE]` sentinel.
 */
declare function sseResponse(iterable: AsyncIterable<string>, options?: {
    headers?: Record<string, string>;
}): Response;
/**
 * AI Action decorator for type-safe AI endpoints
 *
 * The handler receives the request's parsed JSON body. An async-iterable
 * result is streamed via streamResponse; a promise result is returned as
 * JSON `{ content }`. Errors produce a 500 JSON body.
 */
declare function aiAction<T extends Record<string, unknown>>(handler: (input: T) => Promise<string> | AsyncIterable<string>): (request: Request) => Promise<Response>;
102
+
103
+ export { type AIProvider, type AIResponse, AnthropicProvider, type ChatOptions, type Message, OpenAIProvider, ai, aiAction, sseResponse, streamResponse };
@@ -0,0 +1,293 @@
1
+ // src/ai/index.ts
2
/**
 * OpenAI chat-completions provider.
 * Falls back to the OPENAI_API_KEY environment variable when no apiKey
 * option is supplied; baseUrl defaults to the public OpenAI endpoint.
 */
var OpenAIProvider = class {
  name = "openai";
  apiKey;
  baseUrl;
  constructor(options = {}) {
    this.apiKey = options.apiKey || process.env.OPENAI_API_KEY || "";
    this.baseUrl = options.baseUrl || "https://api.openai.com/v1";
  }
  /**
   * One-shot completion via POST {baseUrl}/chat/completions.
   * @throws {Error} on a non-2xx response, including status and body text
   *   (previously an API error crashed with an opaque read of
   *   `data.choices[0]` on the error payload).
   */
  async chat(options) {
    const response = await fetch(`${this.baseUrl}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.apiKey}`
      },
      body: JSON.stringify({
        model: options.model || "gpt-4o-mini",
        // A `system` option is injected as the leading system message.
        messages: options.system ? [{ role: "system", content: options.system }, ...options.messages] : options.messages,
        temperature: options.temperature ?? 0.7,
        max_tokens: options.maxTokens
      })
    });
    if (!response.ok) {
      const detail = await response.text().catch(() => "");
      throw new Error(`OpenAI API error ${response.status}: ${detail}`);
    }
    const data = await response.json();
    return {
      content: data.choices[0].message.content,
      model: data.model,
      usage: data.usage ? {
        promptTokens: data.usage.prompt_tokens,
        completionTokens: data.usage.completion_tokens,
        totalTokens: data.usage.total_tokens
      } : void 0
    };
  }
  /**
   * Streaming completion; yields text deltas parsed from the SSE feed.
   * @throws {Error} on a non-2xx response or a missing response body.
   */
  async *stream(options) {
    const response = await fetch(`${this.baseUrl}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.apiKey}`
      },
      body: JSON.stringify({
        model: options.model || "gpt-4o-mini",
        messages: options.system ? [{ role: "system", content: options.system }, ...options.messages] : options.messages,
        temperature: options.temperature ?? 0.7,
        max_tokens: options.maxTokens,
        stream: true
      })
    });
    if (!response.ok) {
      const detail = await response.text().catch(() => "");
      throw new Error(`OpenAI API error ${response.status}: ${detail}`);
    }
    const reader = response.body?.getReader();
    if (!reader) throw new Error("No response body");
    const decoder = new TextDecoder();
    let buffer = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // Keep any partial trailing line buffered until the next chunk.
        const lines = buffer.split("\n");
        buffer = lines.pop() || "";
        for (const line of lines) {
          if (line.startsWith("data: ")) {
            const data = line.slice(6);
            if (data === "[DONE]") return;
            try {
              const parsed = JSON.parse(data);
              const content = parsed.choices[0]?.delta?.content;
              if (content) yield content;
            } catch {
              // Skip keep-alive / non-JSON lines.
            }
          }
        }
      }
    } finally {
      // Release the connection even when we return early on [DONE] or the
      // consumer abandons the generator (previously the reader leaked).
      await reader.cancel().catch(() => {});
    }
  }
};
75
/**
 * Anthropic Messages API provider.
 * Falls back to the ANTHROPIC_API_KEY environment variable when no apiKey
 * option is supplied. System-role messages are removed from the history and
 * the first one is promoted to the top-level `system` field when no explicit
 * `system` option is given.
 */
var AnthropicProvider = class {
  name = "anthropic";
  apiKey;
  baseUrl;
  constructor(options = {}) {
    this.apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || "";
    this.baseUrl = options.baseUrl || "https://api.anthropic.com/v1";
  }
  /**
   * One-shot completion via POST {baseUrl}/messages.
   * @throws {Error} on a non-2xx response, including status and body text
   *   (previously an API error crashed with an opaque read of
   *   `data.content[0]` on the error payload).
   */
  async chat(options) {
    const response = await fetch(`${this.baseUrl}/messages`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": this.apiKey,
        "anthropic-version": "2023-06-01"
      },
      body: JSON.stringify({
        model: options.model || "claude-3-5-sonnet-20241022",
        // System turns are hoisted out of the history into `system`.
        messages: options.messages.filter((m) => m.role !== "system"),
        system: options.system || options.messages.find((m) => m.role === "system")?.content,
        max_tokens: options.maxTokens || 4096,
        temperature: options.temperature ?? 0.7
      })
    });
    if (!response.ok) {
      const detail = await response.text().catch(() => "");
      throw new Error(`Anthropic API error ${response.status}: ${detail}`);
    }
    const data = await response.json();
    return {
      content: data.content[0].text,
      model: data.model,
      usage: data.usage ? {
        promptTokens: data.usage.input_tokens,
        completionTokens: data.usage.output_tokens,
        totalTokens: data.usage.input_tokens + data.usage.output_tokens
      } : void 0
    };
  }
  /**
   * Streaming completion; yields text from content_block_delta events.
   * @throws {Error} on a non-2xx response or a missing response body.
   */
  async *stream(options) {
    const response = await fetch(`${this.baseUrl}/messages`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": this.apiKey,
        "anthropic-version": "2023-06-01"
      },
      body: JSON.stringify({
        model: options.model || "claude-3-5-sonnet-20241022",
        messages: options.messages.filter((m) => m.role !== "system"),
        system: options.system || options.messages.find((m) => m.role === "system")?.content,
        max_tokens: options.maxTokens || 4096,
        temperature: options.temperature ?? 0.7,
        stream: true
      })
    });
    if (!response.ok) {
      const detail = await response.text().catch(() => "");
      throw new Error(`Anthropic API error ${response.status}: ${detail}`);
    }
    const reader = response.body?.getReader();
    if (!reader) throw new Error("No response body");
    const decoder = new TextDecoder();
    let buffer = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // Keep any partial trailing line buffered until the next chunk.
        const lines = buffer.split("\n");
        buffer = lines.pop() || "";
        for (const line of lines) {
          if (line.startsWith("data: ")) {
            try {
              const parsed = JSON.parse(line.slice(6));
              if (parsed.type === "content_block_delta") {
                yield parsed.delta.text;
              }
            } catch {
              // Skip keep-alive / non-JSON lines.
            }
          }
        }
      }
    } finally {
      // Release the connection even when the consumer abandons the
      // generator mid-stream (previously the reader leaked).
      await reader.cancel().catch(() => {});
    }
  }
};
151
/**
 * FloatAI — provider registry and main entry point.
 * Providers are auto-registered from API keys found in the environment;
 * OpenAI takes the default slot when both keys are present.
 */
var FloatAI = class {
  providers = /* @__PURE__ */ new Map();
  defaultProvider = "openai";
  constructor() {
    const hasOpenAI = !!process.env.OPENAI_API_KEY;
    const hasAnthropic = !!process.env.ANTHROPIC_API_KEY;
    if (hasOpenAI) {
      this.register(new OpenAIProvider());
      this.defaultProvider = "openai";
    }
    if (hasAnthropic) {
      this.register(new AnthropicProvider());
      if (!hasOpenAI) this.defaultProvider = "anthropic";
    }
  }
  /** Add (or replace) a provider under its own name. */
  register(provider) {
    this.providers.set(provider.name, provider);
  }
  /** Select the default provider by name; throws when unregistered. */
  use(name) {
    if (!this.providers.has(name)) {
      throw new Error(`AI provider "${name}" not registered`);
    }
    this.defaultProvider = name;
    return this;
  }
  /** Resolve the active provider or fail with setup guidance. */
  getProvider() {
    const active = this.providers.get(this.defaultProvider);
    if (!active) {
      throw new Error(`No AI provider configured. Set OPENAI_API_KEY or ANTHROPIC_API_KEY`);
    }
    return active;
  }
  /**
   * Simple chat completion
   * Wraps the prompt in a single user message and returns just the text.
   */
  async chat(prompt, options = {}) {
    const turn = { role: "user", content: prompt };
    const result = await this.getProvider().chat({ ...options, messages: [turn] });
    return result.content;
  }
  /**
   * Chat with message history
   */
  async complete(options) {
    return this.getProvider().chat(options);
  }
  /**
   * Stream chat completion
   */
  stream(prompt, options = {}) {
    const turn = { role: "user", content: prompt };
    return this.getProvider().stream({ ...options, messages: [turn] });
  }
  /**
   * Stream with message history
   */
  streamChat(options) {
    return this.getProvider().stream(options);
  }
};
// Shared singleton used by the framework.
var ai = new FloatAI();
216
/**
 * Wrap an async iterable of text chunks in a chunked text/plain Response
 * suitable for returning from an API route. Extra headers are merged over
 * the defaults.
 */
function streamResponse(iterable, options = {}) {
  const encoder = new TextEncoder();
  const body = new ReadableStream({
    async start(controller) {
      try {
        for await (const piece of iterable) {
          controller.enqueue(encoder.encode(piece));
        }
        controller.close();
      } catch (err) {
        controller.error(err);
      }
    }
  });
  const headers = {
    "Content-Type": "text/plain; charset=utf-8",
    "Transfer-Encoding": "chunked",
    "Cache-Control": "no-cache",
    ...options.headers
  };
  return new Response(body, { headers });
}
239
/**
 * Wrap an async iterable of text chunks in a Server-Sent Events Response.
 * Each chunk is JSON-encoded into a `data:` line and a final `data: [DONE]`
 * sentinel is emitted before the stream closes.
 */
function sseResponse(iterable, options = {}) {
  const encoder = new TextEncoder();
  const body = new ReadableStream({
    async start(controller) {
      try {
        for await (const piece of iterable) {
          controller.enqueue(encoder.encode(`data: ${JSON.stringify(piece)}\n\n`));
        }
        controller.enqueue(encoder.encode("data: [DONE]\n\n"));
        controller.close();
      } catch (err) {
        controller.error(err);
      }
    }
  });
  const headers = {
    "Content-Type": "text/event-stream",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    ...options.headers
  };
  return new Response(body, { headers });
}
265
/**
 * Wrap an AI handler as a (Request) => Response endpoint.
 * The handler receives the request's parsed JSON body; an async-iterable
 * result is streamed via streamResponse, a promise result is returned as
 * JSON `{ content }`. Errors produce a 500 JSON body.
 *
 * Fix: non-Error throws (strings, plain objects) previously read
 * `error.message` as `undefined`, serialising the body to `{}`; they are
 * now stringified so the client always receives an `error` field.
 */
function aiAction(handler) {
  return async (request) => {
    try {
      const input = await request.json();
      const result = handler(input);
      // Async iterables stream; promises resolve to a plain JSON payload.
      if (Symbol.asyncIterator in Object(result)) {
        return streamResponse(result);
      }
      const content = await result;
      return new Response(JSON.stringify({ content }), {
        headers: { "Content-Type": "application/json" }
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      return new Response(
        JSON.stringify({ error: message }),
        { status: 500, headers: { "Content-Type": "application/json" } }
      );
    }
  };
}
285
+ export {
286
+ AnthropicProvider,
287
+ OpenAIProvider,
288
+ ai,
289
+ aiAction,
290
+ sseResponse,
291
+ streamResponse
292
+ };
293
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/ai/index.ts"],"sourcesContent":["/**\n * Float.js AI Module\n * Native AI integration with streaming support\n */\n\nexport interface AIProvider {\n name: string;\n chat(options: ChatOptions): Promise<AIResponse>;\n stream(options: ChatOptions): AsyncIterable<string>;\n}\n\nexport interface ChatOptions {\n model?: string;\n messages: Message[];\n temperature?: number;\n maxTokens?: number;\n system?: string;\n}\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system';\n content: string;\n}\n\nexport interface AIResponse {\n content: string;\n model: string;\n usage?: {\n promptTokens: number;\n completionTokens: number;\n totalTokens: number;\n };\n}\n\n/**\n * OpenAI Provider\n */\nexport class OpenAIProvider implements AIProvider {\n name = 'openai';\n private apiKey: string;\n private baseUrl: string;\n\n constructor(options: { apiKey?: string; baseUrl?: string } = {}) {\n this.apiKey = options.apiKey || process.env.OPENAI_API_KEY || '';\n this.baseUrl = options.baseUrl || 'https://api.openai.com/v1';\n }\n\n async chat(options: ChatOptions): Promise<AIResponse> {\n const response = await fetch(`${this.baseUrl}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this.apiKey}`,\n },\n body: JSON.stringify({\n model: options.model || 'gpt-4o-mini',\n messages: options.system \n ? [{ role: 'system', content: options.system }, ...options.messages]\n : options.messages,\n temperature: options.temperature ?? 0.7,\n max_tokens: options.maxTokens,\n }),\n });\n\n const data = await response.json();\n \n return {\n content: data.choices[0].message.content,\n model: data.model,\n usage: data.usage ? 
{\n promptTokens: data.usage.prompt_tokens,\n completionTokens: data.usage.completion_tokens,\n totalTokens: data.usage.total_tokens,\n } : undefined,\n };\n }\n\n async *stream(options: ChatOptions): AsyncIterable<string> {\n const response = await fetch(`${this.baseUrl}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this.apiKey}`,\n },\n body: JSON.stringify({\n model: options.model || 'gpt-4o-mini',\n messages: options.system \n ? [{ role: 'system', content: options.system }, ...options.messages]\n : options.messages,\n temperature: options.temperature ?? 0.7,\n max_tokens: options.maxTokens,\n stream: true,\n }),\n });\n\n const reader = response.body?.getReader();\n if (!reader) throw new Error('No response body');\n\n const decoder = new TextDecoder();\n let buffer = '';\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n const data = line.slice(6);\n if (data === '[DONE]') return;\n \n try {\n const parsed = JSON.parse(data);\n const content = parsed.choices[0]?.delta?.content;\n if (content) yield content;\n } catch {\n // Skip invalid JSON\n }\n }\n }\n }\n }\n}\n\n/**\n * Anthropic Provider\n */\nexport class AnthropicProvider implements AIProvider {\n name = 'anthropic';\n private apiKey: string;\n private baseUrl: string;\n\n constructor(options: { apiKey?: string; baseUrl?: string } = {}) {\n this.apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || '';\n this.baseUrl = options.baseUrl || 'https://api.anthropic.com/v1';\n }\n\n async chat(options: ChatOptions): Promise<AIResponse> {\n const response = await fetch(`${this.baseUrl}/messages`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'x-api-key': this.apiKey,\n 
'anthropic-version': '2023-06-01',\n },\n body: JSON.stringify({\n model: options.model || 'claude-3-5-sonnet-20241022',\n messages: options.messages.filter(m => m.role !== 'system'),\n system: options.system || options.messages.find(m => m.role === 'system')?.content,\n max_tokens: options.maxTokens || 4096,\n temperature: options.temperature ?? 0.7,\n }),\n });\n\n const data = await response.json();\n \n return {\n content: data.content[0].text,\n model: data.model,\n usage: data.usage ? {\n promptTokens: data.usage.input_tokens,\n completionTokens: data.usage.output_tokens,\n totalTokens: data.usage.input_tokens + data.usage.output_tokens,\n } : undefined,\n };\n }\n\n async *stream(options: ChatOptions): AsyncIterable<string> {\n const response = await fetch(`${this.baseUrl}/messages`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'x-api-key': this.apiKey,\n 'anthropic-version': '2023-06-01',\n },\n body: JSON.stringify({\n model: options.model || 'claude-3-5-sonnet-20241022',\n messages: options.messages.filter(m => m.role !== 'system'),\n system: options.system || options.messages.find(m => m.role === 'system')?.content,\n max_tokens: options.maxTokens || 4096,\n temperature: options.temperature ?? 
0.7,\n stream: true,\n }),\n });\n\n const reader = response.body?.getReader();\n if (!reader) throw new Error('No response body');\n\n const decoder = new TextDecoder();\n let buffer = '';\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n try {\n const parsed = JSON.parse(line.slice(6));\n if (parsed.type === 'content_block_delta') {\n yield parsed.delta.text;\n }\n } catch {\n // Skip invalid JSON\n }\n }\n }\n }\n }\n}\n\n/**\n * AI Instance - Main entry point\n */\nclass FloatAI {\n private providers: Map<string, AIProvider> = new Map();\n private defaultProvider: string = 'openai';\n\n constructor() {\n // Auto-register providers based on available API keys\n if (process.env.OPENAI_API_KEY) {\n this.register(new OpenAIProvider());\n this.defaultProvider = 'openai';\n }\n if (process.env.ANTHROPIC_API_KEY) {\n this.register(new AnthropicProvider());\n if (!process.env.OPENAI_API_KEY) {\n this.defaultProvider = 'anthropic';\n }\n }\n }\n\n register(provider: AIProvider): void {\n this.providers.set(provider.name, provider);\n }\n\n use(name: string): this {\n if (!this.providers.has(name)) {\n throw new Error(`AI provider \"${name}\" not registered`);\n }\n this.defaultProvider = name;\n return this;\n }\n\n private getProvider(): AIProvider {\n const provider = this.providers.get(this.defaultProvider);\n if (!provider) {\n throw new Error(`No AI provider configured. 
Set OPENAI_API_KEY or ANTHROPIC_API_KEY`);\n }\n return provider;\n }\n\n /**\n * Simple chat completion\n */\n async chat(prompt: string, options: Partial<ChatOptions> = {}): Promise<string> {\n const response = await this.getProvider().chat({\n ...options,\n messages: [{ role: 'user', content: prompt }],\n });\n return response.content;\n }\n\n /**\n * Chat with message history\n */\n async complete(options: ChatOptions): Promise<AIResponse> {\n return this.getProvider().chat(options);\n }\n\n /**\n * Stream chat completion\n */\n stream(prompt: string, options: Partial<ChatOptions> = {}): AsyncIterable<string> {\n return this.getProvider().stream({\n ...options,\n messages: [{ role: 'user', content: prompt }],\n });\n }\n\n /**\n * Stream with message history\n */\n streamChat(options: ChatOptions): AsyncIterable<string> {\n return this.getProvider().stream(options);\n }\n}\n\n// Singleton instance\nexport const ai = new FloatAI();\n\n/**\n * Create a streaming response for API routes\n */\nexport function streamResponse(\n iterable: AsyncIterable<string>,\n options: { headers?: Record<string, string> } = {}\n): Response {\n const encoder = new TextEncoder();\n \n const stream = new ReadableStream({\n async start(controller) {\n try {\n for await (const chunk of iterable) {\n controller.enqueue(encoder.encode(chunk));\n }\n controller.close();\n } catch (error) {\n controller.error(error);\n }\n },\n });\n\n return new Response(stream, {\n headers: {\n 'Content-Type': 'text/plain; charset=utf-8',\n 'Transfer-Encoding': 'chunked',\n 'Cache-Control': 'no-cache',\n ...options.headers,\n },\n });\n}\n\n/**\n * Create a Server-Sent Events response\n */\nexport function sseResponse(\n iterable: AsyncIterable<string>,\n options: { headers?: Record<string, string> } = {}\n): Response {\n const encoder = new TextEncoder();\n \n const stream = new ReadableStream({\n async start(controller) {\n try {\n for await (const chunk of iterable) {\n 
controller.enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\\n\\n`));\n }\n controller.enqueue(encoder.encode('data: [DONE]\\n\\n'));\n controller.close();\n } catch (error) {\n controller.error(error);\n }\n },\n });\n\n return new Response(stream, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n ...options.headers,\n },\n });\n}\n\n/**\n * AI Action decorator for type-safe AI endpoints\n */\nexport function aiAction<T extends Record<string, unknown>>(\n handler: (input: T) => Promise<string> | AsyncIterable<string>\n) {\n return async (request: Request): Promise<Response> => {\n try {\n const input = await request.json() as T;\n const result = handler(input);\n\n if (Symbol.asyncIterator in Object(result)) {\n return streamResponse(result as AsyncIterable<string>);\n }\n\n const content = await (result as Promise<string>);\n return new Response(JSON.stringify({ content }), {\n headers: { 'Content-Type': 'application/json' },\n });\n } catch (error) {\n return new Response(\n JSON.stringify({ error: (error as Error).message }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n );\n }\n 
};\n}\n"],"mappings":";AAqCO,IAAM,iBAAN,MAA2C;AAAA,EAChD,OAAO;AAAA,EACC;AAAA,EACA;AAAA,EAER,YAAY,UAAiD,CAAC,GAAG;AAC/D,SAAK,SAAS,QAAQ,UAAU,QAAQ,IAAI,kBAAkB;AAC9D,SAAK,UAAU,QAAQ,WAAW;AAAA,EACpC;AAAA,EAEA,MAAM,KAAK,SAA2C;AACpD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,qBAAqB;AAAA,MAC/D,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB,UAAU,KAAK,MAAM;AAAA,MACxC;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO,QAAQ,SAAS;AAAA,QACxB,UAAU,QAAQ,SACd,CAAC,EAAE,MAAM,UAAU,SAAS,QAAQ,OAAO,GAAG,GAAG,QAAQ,QAAQ,IACjE,QAAQ;AAAA,QACZ,aAAa,QAAQ,eAAe;AAAA,QACpC,YAAY,QAAQ;AAAA,MACtB,CAAC;AAAA,IACH,CAAC;AAED,UAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,WAAO;AAAA,MACL,SAAS,KAAK,QAAQ,CAAC,EAAE,QAAQ;AAAA,MACjC,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK,QAAQ;AAAA,QAClB,cAAc,KAAK,MAAM;AAAA,QACzB,kBAAkB,KAAK,MAAM;AAAA,QAC7B,aAAa,KAAK,MAAM;AAAA,MAC1B,IAAI;AAAA,IACN;AAAA,EACF;AAAA,EAEA,OAAO,OAAO,SAA6C;AACzD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,qBAAqB;AAAA,MAC/D,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB,UAAU,KAAK,MAAM;AAAA,MACxC;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO,QAAQ,SAAS;AAAA,QACxB,UAAU,QAAQ,SACd,CAAC,EAAE,MAAM,UAAU,SAAS,QAAQ,OAAO,GAAG,GAAG,QAAQ,QAAQ,IACjE,QAAQ;AAAA,QACZ,aAAa,QAAQ,eAAe;AAAA,QACpC,YAAY,QAAQ;AAAA,QACpB,QAAQ;AAAA,MACV,CAAC;AAAA,IACH,CAAC;AAED,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,QAAI,CAAC,OAAQ,OAAM,IAAI,MAAM,kBAAkB;AAE/C,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,SAAS;AAEb,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,KAAM;AAEV,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,YAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,eAAS,MAAM,IAAI,KAAK;AAExB,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,gBAAM,OAAO,KAAK,MAAM,CAAC;AACzB,cAAI,SAAS,SAAU;AAEvB,cAAI;AACF,kBAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,kBAAM,UAAU,OAAO,QAAQ,CAAC,GAAG,OAAO;AAC1C,gBAAI,QAAS,OAAM;AAAA,UACrB,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAKO,IAAM,oBAAN,MAA8C;AAAA,EACnD,OAAO;AAAA,EACC;AAAA,EACA;AAAA,EAER,YAAY,UAAiD,CAAC,GAAG;AAC/D,SAAK,SAAS,QAAQ,UAAU,QAAQ,IAAI,qBAAqB;AACjE,SAAK,UAAU,QAAQ,WAAW;AAAA,EACpC;AAAA,EAEA,MAAM,KAAK,SAA2C;AACpD
,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,aAAa;AAAA,MACvD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,aAAa,KAAK;AAAA,QAClB,qBAAqB;AAAA,MACvB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO,QAAQ,SAAS;AAAA,QACxB,UAAU,QAAQ,SAAS,OAAO,OAAK,EAAE,SAAS,QAAQ;AAAA,QAC1D,QAAQ,QAAQ,UAAU,QAAQ,SAAS,KAAK,OAAK,EAAE,SAAS,QAAQ,GAAG;AAAA,QAC3E,YAAY,QAAQ,aAAa;AAAA,QACjC,aAAa,QAAQ,eAAe;AAAA,MACtC,CAAC;AAAA,IACH,CAAC;AAED,UAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,WAAO;AAAA,MACL,SAAS,KAAK,QAAQ,CAAC,EAAE;AAAA,MACzB,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK,QAAQ;AAAA,QAClB,cAAc,KAAK,MAAM;AAAA,QACzB,kBAAkB,KAAK,MAAM;AAAA,QAC7B,aAAa,KAAK,MAAM,eAAe,KAAK,MAAM;AAAA,MACpD,IAAI;AAAA,IACN;AAAA,EACF;AAAA,EAEA,OAAO,OAAO,SAA6C;AACzD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,aAAa;AAAA,MACvD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,aAAa,KAAK;AAAA,QAClB,qBAAqB;AAAA,MACvB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO,QAAQ,SAAS;AAAA,QACxB,UAAU,QAAQ,SAAS,OAAO,OAAK,EAAE,SAAS,QAAQ;AAAA,QAC1D,QAAQ,QAAQ,UAAU,QAAQ,SAAS,KAAK,OAAK,EAAE,SAAS,QAAQ,GAAG;AAAA,QAC3E,YAAY,QAAQ,aAAa;AAAA,QACjC,aAAa,QAAQ,eAAe;AAAA,QACpC,QAAQ;AAAA,MACV,CAAC;AAAA,IACH,CAAC;AAED,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,QAAI,CAAC,OAAQ,OAAM,IAAI,MAAM,kBAAkB;AAE/C,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,SAAS;AAEb,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,KAAM;AAEV,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,YAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,eAAS,MAAM,IAAI,KAAK;AAExB,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,cAAI;AACF,kBAAM,SAAS,KAAK,MAAM,KAAK,MAAM,CAAC,CAAC;AACvC,gBAAI,OAAO,SAAS,uBAAuB;AACzC,oBAAM,OAAO,MAAM;AAAA,YACrB;AAAA,UACF,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAKA,IAAM,UAAN,MAAc;AAAA,EACJ,YAAqC,oBAAI,IAAI;AAAA,EAC7C,kBAA0B;AAAA,EAElC,cAAc;AAEZ,QAAI,QAAQ,IAAI,gBAAgB;AAC9B,WAAK,SAAS,IAAI,eAAe,CAAC;AAClC,WAAK,kBAAkB;AAAA,IACzB;AACA,QAAI,QAAQ,IAAI,mBAAmB;AACjC,WAAK,SAAS,IAAI,kBAAkB,CAAC;AACrC,UAAI,CAAC,QAAQ,IAAI,gBAAgB;AAC/B,aAAK,kBAAkB;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,UAAU,IAAI,SAAS,MAAM,QAAQ;AAAA,EAC5C;AAAA,EA
EA,IAAI,MAAoB;AACtB,QAAI,CAAC,KAAK,UAAU,IAAI,IAAI,GAAG;AAC7B,YAAM,IAAI,MAAM,gBAAgB,IAAI,kBAAkB;AAAA,IACxD;AACA,SAAK,kBAAkB;AACvB,WAAO;AAAA,EACT;AAAA,EAEQ,cAA0B;AAChC,UAAM,WAAW,KAAK,UAAU,IAAI,KAAK,eAAe;AACxD,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,oEAAoE;AAAA,IACtF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAAgB,UAAgC,CAAC,GAAoB;AAC9E,UAAM,WAAW,MAAM,KAAK,YAAY,EAAE,KAAK;AAAA,MAC7C,GAAG;AAAA,MACH,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AACD,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,SAA2C;AACxD,WAAO,KAAK,YAAY,EAAE,KAAK,OAAO;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,QAAgB,UAAgC,CAAC,GAA0B;AAChF,WAAO,KAAK,YAAY,EAAE,OAAO;AAAA,MAC/B,GAAG;AAAA,MACH,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAA6C;AACtD,WAAO,KAAK,YAAY,EAAE,OAAO,OAAO;AAAA,EAC1C;AACF;AAGO,IAAM,KAAK,IAAI,QAAQ;AAKvB,SAAS,eACd,UACA,UAAgD,CAAC,GACvC;AACV,QAAM,UAAU,IAAI,YAAY;AAEhC,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,MAAM,YAAY;AACtB,UAAI;AACF,yBAAiB,SAAS,UAAU;AAClC,qBAAW,QAAQ,QAAQ,OAAO,KAAK,CAAC;AAAA,QAC1C;AACA,mBAAW,MAAM;AAAA,MACnB,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,qBAAqB;AAAA,MACrB,iBAAiB;AAAA,MACjB,GAAG,QAAQ;AAAA,IACb;AAAA,EACF,CAAC;AACH;AAKO,SAAS,YACd,UACA,UAAgD,CAAC,GACvC;AACV,QAAM,UAAU,IAAI,YAAY;AAEhC,QAAM,SAAS,IAAI,eAAe;AAAA,IAChC,MAAM,MAAM,YAAY;AACtB,UAAI;AACF,yBAAiB,SAAS,UAAU;AAClC,qBAAW,QAAQ,QAAQ,OAAO,SAAS,KAAK,UAAU,KAAK,CAAC;AAAA;AAAA,CAAM,CAAC;AAAA,QACzE;AACA,mBAAW,QAAQ,QAAQ,OAAO,kBAAkB,CAAC;AACrD,mBAAW,MAAM;AAAA,MACnB,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB;AAAA,MACjB,cAAc;AAAA,MACd,GAAG,QAAQ;AAAA,IACb;AAAA,EACF,CAAC;AACH;AAKO,SAAS,SACd,SACA;AACA,SAAO,OAAO,YAAwC;AACpD,QAAI;AACF,YAAM,QAAQ,MAAM,QAAQ,KAAK;AACjC,YAAM,SAAS,QAAQ,KAAK;AAE5B,UAAI,OAAO,iBAAiB,OAAO,MAAM,GAAG;AAC1C,eAAO,eAAe,MAA+B;AAAA,MACvD;AAEA,YAAM,UAAU,MA
AO;AACvB,aAAO,IAAI,SAAS,KAAK,UAAU,EAAE,QAAQ,CAAC,GAAG;AAAA,QAC/C,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAChD,CAAC;AAAA,IACH,SAAS,OAAO;AACd,aAAO,IAAI;AAAA,QACT,KAAK,UAAU,EAAE,OAAQ,MAAgB,QAAQ,CAAC;AAAA,QAClD,EAAE,QAAQ,KAAK,SAAS,EAAE,gBAAgB,mBAAmB,EAAE;AAAA,MACjE;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,193 @@
1
/**
 * Float.js - Built-in Analytics
 *
 * Zero-config privacy-focused analytics built into the framework.
 * No external dependencies, no cookies, GDPR-compliant by default.
 */
/** A single recorded page view. */
interface PageView {
    id: string;
    /* NOTE(review): presumably epoch milliseconds — confirm against the implementation. */
    timestamp: number;
    pathname: string;
    referrer?: string;
    userAgent?: string;
    country?: string;
    /* Device class derived from the user agent. */
    device: 'desktop' | 'mobile' | 'tablet' | 'unknown';
    browser: string;
    os: string;
    sessionId: string;
    /* Time on page; absent until/unless it can be measured. */
    duration?: number;
}
/** One Web Vitals report for a page. */
interface WebVitals {
    id: string;
    timestamp: number;
    pathname: string;
    /* Each metric is optional — the client reports whatever it observed. */
    metrics: {
        FCP?: number;
        LCP?: number;
        FID?: number;
        CLS?: number;
        TTFB?: number;
        INP?: number;
    };
}
/** A named application-defined event. */
interface CustomEvent {
    id: string;
    timestamp: number;
    name: string;
    pathname: string;
    /* Flat scalar payload only — keeps events serialisable and privacy-auditable. */
    properties?: Record<string, string | number | boolean>;
    sessionId: string;
}
/** Raw event buffers handed to the flush handler. */
interface AnalyticsData {
    pageviews: PageView[];
    vitals: WebVitals[];
    events: CustomEvent[];
}
/** Engine configuration; all fields have defaults in the implementation. */
interface AnalyticsConfig {
    /** Enable/disable analytics */
    enabled: boolean;
    /** Ignore paths (regex patterns) */
    ignorePaths: (string | RegExp)[];
    /** Max events in memory before flush */
    maxBufferSize: number;
    /** Flush interval in ms */
    flushInterval: number;
    /** Custom event handler */
    onFlush?: (data: AnalyticsData) => Promise<void>;
    /** Hash IP addresses for privacy */
    hashIPs: boolean;
    /** Track web vitals */
    trackVitals: boolean;
    /** Session timeout in minutes */
    sessionTimeout: number;
    /** GeoIP lookup */
    geoIP: boolean;
}
/** Aggregated report produced by AnalyticsEngine.getSummary(). */
interface AnalyticsSummary {
    /* Time window the summary covers. */
    period: {
        start: Date;
        end: Date;
    };
    pageviews: {
        total: number;
        unique: number;
        byPath: Record<string, number>;
        byReferrer: Record<string, number>;
        byDevice: Record<string, number>;
        byBrowser: Record<string, number>;
        byCountry: Record<string, number>;
    };
    /* Averages and 75th-percentile values for core metrics. */
    vitals: {
        avgFCP: number;
        avgLCP: number;
        avgFID: number;
        avgCLS: number;
        avgTTFB: number;
        p75LCP: number;
        p75FID: number;
        p75CLS: number;
    };
    events: {
        total: number;
        byName: Record<string, number>;
    };
    sessions: {
        total: number;
        avgDuration: number;
        bounceRate: number;
    };
}
100
/**
 * In-memory analytics engine: buffers pageviews, Web Vitals and custom
 * events, periodically flushing them to the configured onFlush handler.
 */
declare class AnalyticsEngine {
    private config;
    private buffer;
    private flushTimer;
    private sessions;
    private allData;
    constructor(config?: Partial<AnalyticsConfig>);
    private startFlushTimer;
    private shouldIgnore;
    private getOrCreateSession;
    /**
     * Track a page view
     * Returns null when tracking is disabled or the path is ignored.
     */
    trackPageview(req: Request, options?: {
        country?: string;
    }): PageView | null;
    /**
     * Track Web Vitals
     * Returns null when tracking is disabled or vitals tracking is off.
     */
    trackVitals(pathname: string, metrics: WebVitals['metrics']): WebVitals | null;
    /**
     * Track custom event
     * Returns null when tracking is disabled.
     */
    trackEvent(name: string, properties?: Record<string, string | number | boolean>, req?: Request): CustomEvent | null;
    private checkBufferSize;
    /**
     * Flush buffer to storage/handler
     */
    flush(): Promise<void>;
    /**
     * Get analytics summary
     */
    getSummary(startDate?: Date, endDate?: Date): AnalyticsSummary;
    /**
     * Get real-time stats (last 5 minutes)
     */
    getRealtime(): {
        activeUsers: number;
        pageviews: number;
        topPages: Array<{
            path: string;
            count: number;
        }>;
    };
    /**
     * Export data as JSON
     */
    exportData(): AnalyticsData;
    /**
     * Clear all analytics data
     */
    clearData(): void;
    /**
     * Stop the analytics engine
     */
    stop(): void;
}
157
/** Options for createAnalyticsMiddleware. */
interface AnalyticsMiddlewareOptions {
    config?: Partial<AnalyticsConfig>;
    /* Optional resolver mapping a request to a country code (e.g. from a CDN header). */
    getCountry?: (req: Request) => string | undefined;
}
/**
 * Get or create analytics engine
 * Returns the shared engine, creating it with `config` on first use.
 */
declare function getAnalytics(config?: Partial<AnalyticsConfig>): AnalyticsEngine;
/**
 * Configure analytics
 * Applies `config` and returns the (re)configured shared engine.
 */
declare function configureAnalytics(config: Partial<AnalyticsConfig>): AnalyticsEngine;
/**
 * Create analytics middleware
 * Wraps a request pipeline; tracks a pageview for each passing request.
 */
declare function createAnalyticsMiddleware(options?: AnalyticsMiddlewareOptions): (req: Request, next: () => Promise<Response>) => Promise<Response>;
/**
 * Create analytics API handler
 * HTTP endpoint for receiving client-side reports (e.g. Web Vitals beacons).
 */
declare function createAnalyticsHandler(): (req: Request) => Promise<Response>;
177
/**
 * Inline <script> tag injected into pages; reports Core Web Vitals (LCP, FCP,
 * CLS) back to the /__float/analytics endpoint via sendBeacon on page hide.
 * NOTE(review): the 'paint' observer records entries[0], which can be
 * first-paint rather than first-contentful-paint — verify in the TS source.
 */
declare const analyticsClientScript = "\n<script>\n(function() {\n // Simple Web Vitals reporter\n const endpoint = '/__float/analytics?action=vitals';\n \n function sendVitals(metrics) {\n const body = JSON.stringify({\n pathname: window.location.pathname,\n metrics: metrics\n });\n \n if (navigator.sendBeacon) {\n navigator.sendBeacon(endpoint, body);\n } else {\n fetch(endpoint, { method: 'POST', body, keepalive: true });\n }\n }\n\n // Observe LCP\n if ('PerformanceObserver' in window) {\n const vitals = {};\n \n // LCP\n new PerformanceObserver((list) => {\n const entries = list.getEntries();\n const lastEntry = entries[entries.length - 1];\n vitals.LCP = Math.round(lastEntry.startTime);\n }).observe({ type: 'largest-contentful-paint', buffered: true });\n \n // FCP\n new PerformanceObserver((list) => {\n const entries = list.getEntries();\n vitals.FCP = Math.round(entries[0].startTime);\n }).observe({ type: 'paint', buffered: true });\n \n // CLS\n let clsValue = 0;\n new PerformanceObserver((list) => {\n for (const entry of list.getEntries()) {\n if (!entry.hadRecentInput) {\n clsValue += entry.value;\n }\n }\n vitals.CLS = clsValue;\n }).observe({ type: 'layout-shift', buffered: true });\n \n // Send on page hide\n document.addEventListener('visibilitychange', () => {\n if (document.visibilityState === 'hidden') {\n sendVitals(vitals);\n }\n });\n }\n})();\n</script>\n";
178
/**
 * Convenience facade bundling the analytics API surface; also the module's
 * default export.
 */
declare const analytics: {
    engine: typeof getAnalytics;
    configure: typeof configureAnalytics;
    createMiddleware: typeof createAnalyticsMiddleware;
    createHandler: typeof createAnalyticsHandler;
    /* The inline Web Vitals reporter script (analyticsClientScript). */
    clientScript: string;
    /* Direct tracking shortcuts delegating to the shared engine. */
    track: {
        pageview: (req: Request, options?: {
            country?: string;
        }) => PageView | null;
        event: (name: string, properties?: Record<string, string | number | boolean>, req?: Request) => CustomEvent | null;
        vitals: (pathname: string, metrics: WebVitals["metrics"]) => WebVitals | null;
    };
};
192
+
193
+ export { type AnalyticsConfig, type AnalyticsData, AnalyticsEngine, type AnalyticsMiddlewareOptions, type AnalyticsSummary, type CustomEvent, type PageView, type WebVitals, analytics, analyticsClientScript, configureAnalytics, createAnalyticsHandler, createAnalyticsMiddleware, analytics as default, getAnalytics };