@tryhamster/gerbil 1.0.0-rc.0 → 1.0.0-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/README.md +79 -14
  2. package/dist/auto-update-DsWBBnEk.mjs +3 -0
  3. package/dist/browser/index.d.mts +401 -5
  4. package/dist/browser/index.d.mts.map +1 -1
  5. package/dist/browser/index.mjs +1772 -146
  6. package/dist/browser/index.mjs.map +1 -1
  7. package/dist/{chrome-backend-CtwPENIW.mjs → chrome-backend-JEPeM2YE.mjs} +1 -1
  8. package/dist/{chrome-backend-C5Un08O4.mjs → chrome-backend-Y9F7W5VQ.mjs} +514 -73
  9. package/dist/chrome-backend-Y9F7W5VQ.mjs.map +1 -0
  10. package/dist/cli.mjs +3359 -646
  11. package/dist/cli.mjs.map +1 -1
  12. package/dist/frameworks/express.d.mts +1 -1
  13. package/dist/frameworks/express.mjs +3 -3
  14. package/dist/frameworks/fastify.d.mts +1 -1
  15. package/dist/frameworks/fastify.mjs +3 -3
  16. package/dist/frameworks/hono.d.mts +1 -1
  17. package/dist/frameworks/hono.mjs +3 -3
  18. package/dist/frameworks/next.d.mts +2 -2
  19. package/dist/frameworks/next.mjs +3 -3
  20. package/dist/frameworks/react.d.mts +1 -1
  21. package/dist/frameworks/trpc.d.mts +1 -1
  22. package/dist/frameworks/trpc.mjs +3 -3
  23. package/dist/gerbil-DeQlX_Mt.mjs +5 -0
  24. package/dist/gerbil-POAz8peb.d.mts +431 -0
  25. package/dist/gerbil-POAz8peb.d.mts.map +1 -0
  26. package/dist/gerbil-yoSpRHgv.mjs +1463 -0
  27. package/dist/gerbil-yoSpRHgv.mjs.map +1 -0
  28. package/dist/index.d.mts +395 -9
  29. package/dist/index.d.mts.map +1 -1
  30. package/dist/index.mjs +8 -6
  31. package/dist/index.mjs.map +1 -1
  32. package/dist/integrations/ai-sdk.d.mts +122 -4
  33. package/dist/integrations/ai-sdk.d.mts.map +1 -1
  34. package/dist/integrations/ai-sdk.mjs +239 -11
  35. package/dist/integrations/ai-sdk.mjs.map +1 -1
  36. package/dist/integrations/langchain.d.mts +132 -2
  37. package/dist/integrations/langchain.d.mts.map +1 -1
  38. package/dist/integrations/langchain.mjs +176 -8
  39. package/dist/integrations/langchain.mjs.map +1 -1
  40. package/dist/integrations/llamaindex.d.mts +1 -1
  41. package/dist/integrations/llamaindex.mjs +3 -3
  42. package/dist/integrations/mcp-client.mjs +4 -4
  43. package/dist/integrations/mcp-client.mjs.map +1 -1
  44. package/dist/integrations/mcp.d.mts +2 -2
  45. package/dist/integrations/mcp.d.mts.map +1 -1
  46. package/dist/integrations/mcp.mjs +6 -6
  47. package/dist/{mcp-R8kRLIKb.mjs → mcp-Bitg4sjX.mjs} +10 -37
  48. package/dist/mcp-Bitg4sjX.mjs.map +1 -0
  49. package/dist/microphone-D-6y9aiE.mjs +3 -0
  50. package/dist/{models-DKULvhOr.mjs → models-BAtL8qsA.mjs} +42 -7
  51. package/dist/models-BAtL8qsA.mjs.map +1 -0
  52. package/dist/{models-De2-_GmQ.d.mts → models-CE0fBq0U.d.mts} +2 -2
  53. package/dist/models-CE0fBq0U.d.mts.map +1 -0
  54. package/dist/{one-liner-BUQR0nqq.mjs → one-liner-B1rmFto6.mjs} +2 -2
  55. package/dist/{one-liner-BUQR0nqq.mjs.map → one-liner-B1rmFto6.mjs.map} +1 -1
  56. package/dist/repl-D20JO260.mjs +10 -0
  57. package/dist/skills/index.d.mts +303 -12
  58. package/dist/skills/index.d.mts.map +1 -1
  59. package/dist/skills/index.mjs +6 -6
  60. package/dist/skills-5DxAV-rn.mjs +1435 -0
  61. package/dist/skills-5DxAV-rn.mjs.map +1 -0
  62. package/dist/stt-Bv_dum-R.mjs +433 -0
  63. package/dist/stt-Bv_dum-R.mjs.map +1 -0
  64. package/dist/stt-KzSoNvwI.mjs +3 -0
  65. package/dist/{tools-BsiEE6f2.mjs → tools-IYPrqoek.mjs} +6 -7
  66. package/dist/{tools-BsiEE6f2.mjs.map → tools-IYPrqoek.mjs.map} +1 -1
  67. package/dist/tts-5yWeP_I0.mjs +3 -0
  68. package/dist/tts-DG6denWG.mjs +729 -0
  69. package/dist/tts-DG6denWG.mjs.map +1 -0
  70. package/dist/types-s6Py2_DL.d.mts +353 -0
  71. package/dist/types-s6Py2_DL.d.mts.map +1 -0
  72. package/dist/{utils-7vXqtq2Q.mjs → utils-CkB4Roi6.mjs} +1 -1
  73. package/dist/{utils-7vXqtq2Q.mjs.map → utils-CkB4Roi6.mjs.map} +1 -1
  74. package/docs/ai-sdk.md +137 -21
  75. package/docs/browser.md +241 -2
  76. package/docs/memory.md +72 -0
  77. package/docs/stt.md +494 -0
  78. package/docs/tts.md +569 -0
  79. package/docs/vision.md +396 -0
  80. package/package.json +17 -18
  81. package/dist/auto-update-BbNHbSU1.mjs +0 -3
  82. package/dist/chrome-backend-C5Un08O4.mjs.map +0 -1
  83. package/dist/gerbil-BfnsFWRE.mjs +0 -644
  84. package/dist/gerbil-BfnsFWRE.mjs.map +0 -1
  85. package/dist/gerbil-BjW-z7Fq.mjs +0 -5
  86. package/dist/gerbil-DZ1k3ChC.d.mts +0 -138
  87. package/dist/gerbil-DZ1k3ChC.d.mts.map +0 -1
  88. package/dist/mcp-R8kRLIKb.mjs.map +0 -1
  89. package/dist/models-DKULvhOr.mjs.map +0 -1
  90. package/dist/models-De2-_GmQ.d.mts.map +0 -1
  91. package/dist/skills-D3CEpgDc.mjs +0 -630
  92. package/dist/skills-D3CEpgDc.mjs.map +0 -1
  93. package/dist/types-BS1N92Jt.d.mts +0 -183
  94. package/dist/types-BS1N92Jt.d.mts.map +0 -1
package/docs/vision.md ADDED
@@ -0,0 +1,396 @@
+ # Vision Models in Gerbil
+
+ Gerbil supports **Vision Language Models (VLMs)** like Ministral 3B that can understand and describe images. This guide covers how to use vision capabilities across all Gerbil interfaces.
+
+ ## Quick Start
+
+ ```typescript
+ import { Gerbil } from "@tryhamster/gerbil";
+
+ const g = new Gerbil();
+ await g.loadModel("ministral-3b"); // Vision-capable model
+
+ const result = await g.generate("What's in this image?", {
+   images: [{ source: "https://example.com/photo.jpg" }]
+ });
+
+ console.log(result.text);
+ ```
+
+ ## Supported Models
+
+ | Model ID | Vision | Reasoning | Context | Size |
+ |----------|--------|-----------|---------|------|
+ | `ministral-3b` | ✅ | ✅ | 256K | ~2.5GB |
+
+ More vision models are coming soon as they become available in ONNX format.
+
+ ## Image Input Types
+
+ Gerbil accepts images in several formats:
+
+ ```typescript
+ // URL (recommended for web images)
+ images: [{ source: "https://example.com/image.jpg" }]
+
+ // Data URI (base64 encoded)
+ images: [{ source: "data:image/png;base64,iVBORw0KGgo..." }]
+
+ // Local file path (Node.js only, auto-converted to a data URI)
+ images: [{ source: "/path/to/image.png" }]
+
+ // With alt text (optional, provides context)
+ images: [{ source: "...", alt: "A photo of a sunset" }]
+ ```
+
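+ If you want to build a data URI yourself (for example, instead of relying on the automatic file-path conversion), here is a minimal sketch using only Node's built-in `fs` and `path` modules; the `toDataUri` helper is hypothetical, not a Gerbil API:
+
+ ```typescript
+ import { readFileSync } from "node:fs";
+ import { extname } from "node:path";
+
+ // Hypothetical helper: convert a local file to a base64 data URI.
+ // Gerbil does this automatically for file paths; shown only for illustration.
+ function toDataUri(path: string): string {
+   const mime = extname(path) === ".png" ? "image/png" : "image/jpeg";
+   return `data:${mime};base64,${readFileSync(path).toString("base64")}`;
+ }
+
+ const result = await g.generate("Describe this", {
+   images: [{ source: toDataUri("/path/to/image.png") }],
+ });
+ ```
+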
+ ## Multiple Images
+
+ You can pass multiple images for comparison or multi-image understanding:
+
+ ```typescript
+ const result = await g.generate("What's the difference between these two images?", {
+   images: [
+     { source: "https://example.com/before.jpg" },
+     { source: "https://example.com/after.jpg" }
+   ]
+ });
+ ```
+
+ ## Model Capability Detection
+
+ Check if the loaded model supports vision:
+
+ ```typescript
+ await g.loadModel("ministral-3b");
+
+ if (g.supportsVision()) {
+   // Use vision features
+ } else {
+   // Text-only mode
+ }
+ ```
+
+ ## Graceful Fallback
+
+ If you pass images to a non-vision model, Gerbil will:
+ 1. Log a warning to console
+ 2. Ignore the images
+ 3. Process the text prompt normally
+
+ This allows you to write code that works with any model:
+
+ ```typescript
+ // This works with any model - images are used if supported
+ const result = await g.generate("Describe this", {
+   images: [{ source: imageUrl }]
+ });
+ ```
+
+ ---
+
+ ## AI SDK Integration
+
+ Use vision models with Vercel AI SDK v5+:
+
+ ```typescript
+ import { generateText } from "ai";
+ import { gerbil } from "@tryhamster/gerbil/ai";
+
+ const { text } = await generateText({
+   model: gerbil("ministral-3b"),
+   messages: [
+     {
+       role: "user",
+       content: [
+         { type: "image", image: new URL("https://example.com/photo.jpg") },
+         { type: "text", text: "Describe this image in detail" },
+       ],
+     },
+   ],
+ });
+ ```
+
+ ### Image Part Formats
+
+ The AI SDK integration accepts images in these formats:
+
+ ```typescript
+ // URL object
+ { type: "image", image: new URL("https://...") }
+
+ // URL string
+ { type: "image", image: "https://..." }
+
+ // Base64 string
+ { type: "image", image: "data:image/png;base64,..." }
+
+ // Uint8Array with mime type
+ { type: "image", image: imageBytes, mimeType: "image/png" }
+ ```
+
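+ Putting the `Uint8Array` form into a full call — a minimal sketch that assumes Node's `fs/promises` and a local `./photo.png`:
+
+ ```typescript
+ import { readFile } from "node:fs/promises";
+ import { generateText } from "ai";
+ import { gerbil } from "@tryhamster/gerbil/ai";
+
+ // Read raw bytes and pass them as an image part with an explicit mime type.
+ const imageBytes = new Uint8Array(await readFile("./photo.png"));
+
+ const { text } = await generateText({
+   model: gerbil("ministral-3b"),
+   messages: [
+     {
+       role: "user",
+       content: [
+         { type: "image", image: imageBytes, mimeType: "image/png" },
+         { type: "text", text: "What does this image show?" },
+       ],
+     },
+   ],
+ });
+ ```
+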
+ ---
+
+ ## Express & Next.js Integration
+
+ ### Express
+
+ ```typescript
+ import express from "express";
+ import { gerbil } from "@tryhamster/gerbil/express";
+
+ const app = express();
+ app.use("/ai", gerbil({ model: "ministral-3b" })());
+
+ // POST /ai/generate
+ // Body: { prompt: "Describe this", images: [{ source: "https://..." }] }
+ ```
+
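+ A client-side call against that route might look like the following — a sketch that assumes the `POST /ai/generate` body shape shown in the comment above, and a JSON response with a `text` field (adjust to whatever your server actually returns):
+
+ ```typescript
+ // Hypothetical client call to the Express route mounted above.
+ const res = await fetch("/ai/generate", {
+   method: "POST",
+   headers: { "Content-Type": "application/json" },
+   body: JSON.stringify({
+     prompt: "Describe this",
+     images: [{ source: "https://example.com/photo.jpg" }],
+   }),
+ });
+ const { text } = await res.json();
+ ```
+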
+ ### Next.js App Router
+
+ ```typescript
+ // app/api/chat/route.ts
+ import { gerbil } from "@tryhamster/gerbil/next";
+
+ export const POST = gerbil.handler({ model: "ministral-3b" });
+
+ // Fetch from client:
+ // fetch("/api/chat", {
+ //   method: "POST",
+ //   body: JSON.stringify({
+ //     prompt: "What's in this image?",
+ //     images: [{ source: dataUri }]
+ //   })
+ // })
+ ```
+
+ ---
+
+ ## React Hooks (Browser)
+
+ ### useChat with Images
+
+ ```tsx
+ import { useChat } from "@tryhamster/gerbil/browser";
+
+ function VisionChat() {
+   const {
+     messages,
+     input,
+     setInput,
+     handleSubmit,
+     attachImage,
+     attachedImages,
+     clearImages,
+     sendWithImages,
+   } = useChat({ model: "ministral-3b" });
+
+   const handleFileSelect = (e: React.ChangeEvent<HTMLInputElement>) => {
+     const file = e.target.files?.[0];
+     if (file) {
+       const reader = new FileReader();
+       reader.onload = () => attachImage(reader.result as string);
+       reader.readAsDataURL(file);
+     }
+   };
+
+   return (
+     <div>
+       {/* Messages */}
+       {messages.map(m => (
+         <div key={m.id}>
+           {m.images?.map((img, i) => (
+             <img key={i} src={img} alt="" className="max-w-xs" />
+           ))}
+           <p>{m.content}</p>
+         </div>
+       ))}
+
+       {/* Image attachment */}
+       <input type="file" accept="image/*" onChange={handleFileSelect} />
+
+       {attachedImages.length > 0 && (
+         <div>
+           📎 {attachedImages.length} image(s) attached
+           <button onClick={clearImages}>Clear</button>
+         </div>
+       )}
+
+       {/* Input */}
+       <form onSubmit={handleSubmit}>
+         <input
+           value={input}
+           onChange={e => setInput(e.target.value)}
+           placeholder="Describe the image..."
+         />
+         <button type="submit">Send</button>
+       </form>
+     </div>
+   );
+ }
+ ```
+
+ ### Direct Image Send
+
+ ```tsx
+ // Send a message with specific images
+ sendWithImages("Compare these two photos", [image1DataUri, image2DataUri]);
+ ```
+
+ ---
+
+ ## Built-in Vision Skills
+
+ Gerbil includes pre-built skills for common vision tasks:
+
+ ### Describe Image
+
+ ```typescript
+ import { describeImage } from "@tryhamster/gerbil/skills";
+
+ const description = await describeImage({
+   image: "https://example.com/photo.jpg",
+   focus: "details", // "general" | "details" | "text" | "objects" | "scene"
+   format: "bullets", // "paragraph" | "bullets" | "structured"
+ });
+ ```
+
+ ### Analyze Screenshot
+
+ ```typescript
+ import { analyzeScreenshot } from "@tryhamster/gerbil/skills";
+
+ const analysis = await analyzeScreenshot({
+   image: screenshotDataUri,
+   type: "accessibility", // "ui-review" | "accessibility" | "suggestions" | "qa"
+ });
+ ```
+
+ ### Extract from Image
+
+ ```typescript
+ import { extractFromImage } from "@tryhamster/gerbil/skills";
+
+ const extracted = await extractFromImage({
+   image: documentPhoto,
+   extract: "text", // "text" | "data" | "code" | "table" | "diagram"
+   outputFormat: "markdown", // "raw" | "json" | "markdown"
+ });
+ ```
+
+ ### Compare Images
+
+ ```typescript
+ import { compareImages } from "@tryhamster/gerbil/skills";
+
+ const comparison = await compareImages({
+   image1: beforeScreenshot,
+   image2: afterScreenshot,
+   focus: "differences", // "differences" | "similarities" | "detailed"
+ });
+ ```
+
+ ### Caption Image
+
+ ```typescript
+ import { captionImage } from "@tryhamster/gerbil/skills";
+
+ const caption = await captionImage({
+   image: photo,
+   style: "descriptive", // "concise" | "descriptive" | "creative" | "funny"
+ });
+ ```
+
+ ---
+
+ ## Performance Tips
+
+ ### WebGPU Acceleration
+
+ Vision models benefit significantly from GPU acceleration:
+
+ ```typescript
+ // Node.js: Uses Chrome backend for WebGPU
+ await g.loadModel("ministral-3b"); // Auto-detects WebGPU
+
+ // Browser: Native WebGPU
+ await g.loadModel("ministral-3b", { device: "webgpu" });
+ ```
+
+ ### Image Size
+
+ - Larger images take longer to process
+ - Consider resizing before sending to the model (see the sketch below)
+ - 512x512 to 1024x1024 is generally optimal
+
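+ A minimal browser-side resize sketch using only standard canvas APIs (no Gerbil-specific code), producing a data URI you can attach:
+
+ ```typescript
+ // Downscale an image file to fit within maxSize pixels and return a data URI.
+ async function resizeToDataUri(file: File, maxSize = 1024): Promise<string> {
+   const bitmap = await createImageBitmap(file);
+   const scale = Math.min(1, maxSize / Math.max(bitmap.width, bitmap.height));
+   const canvas = document.createElement("canvas");
+   canvas.width = Math.round(bitmap.width * scale);
+   canvas.height = Math.round(bitmap.height * scale);
+   canvas.getContext("2d")!.drawImage(bitmap, 0, 0, canvas.width, canvas.height);
+   return canvas.toDataURL("image/jpeg", 0.9);
+ }
+ ```
+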
+ ### Caching
+
+ Models are cached in the browser's IndexedDB (in Node.js, via the Chrome backend), so subsequent loads are fast.
+
+ ---
+
+ ## Troubleshooting
+
+ ### "Model doesn't support vision"
+
+ Make sure you're using a vision-capable model like `ministral-3b`.
+
+ ### Slow image processing
+
+ - Ensure WebGPU is being used (check `g.getDeviceMode()`, as sketched below)
+ - Resize large images before sending
+ - In Node.js, the Chrome backend provides GPU acceleration
+
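+ A quick sanity check using the `g.getDeviceMode()` call mentioned above — the exact return values are assumed here for illustration:
+
+ ```typescript
+ // If the model fell back to a non-WebGPU device, vision prompts will be slow.
+ const mode = g.getDeviceMode();
+ if (mode !== "webgpu") {
+   console.warn(`Running on "${mode}" - expect slow image processing.`);
+ }
+ ```
+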
+ ### Image not loading
+
+ - Check that the URL is accessible
+ - For local files, ensure the path is absolute
+ - Base64 data URIs must include the MIME type prefix
+
+ ---
+
+ ## API Reference
+
+ ### ImageInput
+
+ ```typescript
+ interface ImageInput {
+   /** Image source: URL, base64 data URI, or local file path */
+   source: string;
+   /** Optional alt text for context */
+   alt?: string;
+ }
+ ```
+
+ ### GenerateOptions (with images)
+
+ ```typescript
+ interface GenerateOptions {
+   // ... standard options ...
+
+   /** Images to include (only used if model supports vision) */
+   images?: ImageInput[];
+ }
+ ```
+
+ ### supportsVision()
+
+ ```typescript
+ g.supportsVision(): boolean
+ ```
+
+ Returns `true` if the loaded model supports vision input.
+
+ ### ModelConfig
+
+ ```typescript
+ interface ModelConfig {
+   // ... standard properties ...
+
+   /** Whether model supports vision/image input */
+   supportsVision?: boolean;
+
+   /** Size of vision encoder (if applicable) */
+   visionEncoderSize?: string;
+ }
+ ```
+
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tryhamster/gerbil",
-  "version": "1.0.0-rc.0",
+  "version": "1.0.0-rc.1",
   "description": "Local LLM inference for Node.js. GPU-accelerated. Zero config. Works standalone or with Vercel AI SDK.",
   "type": "module",
   "main": "dist/index.mjs",
@@ -66,28 +66,17 @@
       "types": "./dist/browser/index.d.mts"
     }
   },
-  "scripts": {
-    "build": "tsdown",
-    "dev": "tsx src/cli/index.ts",
-    "typecheck": "tsc --noEmit",
-    "check": "ultracite check",
-    "fix": "ultracite fix",
-    "test": "vitest run",
-    "test:watch": "vitest",
-    "prepublishOnly": "pnpm build",
-    "changeset": "changeset",
-    "release": "changeset publish",
-    "prepare": "lefthook install"
-  },
   "dependencies": {
     "@huggingface/hub": "^2.7.1",
     "@huggingface/transformers": "^3.8.0",
     "chalk": "^5.3.0",
     "cli-progress": "^3.12.0",
     "commander": "^12.1.0",
+    "kokoro-js": "^1.2.1",
     "onnxruntime-web": "^1.21.0-dev.20250114-228dd16893",
     "ora": "^8.0.1",
     "puppeteer-core": "^24.31.0",
+    "react": "^19.0.0",
     "webgpu": "^0.3.8",
     "zod": "^3.23.0"
   },
@@ -101,8 +90,7 @@
     "hono": ">=4.0.0",
     "langchain": ">=0.1.0",
     "llamaindex": ">=0.1.0",
-    "next": ">=14.0.0",
-    "react": ">=18.0.0"
+    "next": ">=14.0.0"
   },
   "peerDependenciesMeta": {
     "@ai-sdk/provider": {
@@ -158,7 +146,6 @@
     "ink-spinner": "^5.0.0",
     "ink-text-input": "^6.0.0",
     "lefthook": "^2.0.5",
-    "react": "^19.0.0",
     "tsdown": "^0.17.0-beta.3",
     "tsx": "^4.15.0",
     "typescript": "^5.4.5",
@@ -203,5 +190,17 @@
   ],
   "publishConfig": {
     "access": "public"
+  },
+  "scripts": {
+    "build": "tsdown",
+    "dev": "tsx src/cli/index.ts",
+    "typecheck": "tsc --noEmit",
+    "check": "ultracite check",
+    "fix": "ultracite fix",
+    "test": "vitest run",
+    "test:watch": "vitest",
+    "changeset": "changeset",
+    "version": "changeset version",
+    "release": "pnpm publish && changeset tag"
   }
-}
+}
package/dist/auto-update-BbNHbSU1.mjs DELETED
@@ -1,3 +0,0 @@
- import { i as installUpdate, n as checkForUpdate, r as compareVersions, t as CURRENT_VERSION } from "./cli.mjs";
-
- export { CURRENT_VERSION, checkForUpdate, installUpdate };
package/dist/chrome-backend-C5Un08O4.mjs.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"chrome-backend-C5Un08O4.mjs","sources":["../src/core/chrome-backend.ts"],"sourcesContent":["…"],"mappings":"…"}