@tryhamster/gerbil 1.0.0-rc.11 → 1.0.0-rc.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +52 -1
  2. package/dist/browser/index.d.ts +159 -1
  3. package/dist/browser/index.d.ts.map +1 -1
  4. package/dist/browser/index.js +473 -6
  5. package/dist/browser/index.js.map +1 -1
  6. package/dist/cli.mjs +7 -7
  7. package/dist/cli.mjs.map +1 -1
  8. package/dist/frameworks/express.d.mts +1 -1
  9. package/dist/frameworks/express.mjs +2 -1
  10. package/dist/frameworks/express.mjs.map +1 -1
  11. package/dist/frameworks/fastify.d.mts +1 -1
  12. package/dist/frameworks/fastify.mjs +2 -1
  13. package/dist/frameworks/fastify.mjs.map +1 -1
  14. package/dist/frameworks/hono.d.mts +1 -1
  15. package/dist/frameworks/hono.mjs +2 -1
  16. package/dist/frameworks/hono.mjs.map +1 -1
  17. package/dist/frameworks/next.d.mts +2 -2
  18. package/dist/frameworks/next.mjs +2 -1
  19. package/dist/frameworks/next.mjs.map +1 -1
  20. package/dist/frameworks/react.d.mts +1 -1
  21. package/dist/frameworks/trpc.d.mts +1 -1
  22. package/dist/frameworks/trpc.mjs +2 -1
  23. package/dist/frameworks/trpc.mjs.map +1 -1
  24. package/dist/{gerbil-DoDGHe6Z.mjs → gerbil-BZklpDhM.mjs} +289 -1
  25. package/dist/gerbil-BZklpDhM.mjs.map +1 -0
  26. package/dist/gerbil-CAMb_nrK.mjs +5 -0
  27. package/dist/{gerbil-qOTe1nl2.d.mts → gerbil-DJygY0sJ.d.mts} +120 -2
  28. package/dist/gerbil-DJygY0sJ.d.mts.map +1 -0
  29. package/dist/index.d.mts +3 -3
  30. package/dist/index.d.mts.map +1 -1
  31. package/dist/index.mjs +2 -2
  32. package/dist/index.mjs.map +1 -1
  33. package/dist/integrations/ai-sdk.d.mts +72 -3
  34. package/dist/integrations/ai-sdk.d.mts.map +1 -1
  35. package/dist/integrations/ai-sdk.mjs +106 -3
  36. package/dist/integrations/ai-sdk.mjs.map +1 -1
  37. package/dist/integrations/langchain.d.mts +1 -1
  38. package/dist/integrations/langchain.mjs +2 -1
  39. package/dist/integrations/langchain.mjs.map +1 -1
  40. package/dist/integrations/llamaindex.d.mts +1 -1
  41. package/dist/integrations/llamaindex.mjs +2 -1
  42. package/dist/integrations/llamaindex.mjs.map +1 -1
  43. package/dist/integrations/mcp.d.mts +2 -2
  44. package/dist/integrations/mcp.mjs +5 -4
  45. package/dist/{mcp-kzDDWIoS.mjs → mcp-ZCC5OR7B.mjs} +3 -3
  46. package/dist/{mcp-kzDDWIoS.mjs.map → mcp-ZCC5OR7B.mjs.map} +1 -1
  47. package/dist/{one-liner-DxnNs_JK.mjs → one-liner-mH5SKPvT.mjs} +2 -2
  48. package/dist/{one-liner-DxnNs_JK.mjs.map → one-liner-mH5SKPvT.mjs.map} +1 -1
  49. package/dist/{repl-DGUw4fCc.mjs → repl-CSM1IBP1.mjs} +3 -3
  50. package/dist/skills/index.d.mts +3 -3
  51. package/dist/skills/index.d.mts.map +1 -1
  52. package/dist/skills/index.mjs +4 -3
  53. package/dist/{skills-DulrOPeP.mjs → skills-CPB_9YfF.mjs} +2 -2
  54. package/dist/{skills-DulrOPeP.mjs.map → skills-CPB_9YfF.mjs.map} +1 -1
  55. package/dist/{types-CiTc7ez3.d.mts → types-evP8RShr.d.mts} +26 -2
  56. package/dist/types-evP8RShr.d.mts.map +1 -0
  57. package/docs/ai-sdk.md +56 -1
  58. package/docs/browser.md +103 -0
  59. package/docs/embeddings.md +311 -0
  60. package/package.json +1 -1
  61. package/dist/gerbil-DJGqq7BX.mjs +0 -4
  62. package/dist/gerbil-DoDGHe6Z.mjs.map +0 -1
  63. package/dist/gerbil-qOTe1nl2.d.mts.map +0 -1
  64. package/dist/types-CiTc7ez3.d.mts.map +0 -1
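
The headline change in this range is embedding support in the AI SDK integration (the ai-sdk files and the new docs/embeddings.md above). As orientation before the raw diffs, here is a usage sketch assembled from the `@example Embeddings` JSDoc added in this version; it assumes AI SDK v5's `embed`/`embedMany` helpers and the `gerbil/ai` entry point shown in the diff, and the `bge-small-en-v1.5` id comes from the new `EMBEDDING_MODELS` list.

```ts
// Sketch based on the "@example Embeddings" blocks added in this diff.
// Assumes AI SDK v5 ("ai" package) and the gerbil/ai entry point.
import { embed, embedMany } from "ai";
import { gerbil } from "gerbil/ai";

// Single embedding (all-MiniLM-L6-v2 is the default model)
const { embedding } = await embed({
  model: gerbil.embedding(),
  value: "Hello world",
});

// Batch embeddings, optionally with an explicit model id from EMBEDDING_MODELS
const { embeddings } = await embedMany({
  model: gerbil.embedding("bge-small-en-v1.5"),
  values: ["Hello", "World", "How are you?"],
});

console.log(embedding.length); // 384 dimensions per EMBEDDING_MODELS
console.log(embeddings.length); // 3
```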
@@ -1,5 +1,5 @@
- import { b as STTModelConfig, g as ModelConfig, l as GerbilModelSettings, u as GerbilProviderSettings } from "../types-CiTc7ez3.mjs";
- import { LanguageModelV2, LanguageModelV2CallOptions, LanguageModelV2CallWarning, LanguageModelV2Content, LanguageModelV2FinishReason, LanguageModelV2StreamPart, LanguageModelV2Usage, SpeechModelV2, SpeechModelV2CallOptions, SpeechModelV2CallWarning, TranscriptionModelV2, TranscriptionModelV2CallOptions, TranscriptionModelV2CallWarning } from "@ai-sdk/provider";
+ import { g as ModelConfig, l as GerbilModelSettings, u as GerbilProviderSettings, x as STTModelConfig } from "../types-evP8RShr.mjs";
+ import { EmbeddingModelV2, LanguageModelV2, LanguageModelV2CallOptions, LanguageModelV2CallWarning, LanguageModelV2Content, LanguageModelV2FinishReason, LanguageModelV2StreamPart, LanguageModelV2Usage, SpeechModelV2, SpeechModelV2CallOptions, SpeechModelV2CallWarning, TranscriptionModelV2, TranscriptionModelV2CallOptions, TranscriptionModelV2CallWarning } from "@ai-sdk/provider";

  //#region src/integrations/ai-sdk.d.ts

@@ -103,11 +103,46 @@ declare class GerbilTranscriptionModel implements TranscriptionModelV2 {
  };
  }>;
  }
+ /** Settings for Gerbil embedding model */
+ interface GerbilEmbeddingSettings {
+ /** Normalize embeddings (default: true) */
+ normalize?: boolean;
+ }
+ /** Default embedding models */
+ declare const EMBEDDING_MODELS: {
+ id: string;
+ repo: string;
+ description: string;
+ dimensions: number;
+ }[];
+ declare class GerbilEmbeddingModel implements EmbeddingModelV2<string> {
+ readonly specificationVersion: "v2";
+ readonly provider = "gerbil";
+ readonly modelId: string;
+ readonly maxEmbeddingsPerCall: number;
+ readonly supportsParallelCalls = false;
+ private instance;
+ private readonly settings;
+ private loadPromise;
+ constructor(modelId: string, settings?: GerbilEmbeddingSettings);
+ private getRepo;
+ private ensureLoaded;
+ doEmbed(options: {
+ values: string[];
+ abortSignal?: AbortSignal;
+ }): Promise<{
+ embeddings: number[][];
+ usage?: {
+ tokens: number;
+ };
+ }>;
+ }
  type GerbilProvider = {
  (modelId: string, settings?: GerbilModelSettings): GerbilLanguageModel;
  languageModel(modelId: string, settings?: GerbilModelSettings): GerbilLanguageModel;
  speech(modelId?: string, settings?: GerbilSpeechSettings): GerbilSpeechModel;
  transcription(modelId?: string, settings?: GerbilTranscriptionSettings): GerbilTranscriptionModel;
+ embedding(modelId?: string, settings?: GerbilEmbeddingSettings): GerbilEmbeddingModel;
  listModels(): ModelConfig[];
  getModel(modelId: string): ModelConfig | undefined;
  listVoices(): Array<{
@@ -117,6 +152,22 @@ type GerbilProvider = {
  language: string;
  }>;
  listTranscriptionModels(): STTModelConfig[];
+ listEmbeddingModels(): typeof EMBEDDING_MODELS;
+ /**
+ * Preload a model (download without initializing)
+ *
+ * @param modelId - Model to preload
+ * @param options.keepLoaded - Keep model in memory for instant generateText() calls
+ */
+ preload(modelId: string, options?: {
+ onProgress?: (info: {
+ status: string;
+ progress?: number;
+ }) => void;
+ keepLoaded?: boolean;
+ }): Promise<void>;
+ /** Check if a model is cached */
+ isCached(modelId: string): Promise<boolean>;
  };
  /**
  * Create a Gerbil provider
@@ -189,8 +240,26 @@ declare function createGerbil(options?: GerbilProviderSettings): GerbilProvider;
  * console.log(transcript.text);
  * console.log(transcript.segments); // Timestamped segments
  * ```
+ *
+ * @example Embeddings
+ * ```ts
+ * import { embed, embedMany } from "ai";
+ * import { gerbil } from "gerbil/ai";
+ *
+ * // Single embedding
+ * const { embedding } = await embed({
+ * model: gerbil.embedding(), // all-MiniLM-L6-v2 by default
+ * value: "Hello world",
+ * });
+ *
+ * // Multiple embeddings
+ * const { embeddings } = await embedMany({
+ * model: gerbil.embedding(),
+ * values: ["Hello", "World", "How are you?"],
+ * });
+ * ```
  */
  declare const gerbil: GerbilProvider;
  //#endregion
- export { GerbilProvider, GerbilSpeechSettings, GerbilTranscriptionSettings, createGerbil, gerbil as default, gerbil };
+ export { EMBEDDING_MODELS, GerbilEmbeddingSettings, GerbilProvider, GerbilSpeechSettings, GerbilTranscriptionSettings, createGerbil, gerbil as default, gerbil };
  //# sourceMappingURL=ai-sdk.d.mts.map
@@ -1 +1 @@
- {"version":3,"file":"ai-sdk.d.mts","names":[],"sources":["../../src/integrations/ai-sdk.ts"],"sourcesContent":[],"mappings":";;;;;cAwDM,mBAAA,YAA+B,eAsJe,CAAA;EAtJf,SAAA,oBAAA,EAAA,IAAA;EAAe,SAAA,QAAA,GAAA,QAAA;EAsOnC,SAAA,OAAA,EAAA,MAAoB;EAO/B,SAAA,aAAkB,EAvOE,MAuOF,CAAA,MAAA,EAvOiB,MAuOjB,EAAA,CAAA;EASiB,QAAA,QAAA;EAoBb,iBAAA,QAAA;EACjB,iBAAA,gBAAA;EACG,QAAA,WAAA;EAEa,WAAA,CAAA,OAAA,EAAA,MAAA,EAAA,QAAA,EA/Pb,mBA+Pa,EAAA,gBAAA,EA9PL,sBA8PK;EAJ4B,QAAA,YAAA;EA7BpB,QAAA,aAAA;EAAa,QAAA,eAAA;EAoI/B,UAAA,CAAA,OAAA,EA3QW,0BA2QgB,CAAA,EA3QU,OA2QV,CAAA;IAKtC,OAAA,wBAAyB,EAAA;IASU,YAAA,6BAAA;IAoBb,KAAA,sBAAA;IAEd,OAAA,EAAA;MAOA,IAAA,EAAA;QAGG,KAAA,EAAA,MAAA;QAZ6C,MAAA,EAAA,MAAA;MA7BpB,CAAA;IAAoB,CAAA;IAiHlD,QAAA,4BAAc,EAAA;EACK,CAAA,CAAA;EAAsB,QAAA,CAAA,OAAA,EAlV3B,0BAkV2B,CAAA,EAlVD,OAkVC,CAAA;IACT,MAAA,gBAAA,0BAAA,CAAA;IAAsB,OAAA,EAAA;MAC5B,IAAA,EAAA;QAAuB,KAAA,EAAA,MAAA;QAChB,MAAA,EAAA,MAAA;MAA8B,CAAA;IAC3D,CAAA;EACa,CAAA,CAAA;;;AAEc,UAzQ1B,oBAAA,CAyQ0B;EA8B3B;EA4EH,KAAA,CAAA,EAAA,MAAuB;;;;cA5W9B,iBAAA,YAA6B;;;;;;;0CASM;;sBAoBb,2BAA2B;WAC5C;cACG;;;;;iBAEa;;;;;;;;;;UAmGV,2BAAA;;;;cAKX,wBAAA,YAAoC;;;;;;;0CASD;;sBAoBb,kCAAkC;;cAEhD;;;;;;;cAOA;;;;;iBAGG;;;;;KAwEL,cAAA;+BACmB,sBAAsB;4CACT,sBAAsB;sCAC5B,uBAAuB;6CAChB,8BAA8B;gBAC3D;6BACa;gBACb;;;;;;6BACa;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBA8Bb,YAAA,WAAsB,yBAA8B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cA4EvD,QAAM"}
+ {"version":3,"file":"ai-sdk.d.mts","names":[],"sources":["../../src/integrations/ai-sdk.ts"],"sourcesContent":[],"mappings":";;;;;cAyDM,mBAAA,YAA+B,eAsJe,CAAA;EAtJf,SAAA,oBAAA,EAAA,IAAA;EAAe,SAAA,QAAA,GAAA,QAAA;EAsOnC,SAAA,OAAA,EAAA,MAAoB;EAO/B,SAAA,aAAkB,EAvOE,MAuOF,CAAA,MAAA,EAvOiB,MAuOjB,EAAA,CAAA;EASiB,QAAA,QAAA;EAoBb,iBAAA,QAAA;EACjB,iBAAA,gBAAA;EACG,QAAA,WAAA;EAEa,WAAA,CAAA,OAAA,EAAA,MAAA,EAAA,QAAA,EA/Pb,mBA+Pa,EAAA,gBAAA,EA9PL,sBA8PK;EAJ4B,QAAA,YAAA;EA7BpB,QAAA,aAAA;EAAa,QAAA,eAAA;EAoI/B,UAAA,CAAA,OAAA,EA3QW,0BA2QgB,CAAA,EA3QU,OA2QV,CAAA;IAKtC,OAAA,wBAAyB,EAAA;IASU,YAAA,6BAAA;IAoBb,KAAA,sBAAA;IAEd,OAAA,EAAA;MAOA,IAAA,EAAA;QAGG,KAAA,EAAA,MAAA;QAZ6C,MAAA,EAAA,MAAA;MA7BpB,CAAA;IAAoB,CAAA;IAkH7C,QAAA,4BAAuB,EAAA;EAM3B,CAAA,CAAA;EAqBP,QAAA,CAAA,OAAA,EA7WoB,0BA6WC,CAAA,EA7WyB,OA6WzB,CAAA;IAWc,MAAA,gBAAA,0BAAA,CAAA;IAwBkB,OAAA,EAAA;MAAgB,IAAA,EAAA;QAnCrC,KAAA,EAAA,MAAA;QAAgB,MAAA,EAAA,MAAA;MAsE1C,CAAA;IACmB,CAAA;EAAsB,CAAA,CAAA;;;AAEf,UAtWrB,oBAAA,CAsWqB;EAAuB;EAChB,KAAA,CAAA,EAAA,MAAA;EAA8B;EAClC,KAAA,CAAA,EAAA,MAAA;;cAjWnC,iBAAA,YAA6B,aAkWnB,CAAA;EACa,SAAA,oBAAA,EAAA,IAAA;EACb,SAAA,QAAA,GAAA,QAAA;EACa,SAAA,OAAA,EAAA,MAAA;EACG,QAAA,QAAA;EAa3B,iBAAA,QAAA;EAEwB,QAAA,WAAA;EAAO,WAAA,CAAA,OAAA,EAAA,MAAA,EAAA,QAAA,CAAA,EA5WK,oBA4WL;EA8BpB,QAAA,YAAY;EAqIf,UAAuB,CAAA,OAAA,EA3fR,wBA2fQ,CAAA,EA3fmB,OA2fnB,CAAA;WA1fzB;cACG;;;;;iBAEa;;;;;;;;;;UAmGV,2BAAA;;;;cAKX,wBAAA,YAAoC;;;;;;;0CASD;;sBAoBb,kCAAkC;;cAEhD;;;;;;;cAOA;;;;;iBAGG;;;;;;UAyEA,uBAAA;;;;;cAMJ;;;;;;cAqBP,oBAAA,YAAgC;;;;;;;;;0CAWG;;;;;kBAwBkB;MAAgB;;;;;;;KAmC/D,cAAA;+BACmB,sBAAsB;4CACT,sBAAsB;sCAC5B,uBAAuB;6CAChB,8BAA8B;yCAClC,0BAA0B;gBACnD;6BACa;gBACb;;;;;;6BACa;gCACG;;;;;;;;;;;;;MAa3B;;6BAEwB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBA8Bb,YAAA,WAAsB,yBAA8B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAqIvD,QAAM"}
@@ -1,4 +1,5 @@
- import { n as BUILTIN_MODELS, t as Gerbil } from "../gerbil-DoDGHe6Z.mjs";
+ import { n as BUILTIN_MODELS, t as Gerbil } from "../gerbil-BZklpDhM.mjs";
+ import "../chrome-backend-CORwaIyC.mjs";
  import "../utils-CZBZ8dgR.mjs";
  import { t as WHISPER_MODELS } from "../stt-CG_7KB_0.mjs";
  import { t as KOKORO_VOICES } from "../tts-CyHhcLtN.mjs";
@@ -328,6 +329,71 @@ var GerbilTranscriptionModel = class {
  };
  }
  };
+ /** Default embedding models */
+ const EMBEDDING_MODELS = [
+ {
+ id: "all-MiniLM-L6-v2",
+ repo: "Xenova/all-MiniLM-L6-v2",
+ description: "MiniLM L6 v2 - Fast, 384 dimensions",
+ dimensions: 384
+ },
+ {
+ id: "bge-small-en-v1.5",
+ repo: "Xenova/bge-small-en-v1.5",
+ description: "BGE Small EN v1.5 - High quality, 384 dimensions",
+ dimensions: 384
+ },
+ {
+ id: "gte-small",
+ repo: "Xenova/gte-small",
+ description: "GTE Small - General text embeddings, 384 dimensions",
+ dimensions: 384
+ }
+ ];
+ var GerbilEmbeddingModel = class {
+ specificationVersion = "v2";
+ provider = "gerbil";
+ modelId;
+ maxEmbeddingsPerCall = Infinity;
+ supportsParallelCalls = false;
+ instance = null;
+ settings;
+ loadPromise = null;
+ constructor(modelId, settings = {}) {
+ this.modelId = modelId;
+ this.settings = settings;
+ }
+ getRepo() {
+ return EMBEDDING_MODELS.find((m) => m.id === this.modelId)?.repo || this.modelId;
+ }
+ async ensureLoaded() {
+ if (this.instance) return this.instance;
+ if (this.loadPromise) {
+ await this.loadPromise;
+ return this.instance;
+ }
+ this.instance = new Gerbil();
+ return this.instance;
+ }
+ async doEmbed(options) {
+ const g = await this.ensureLoaded();
+ const embeddings = [];
+ let totalTokens = 0;
+ for (const value of options.values) {
+ if (options.abortSignal?.aborted) throw new Error("Embedding aborted");
+ const result = await g.embed(value, {
+ model: this.getRepo(),
+ normalize: this.settings.normalize
+ });
+ embeddings.push(result.vector);
+ totalTokens += Math.ceil(value.length / 4);
+ }
+ return {
+ embeddings,
+ usage: { tokens: totalTokens }
+ };
+ }
+ };
  /**
  * Create a Gerbil provider
  *
@@ -356,13 +422,20 @@ var GerbilTranscriptionModel = class {
  * ```
  */
  function createGerbil(options = {}) {
- const createModel = (modelId, settings = {}) => new GerbilLanguageModel(modelId, settings, options);
+ const modelCache = /* @__PURE__ */ new Map();
+ const createModel = (modelId, settings = {}) => {
+ const cached = modelCache.get(modelId);
+ if (cached) return cached;
+ return new GerbilLanguageModel(modelId, settings, options);
+ };
  const createSpeechModel = (modelId = "kokoro-82m", settings = {}) => new GerbilSpeechModel(modelId, settings);
  const createTranscriptionModel = (modelId = "whisper-tiny.en", settings = {}) => new GerbilTranscriptionModel(modelId, settings);
+ const createEmbeddingModel = (modelId = "all-MiniLM-L6-v2", settings = {}) => new GerbilEmbeddingModel(modelId, settings);
  const provider = ((modelId, settings) => createModel(modelId, settings ?? {}));
  provider.languageModel = createModel;
  provider.speech = createSpeechModel;
  provider.transcription = createTranscriptionModel;
+ provider.embedding = createEmbeddingModel;
  provider.listModels = () => Object.values(BUILTIN_MODELS);
  provider.getModel = (id) => BUILTIN_MODELS[id];
  provider.listVoices = () => KOKORO_VOICES.map((v) => ({
@@ -372,6 +445,18 @@ function createGerbil(options = {}) {
  language: v.language
  }));
  provider.listTranscriptionModels = () => WHISPER_MODELS;
+ provider.listEmbeddingModels = () => EMBEDDING_MODELS;
+ provider.preload = async (modelId, opts) => {
+ if (opts?.keepLoaded ?? false) {
+ const model = new GerbilLanguageModel(modelId, {}, options);
+ await model.ensureLoaded();
+ modelCache.set(modelId, model);
+ } else await new Gerbil().preloadModel(modelId, opts);
+ };
+ provider.isCached = async (modelId) => {
+ if (modelCache.has(modelId)) return true;
+ return new Gerbil().isModelCached(modelId);
+ };
  return provider;
  }
  /**
@@ -417,10 +502,28 @@ function createGerbil(options = {}) {
  * console.log(transcript.text);
  * console.log(transcript.segments); // Timestamped segments
  * ```
+ *
+ * @example Embeddings
+ * ```ts
+ * import { embed, embedMany } from "ai";
+ * import { gerbil } from "gerbil/ai";
+ *
+ * // Single embedding
+ * const { embedding } = await embed({
+ * model: gerbil.embedding(), // all-MiniLM-L6-v2 by default
+ * value: "Hello world",
+ * });
+ *
+ * // Multiple embeddings
+ * const { embeddings } = await embedMany({
+ * model: gerbil.embedding(),
+ * values: ["Hello", "World", "How are you?"],
+ * });
+ * ```
  */
  const gerbil = createGerbil();
  var ai_sdk_default = gerbil;

  //#endregion
- export { createGerbil, ai_sdk_default as default, gerbil };
+ export { EMBEDDING_MODELS, createGerbil, ai_sdk_default as default, gerbil };
  //# sourceMappingURL=ai-sdk.mjs.map
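
Beyond embeddings, the createGerbil() changes above add a provider-level model cache plus preload() and isCached() entry points. A minimal sketch of how those are intended to be used, going only by the type declarations and JSDoc earlier in this diff (the "qwen3-0.6b" model id is taken from the existing examples):

```ts
import { generateText } from "ai";
import { gerbil } from "gerbil/ai";

// Download weights ahead of time, reporting progress via the onProgress option from the diff.
await gerbil.preload("qwen3-0.6b", {
  onProgress: ({ status, progress }) => console.log(status, progress),
});

// Or keep the loaded model resident so the next gerbil("qwen3-0.6b") call reuses it.
await gerbil.preload("qwen3-0.6b", { keepLoaded: true });

console.log(await gerbil.isCached("qwen3-0.6b")); // true once preloaded

const { text } = await generateText({
  model: gerbil("qwen3-0.6b"),
  prompt: "Hello",
});
```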
@@ -1 +1 @@
- {"version":3,"file":"ai-sdk.mjs","names":["system: string | undefined","images: ImageInput[]","warnings: LanguageModelV2CallWarning[]","content: LanguageModelV2Content[]","usage: LanguageModelV2Usage","warnings: SpeechModelV2CallWarning[]","warnings: TranscriptionModelV2CallWarning[]","audioData: Uint8Array"],"sources":["../../src/integrations/ai-sdk.ts"],"sourcesContent":["/**\n * Gerbil AI SDK Provider (V2 Specification)\n *\n * Compatible with AI SDK v5+\n *\n * @example\n * ```ts\n * import { generateText, streamText } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * const { text } = await generateText({\n * model: gerbil(\"qwen3-0.6b\"),\n * prompt: \"Hello world\",\n * });\n * ```\n */\n\nimport type {\n LanguageModelV2,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Prompt,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SpeechModelV2,\n SpeechModelV2CallOptions,\n SpeechModelV2CallWarning,\n TranscriptionModelV2,\n TranscriptionModelV2CallOptions,\n TranscriptionModelV2CallWarning,\n} from \"@ai-sdk/provider\";\n\nimport { Gerbil } from \"../core/gerbil.js\";\nimport { BUILTIN_MODELS } from \"../core/models.js\";\nimport { WHISPER_MODELS } from \"../core/stt.js\";\nimport { KOKORO_VOICES } from \"../core/tts.js\";\nimport type {\n GerbilModelSettings,\n GerbilProviderSettings,\n ImageInput,\n ModelConfig,\n STTModelConfig,\n} from \"../core/types.js\";\n\n// Simple ID generator\nlet idCounter = 0;\nfunction generateId(): string {\n return `gerbil-${Date.now()}-${(idCounter += 1)}`;\n}\n\n// ============================================\n// Language Model Implementation (V2 Spec)\n// ============================================\n\nclass GerbilLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n\n // Gerbil runs locally, no URL support needed\n readonly supportedUrls: Record<string, RegExp[]> = {};\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilModelSettings;\n private readonly providerSettings: GerbilProviderSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(\n modelId: string,\n settings: GerbilModelSettings,\n providerSettings: GerbilProviderSettings,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.providerSettings = providerSettings;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance?.isLoaded()) {\n return this.instance;\n }\n if (this.loadPromise) {\n await this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n this.loadPromise = this.instance.loadModel(this.modelId, {\n device: this.settings.device ?? this.providerSettings.device ?? \"auto\",\n dtype: this.settings.dtype ?? this.providerSettings.dtype ?? 
\"q4\",\n });\n await this.loadPromise;\n return this.instance;\n }\n\n private convertPrompt(prompt: LanguageModelV2Prompt): {\n system?: string;\n user: string;\n images: ImageInput[];\n } {\n let system: string | undefined;\n let user = \"\";\n const images: ImageInput[] = [];\n\n for (const msg of prompt) {\n if (msg.role === \"system\") {\n system = msg.content;\n } else if (msg.role === \"user\") {\n for (const part of msg.content) {\n if (part.type === \"text\") {\n user += part.text;\n } else if ((part as any).type === \"image\") {\n // AI SDK v5 image part - can be URL, base64, or Uint8Array\n const imgPart = part as any;\n if (imgPart.image instanceof URL) {\n images.push({ source: imgPart.image.toString() });\n } else if (typeof imgPart.image === \"string\") {\n // base64 or URL string\n images.push({ source: imgPart.image });\n } else if (imgPart.image instanceof Uint8Array) {\n // Convert Uint8Array to base64 data URI\n const base64 = btoa(String.fromCharCode(...imgPart.image));\n const mimeType = imgPart.mimeType || \"image/png\";\n images.push({ source: `data:${mimeType};base64,${base64}` });\n }\n }\n }\n } else if (msg.role === \"assistant\") {\n for (const part of msg.content) {\n if (part.type === \"text\") {\n user += `\\n\\nAssistant: ${part.text}`;\n }\n }\n } else if (msg.role === \"tool\") {\n for (const part of msg.content) {\n user += `\\n\\nTool (${part.toolName}): ${JSON.stringify(part)}`;\n }\n }\n }\n\n return { system, user, images };\n }\n\n private mapFinishReason(reason: string): LanguageModelV2FinishReason {\n if (reason === \"stop\") {\n return \"stop\";\n }\n if (reason === \"length\") {\n return \"length\";\n }\n if (reason === \"error\") {\n return \"error\";\n }\n return \"other\";\n }\n\n async doGenerate(options: LanguageModelV2CallOptions) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n const { system, user, images } = this.convertPrompt(options.prompt);\n\n const result = await g.generate(user, {\n maxTokens: options.maxOutputTokens,\n temperature: options.temperature,\n topP: options.topP,\n topK: options.topK,\n system,\n thinking: this.settings.thinking,\n stopSequences: options.stopSequences,\n images: images.length > 0 ? images : undefined,\n });\n\n // Build V2 content array\n const content: LanguageModelV2Content[] = [];\n\n // Add reasoning if thinking mode was enabled\n if (result.thinking) {\n content.push({\n type: \"reasoning\",\n text: result.thinking,\n });\n }\n\n // Add main text response\n content.push({\n type: \"text\",\n text: result.text,\n });\n\n const usage: LanguageModelV2Usage = {\n inputTokens: 0,\n outputTokens: result.tokensGenerated,\n totalTokens: result.tokensGenerated,\n };\n\n return {\n content,\n finishReason: this.mapFinishReason(result.finishReason),\n usage,\n request: { body: { model: this.modelId, prompt: user } },\n warnings,\n };\n }\n\n async doStream(options: LanguageModelV2CallOptions) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n const { system, user, images } = this.convertPrompt(options.prompt);\n\n const streamGen = g.stream(user, {\n maxTokens: options.maxOutputTokens,\n temperature: options.temperature,\n topP: options.topP,\n topK: options.topK,\n system,\n thinking: this.settings.thinking,\n stopSequences: options.stopSequences,\n images: images.length > 0 ? 
images : undefined,\n });\n\n let tokens = 0;\n const textId = generateId();\n\n const stream = new ReadableStream<LanguageModelV2StreamPart>({\n async start(controller) {\n try {\n // V2: Send stream-start event first\n controller.enqueue({\n type: \"stream-start\",\n warnings,\n });\n\n // V2: Send text-start before text deltas\n controller.enqueue({\n type: \"text-start\",\n id: textId,\n });\n\n for await (const chunk of streamGen) {\n tokens += 1;\n // V2: Use 'text-delta' with id and delta\n controller.enqueue({\n type: \"text-delta\",\n id: textId,\n delta: chunk,\n });\n }\n\n // V2: Send text-end after all deltas\n controller.enqueue({\n type: \"text-end\",\n id: textId,\n });\n\n // V2: Send finish event\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\",\n usage: {\n inputTokens: 0,\n outputTokens: tokens,\n totalTokens: tokens,\n },\n });\n controller.close();\n } catch (error) {\n controller.enqueue({ type: \"error\", error });\n controller.close();\n }\n },\n });\n\n return {\n stream,\n request: { body: { model: this.modelId, prompt: user } },\n };\n }\n}\n\n// ============================================\n// Speech Model Implementation (V2 Spec)\n// ============================================\n\n/** Settings for Gerbil speech model */\nexport interface GerbilSpeechSettings {\n /** Default voice ID (default: \"af_heart\") */\n voice?: string;\n /** Speech speed multiplier (default: 1.0) */\n speed?: number;\n}\n\nclass GerbilSpeechModel implements SpeechModelV2 {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilSpeechSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(modelId: string, settings: GerbilSpeechSettings = {}) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance?.isTTSLoaded()) {\n return this.instance;\n }\n if (this.loadPromise) {\n await this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n this.loadPromise = this.instance.ensureTTSLoaded();\n await this.loadPromise;\n return this.instance;\n }\n\n async doGenerate(options: SpeechModelV2CallOptions): Promise<{\n audio: Uint8Array;\n warnings: SpeechModelV2CallWarning[];\n request?: { body?: unknown };\n response: { timestamp: Date; modelId: string };\n }> {\n const warnings: SpeechModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n\n // Determine voice - use options.voice, fall back to settings, then default\n let voice = options.voice || this.settings.voice || \"af_heart\";\n\n // Validate voice exists\n const validVoice = KOKORO_VOICES.find((v) => v.id === voice);\n if (!validVoice) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"voice\",\n details: `Unknown voice \"${voice}\", using default \"af_heart\"`,\n });\n voice = \"af_heart\";\n }\n\n // Determine speed\n const speed = options.speed ?? this.settings.speed ?? 
1.0;\n\n // Handle unsupported options\n if (options.outputFormat && options.outputFormat !== \"wav\" && options.outputFormat !== \"raw\") {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"outputFormat\",\n details: `Gerbil TTS only supports \"wav\" and \"raw\" formats, got \"${options.outputFormat}\"`,\n });\n }\n\n if (options.instructions) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"instructions\",\n details: \"Gerbil TTS does not support instructions parameter\",\n });\n }\n\n // Generate speech\n const result = await g.speak(options.text, { voice, speed });\n\n // Convert Float32Array to WAV format Uint8Array\n const audioData = this.float32ToWav(result.audio, result.sampleRate);\n\n return {\n audio: audioData,\n warnings,\n request: { body: { text: options.text, voice, speed } },\n response: {\n timestamp: new Date(),\n modelId: this.modelId,\n },\n };\n }\n\n /**\n * Convert Float32Array audio to WAV format Uint8Array\n */\n private float32ToWav(audio: Float32Array, sampleRate: number): Uint8Array {\n const buffer = new ArrayBuffer(44 + audio.length * 2);\n const view = new DataView(buffer);\n\n // WAV header\n const writeString = (offset: number, str: string) => {\n for (let i = 0; i < str.length; i++) {\n view.setUint8(offset + i, str.charCodeAt(i));\n }\n };\n\n writeString(0, \"RIFF\");\n view.setUint32(4, 36 + audio.length * 2, true);\n writeString(8, \"WAVE\");\n writeString(12, \"fmt \");\n view.setUint32(16, 16, true); // Subchunk1Size\n view.setUint16(20, 1, true); // AudioFormat (PCM)\n view.setUint16(22, 1, true); // NumChannels (mono)\n view.setUint32(24, sampleRate, true); // SampleRate\n view.setUint32(28, sampleRate * 2, true); // ByteRate\n view.setUint16(32, 2, true); // BlockAlign\n view.setUint16(34, 16, true); // BitsPerSample\n writeString(36, \"data\");\n view.setUint32(40, audio.length * 2, true);\n\n // Audio data (convert float32 to int16)\n for (let i = 0; i < audio.length; i++) {\n const s = Math.max(-1, Math.min(1, audio[i]));\n view.setInt16(44 + i * 2, Math.round(s * 32767), true);\n }\n\n return new Uint8Array(buffer);\n }\n}\n\n// ============================================\n// Transcription Model Implementation (V2 Spec)\n// ============================================\n\n/** Settings for Gerbil transcription model */\nexport interface GerbilTranscriptionSettings {\n /** Default language code (ISO-639-1) for transcription */\n language?: string;\n}\n\nclass GerbilTranscriptionModel implements TranscriptionModelV2 {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilTranscriptionSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(modelId: string, settings: GerbilTranscriptionSettings = {}) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance?.isSTTLoaded()) {\n return this.instance;\n }\n if (this.loadPromise) {\n await this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n this.loadPromise = this.instance.loadSTT(this.modelId);\n await this.loadPromise;\n return this.instance;\n }\n\n async doGenerate(options: TranscriptionModelV2CallOptions): Promise<{\n text: string;\n segments: Array<{\n text: string;\n startSecond: number;\n endSecond: number;\n }>;\n language: string | undefined;\n durationInSeconds: number | undefined;\n warnings: 
TranscriptionModelV2CallWarning[];\n request?: { body?: string };\n response: {\n timestamp: Date;\n modelId: string;\n };\n }> {\n const warnings: TranscriptionModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n\n // Convert audio to Uint8Array\n let audioData: Uint8Array;\n if (typeof options.audio === \"string\") {\n // Base64 encoded - decode it\n const binaryString = atob(options.audio);\n audioData = new Uint8Array(binaryString.length);\n for (let i = 0; i < binaryString.length; i++) {\n audioData[i] = binaryString.charCodeAt(i);\n }\n } else {\n audioData = options.audio;\n }\n\n // Check media type - we only support WAV natively\n const mediaType = options.mediaType?.toLowerCase() || \"\";\n if (mediaType && !mediaType.includes(\"wav\") && !mediaType.includes(\"wave\")) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"mediaType\",\n details: `Gerbil STT natively supports WAV format. Got \"${options.mediaType}\". Audio may not decode correctly.`,\n });\n }\n\n // Determine language from provider options or settings\n const providerOpts = options.providerOptions?.gerbil as Record<string, unknown> | undefined;\n const language = (providerOpts?.language as string) || this.settings.language;\n\n // Transcribe with timestamps to get segments\n const result = await g.transcribe(audioData, {\n language,\n timestamps: true,\n });\n\n // Map segments to V2 format\n const segments = (result.segments || []).map((seg) => ({\n text: seg.text,\n startSecond: seg.start,\n endSecond: seg.end,\n }));\n\n return {\n text: result.text,\n segments,\n language: result.language,\n durationInSeconds: result.duration,\n warnings,\n request: {\n body: JSON.stringify({\n model: this.modelId,\n mediaType: options.mediaType,\n language,\n }),\n },\n response: {\n timestamp: new Date(),\n modelId: this.modelId,\n },\n };\n }\n}\n\n// ============================================\n// Provider Factory\n// ============================================\n\nexport type GerbilProvider = {\n (modelId: string, settings?: GerbilModelSettings): GerbilLanguageModel;\n languageModel(modelId: string, settings?: GerbilModelSettings): GerbilLanguageModel;\n speech(modelId?: string, settings?: GerbilSpeechSettings): GerbilSpeechModel;\n transcription(modelId?: string, settings?: GerbilTranscriptionSettings): GerbilTranscriptionModel;\n listModels(): ModelConfig[];\n getModel(modelId: string): ModelConfig | undefined;\n listVoices(): Array<{ id: string; name: string; gender: string; language: string }>;\n listTranscriptionModels(): STTModelConfig[];\n};\n\n/**\n * Create a Gerbil provider\n *\n * @example\n * ```ts\n * const local = createGerbil({ device: \"gpu\", dtype: \"q4\" });\n *\n * // Text generation\n * const { text } = await generateText({\n * model: local(\"qwen3-0.6b\"),\n * prompt: \"Hello\",\n * });\n *\n * // Speech generation\n * const audio = await generateSpeech({\n * model: local.speech(),\n * text: \"Hello world!\",\n * voice: \"af_heart\",\n * });\n *\n * // Transcription\n * const transcript = await transcribe({\n * model: local.transcription(),\n * audio: audioBuffer,\n * });\n * ```\n */\nexport function createGerbil(options: GerbilProviderSettings = {}): GerbilProvider {\n const createModel = (modelId: string, settings: GerbilModelSettings = {}) =>\n new GerbilLanguageModel(modelId, settings, options);\n\n const createSpeechModel = (modelId = \"kokoro-82m\", settings: GerbilSpeechSettings = {}) =>\n new GerbilSpeechModel(modelId, settings);\n\n const 
createTranscriptionModel = (\n modelId = \"whisper-tiny.en\",\n settings: GerbilTranscriptionSettings = {},\n ) => new GerbilTranscriptionModel(modelId, settings);\n\n const provider = ((modelId: string, settings?: GerbilModelSettings) =>\n createModel(modelId, settings ?? {})) as GerbilProvider;\n\n provider.languageModel = createModel;\n provider.speech = createSpeechModel;\n provider.transcription = createTranscriptionModel;\n provider.listModels = () => Object.values(BUILTIN_MODELS);\n provider.getModel = (id: string) => BUILTIN_MODELS[id];\n provider.listVoices = () =>\n KOKORO_VOICES.map((v) => ({\n id: v.id,\n name: v.name,\n gender: v.gender,\n language: v.language,\n }));\n provider.listTranscriptionModels = () => WHISPER_MODELS;\n\n return provider;\n}\n\n/**\n * Default Gerbil provider\n *\n * @example Text Generation\n * ```ts\n * import { generateText } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * const { text } = await generateText({\n * model: gerbil(\"qwen3-0.6b\"),\n * prompt: \"Hello\",\n * });\n * ```\n *\n * @example Speech Generation\n * ```ts\n * import { experimental_generateSpeech as generateSpeech } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * const audio = await generateSpeech({\n * model: gerbil.speech(),\n * text: \"Hello world!\",\n * voice: \"af_heart\", // Or \"bf_emma\", \"am_fenrir\", etc.\n * });\n *\n * // Access audio data\n * const audioData = audio.audioData; // Uint8Array (WAV format)\n * ```\n *\n * @example Transcription\n * ```ts\n * import { experimental_transcribe as transcribe } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n * import { readFile } from \"fs/promises\";\n *\n * const transcript = await transcribe({\n * model: gerbil.transcription(), // whisper-tiny.en by default\n * audio: await readFile(\"audio.wav\"),\n * });\n *\n * console.log(transcript.text);\n * console.log(transcript.segments); // Timestamped segments\n * ```\n */\nexport const gerbil = createGerbil();\n\nexport default 
gerbil;\n"],"mappings":";;;;;;AA+CA,IAAI,YAAY;AAChB,SAAS,aAAqB;AAC5B,QAAO,UAAU,KAAK,KAAK,CAAC,GAAI,aAAa;;AAO/C,IAAM,sBAAN,MAAqD;CACnD,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CAGT,AAAS,gBAA0C,EAAE;CAErD,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YACE,SACA,UACA,kBACA;AACA,OAAK,UAAU;AACf,OAAK,WAAW;AAChB,OAAK,mBAAmB;;CAG1B,MAAc,eAAgC;AAC5C,MAAI,KAAK,UAAU,UAAU,CAC3B,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAC5B,OAAK,cAAc,KAAK,SAAS,UAAU,KAAK,SAAS;GACvD,QAAQ,KAAK,SAAS,UAAU,KAAK,iBAAiB,UAAU;GAChE,OAAO,KAAK,SAAS,SAAS,KAAK,iBAAiB,SAAS;GAC9D,CAAC;AACF,QAAM,KAAK;AACX,SAAO,KAAK;;CAGd,AAAQ,cAAc,QAIpB;EACA,IAAIA;EACJ,IAAI,OAAO;EACX,MAAMC,SAAuB,EAAE;AAE/B,OAAK,MAAM,OAAO,OAChB,KAAI,IAAI,SAAS,SACf,UAAS,IAAI;WACJ,IAAI,SAAS,QACtB;QAAK,MAAM,QAAQ,IAAI,QACrB,KAAI,KAAK,SAAS,OAChB,SAAQ,KAAK;YACH,KAAa,SAAS,SAAS;IAEzC,MAAM,UAAU;AAChB,QAAI,QAAQ,iBAAiB,IAC3B,QAAO,KAAK,EAAE,QAAQ,QAAQ,MAAM,UAAU,EAAE,CAAC;aACxC,OAAO,QAAQ,UAAU,SAElC,QAAO,KAAK,EAAE,QAAQ,QAAQ,OAAO,CAAC;aAC7B,QAAQ,iBAAiB,YAAY;KAE9C,MAAM,SAAS,KAAK,OAAO,aAAa,GAAG,QAAQ,MAAM,CAAC;KAC1D,MAAM,WAAW,QAAQ,YAAY;AACrC,YAAO,KAAK,EAAE,QAAQ,QAAQ,SAAS,UAAU,UAAU,CAAC;;;aAIzD,IAAI,SAAS,aACtB;QAAK,MAAM,QAAQ,IAAI,QACrB,KAAI,KAAK,SAAS,OAChB,SAAQ,kBAAkB,KAAK;aAG1B,IAAI,SAAS,OACtB,MAAK,MAAM,QAAQ,IAAI,QACrB,SAAQ,aAAa,KAAK,SAAS,KAAK,KAAK,UAAU,KAAK;AAKlE,SAAO;GAAE;GAAQ;GAAM;GAAQ;;CAGjC,AAAQ,gBAAgB,QAA6C;AACnE,MAAI,WAAW,OACb,QAAO;AAET,MAAI,WAAW,SACb,QAAO;AAET,MAAI,WAAW,QACb,QAAO;AAET,SAAO;;CAGT,MAAM,WAAW,SAAqC;EACpD,MAAMC,WAAyC,EAAE;EACjD,MAAM,IAAI,MAAM,KAAK,cAAc;EACnC,MAAM,EAAE,QAAQ,MAAM,WAAW,KAAK,cAAc,QAAQ,OAAO;EAEnE,MAAM,SAAS,MAAM,EAAE,SAAS,MAAM;GACpC,WAAW,QAAQ;GACnB,aAAa,QAAQ;GACrB,MAAM,QAAQ;GACd,MAAM,QAAQ;GACd;GACA,UAAU,KAAK,SAAS;GACxB,eAAe,QAAQ;GACvB,QAAQ,OAAO,SAAS,IAAI,SAAS;GACtC,CAAC;EAGF,MAAMC,UAAoC,EAAE;AAG5C,MAAI,OAAO,SACT,SAAQ,KAAK;GACX,MAAM;GACN,MAAM,OAAO;GACd,CAAC;AAIJ,UAAQ,KAAK;GACX,MAAM;GACN,MAAM,OAAO;GACd,CAAC;EAEF,MAAMC,QAA8B;GAClC,aAAa;GACb,cAAc,OAAO;GACrB,aAAa,OAAO;GACrB;AAED,SAAO;GACL;GACA,cAAc,KAAK,gBAAgB,OAAO,aAAa;GACvD;GACA,SAAS,EAAE,MAAM;IAAE,OAAO,KAAK;IAAS,QAAQ;IAAM,EAAE;GACxD;GACD;;CAGH,MAAM,SAAS,SAAqC;EAClD,MAAMF,WAAyC,EAAE;EACjD,MAAM,IAAI,MAAM,KAAK,cAAc;EACnC,MAAM,EAAE,QAAQ,MAAM,WAAW,KAAK,cAAc,QAAQ,OAAO;EAEnE,MAAM,YAAY,EAAE,OAAO,MAAM;GAC/B,WAAW,QAAQ;GACnB,aAAa,QAAQ;GACrB,MAAM,QAAQ;GACd,MAAM,QAAQ;GACd;GACA,UAAU,KAAK,SAAS;GACxB,eAAe,QAAQ;GACvB,QAAQ,OAAO,SAAS,IAAI,SAAS;GACtC,CAAC;EAEF,IAAI,SAAS;EACb,MAAM,SAAS,YAAY;AAmD3B,SAAO;GACL,QAlDa,IAAI,eAA0C,EAC3D,MAAM,MAAM,YAAY;AACtB,QAAI;AAEF,gBAAW,QAAQ;MACjB,MAAM;MACN;MACD,CAAC;AAGF,gBAAW,QAAQ;MACjB,MAAM;MACN,IAAI;MACL,CAAC;AAEF,gBAAW,MAAM,SAAS,WAAW;AACnC,gBAAU;AAEV,iBAAW,QAAQ;OACjB,MAAM;OACN,IAAI;OACJ,OAAO;OACR,CAAC;;AAIJ,gBAAW,QAAQ;MACjB,MAAM;MACN,IAAI;MACL,CAAC;AAGF,gBAAW,QAAQ;MACjB,MAAM;MACN,cAAc;MACd,OAAO;OACL,aAAa;OACb,cAAc;OACd,aAAa;OACd;MACF,CAAC;AACF,gBAAW,OAAO;aACX,OAAO;AACd,gBAAW,QAAQ;MAAE,MAAM;MAAS;MAAO,CAAC;AAC5C,gBAAW,OAAO;;MAGvB,CAAC;GAIA,SAAS,EAAE,MAAM;IAAE,OAAO,KAAK;IAAS,QAAQ;IAAM,EAAE;GACzD;;;AAgBL,IAAM,oBAAN,MAAiD;CAC/C,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CAET,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YAAY,SAAiB,WAAiC,EAAE,EAAE;AAChE,OAAK,UAAU;AACf,OAAK,WAAW;;CAGlB,MAAc,eAAgC;AAC5C,MAAI,KAAK,UAAU,aAAa,CAC9B,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAC5B,OAAK,cAAc,KAAK,SAAS,iBAAiB;AAClD,QAAM,KAAK;AACX,SAAO,KAAK;;CAGd,MAAM,WAAW,SAKd;EACD,MAAMG,WAAuC,EAAE;EAC/C,MAAM,IAAI,MAAM,KAAK,cAAc;EAGnC,IAAI,QAAQ,QAAQ,SAAS,KAAK,SAAS,SAAS;AAIpD,MAAI,CADe,cAAc,MAAM,MAAM,EAAE,OAAO,MAAM,EAC3C;AACf,YAAS,KA
AK;IACZ,MAAM;IACN,SAAS;IACT,SAAS,kBAAkB,MAAM;IAClC,CAAC;AACF,WAAQ;;EAIV,MAAM,QAAQ,QAAQ,SAAS,KAAK,SAAS,SAAS;AAGtD,MAAI,QAAQ,gBAAgB,QAAQ,iBAAiB,SAAS,QAAQ,iBAAiB,MACrF,UAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,SAAS,0DAA0D,QAAQ,aAAa;GACzF,CAAC;AAGJ,MAAI,QAAQ,aACV,UAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,SAAS;GACV,CAAC;EAIJ,MAAM,SAAS,MAAM,EAAE,MAAM,QAAQ,MAAM;GAAE;GAAO;GAAO,CAAC;AAK5D,SAAO;GACL,OAHgB,KAAK,aAAa,OAAO,OAAO,OAAO,WAAW;GAIlE;GACA,SAAS,EAAE,MAAM;IAAE,MAAM,QAAQ;IAAM;IAAO;IAAO,EAAE;GACvD,UAAU;IACR,2BAAW,IAAI,MAAM;IACrB,SAAS,KAAK;IACf;GACF;;;;;CAMH,AAAQ,aAAa,OAAqB,YAAgC;EACxE,MAAM,yBAAS,IAAI,YAAY,KAAK,MAAM,SAAS,EAAE;EACrD,MAAM,OAAO,IAAI,SAAS,OAAO;EAGjC,MAAM,eAAe,QAAgB,QAAgB;AACnD,QAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,MAAK,SAAS,SAAS,GAAG,IAAI,WAAW,EAAE,CAAC;;AAIhD,cAAY,GAAG,OAAO;AACtB,OAAK,UAAU,GAAG,KAAK,MAAM,SAAS,GAAG,KAAK;AAC9C,cAAY,GAAG,OAAO;AACtB,cAAY,IAAI,OAAO;AACvB,OAAK,UAAU,IAAI,IAAI,KAAK;AAC5B,OAAK,UAAU,IAAI,GAAG,KAAK;AAC3B,OAAK,UAAU,IAAI,GAAG,KAAK;AAC3B,OAAK,UAAU,IAAI,YAAY,KAAK;AACpC,OAAK,UAAU,IAAI,aAAa,GAAG,KAAK;AACxC,OAAK,UAAU,IAAI,GAAG,KAAK;AAC3B,OAAK,UAAU,IAAI,IAAI,KAAK;AAC5B,cAAY,IAAI,OAAO;AACvB,OAAK,UAAU,IAAI,MAAM,SAAS,GAAG,KAAK;AAG1C,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,CAAC;AAC7C,QAAK,SAAS,KAAK,IAAI,GAAG,KAAK,MAAM,IAAI,MAAM,EAAE,KAAK;;AAGxD,SAAO,IAAI,WAAW,OAAO;;;AAcjC,IAAM,2BAAN,MAA+D;CAC7D,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CAET,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YAAY,SAAiB,WAAwC,EAAE,EAAE;AACvE,OAAK,UAAU;AACf,OAAK,WAAW;;CAGlB,MAAc,eAAgC;AAC5C,MAAI,KAAK,UAAU,aAAa,CAC9B,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAC5B,OAAK,cAAc,KAAK,SAAS,QAAQ,KAAK,QAAQ;AACtD,QAAM,KAAK;AACX,SAAO,KAAK;;CAGd,MAAM,WAAW,SAed;EACD,MAAMC,WAA8C,EAAE;EACtD,MAAM,IAAI,MAAM,KAAK,cAAc;EAGnC,IAAIC;AACJ,MAAI,OAAO,QAAQ,UAAU,UAAU;GAErC,MAAM,eAAe,KAAK,QAAQ,MAAM;AACxC,eAAY,IAAI,WAAW,aAAa,OAAO;AAC/C,QAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,WAAU,KAAK,aAAa,WAAW,EAAE;QAG3C,aAAY,QAAQ;EAItB,MAAM,YAAY,QAAQ,WAAW,aAAa,IAAI;AACtD,MAAI,aAAa,CAAC,UAAU,SAAS,MAAM,IAAI,CAAC,UAAU,SAAS,OAAO,CACxE,UAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,SAAS,iDAAiD,QAAQ,UAAU;GAC7E,CAAC;EAKJ,MAAM,YADe,QAAQ,iBAAiB,SACd,YAAuB,KAAK,SAAS;EAGrE,MAAM,SAAS,MAAM,EAAE,WAAW,WAAW;GAC3C;GACA,YAAY;GACb,CAAC;EAGF,MAAM,YAAY,OAAO,YAAY,EAAE,EAAE,KAAK,SAAS;GACrD,MAAM,IAAI;GACV,aAAa,IAAI;GACjB,WAAW,IAAI;GAChB,EAAE;AAEH,SAAO;GACL,MAAM,OAAO;GACb;GACA,UAAU,OAAO;GACjB,mBAAmB,OAAO;GAC1B;GACA,SAAS,EACP,MAAM,KAAK,UAAU;IACnB,OAAO,KAAK;IACZ,WAAW,QAAQ;IACnB;IACD,CAAC,EACH;GACD,UAAU;IACR,2BAAW,IAAI,MAAM;IACrB,SAAS,KAAK;IACf;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8CL,SAAgB,aAAa,UAAkC,EAAE,EAAkB;CACjF,MAAM,eAAe,SAAiB,WAAgC,EAAE,KACtE,IAAI,oBAAoB,SAAS,UAAU,QAAQ;CAErD,MAAM,qBAAqB,UAAU,cAAc,WAAiC,EAAE,KACpF,IAAI,kBAAkB,SAAS,SAAS;CAE1C,MAAM,4BACJ,UAAU,mBACV,WAAwC,EAAE,KACvC,IAAI,yBAAyB,SAAS,SAAS;CAEpD,MAAM,aAAa,SAAiB,aAClC,YAAY,SAAS,YAAY,EAAE,CAAC;AAEtC,UAAS,gBAAgB;AACzB,UAAS,SAAS;AAClB,UAAS,gBAAgB;AACzB,UAAS,mBAAmB,OAAO,OAAO,eAAe;AACzD,UAAS,YAAY,OAAe,eAAe;AACnD,UAAS,mBACP,cAAc,KAAK,OAAO;EACxB,IAAI,EAAE;EACN,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,UAAU,EAAE;EACb,EAAE;AACL,UAAS,gCAAgC;AAEzC,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CT,MAAa,SAAS,cAAc;AAEpC,qBAAe"}
+ {"version":3,"file":"ai-sdk.mjs","names":["system: string | undefined","images: ImageInput[]","warnings: LanguageModelV2CallWarning[]","content: LanguageModelV2Content[]","usage: LanguageModelV2Usage","warnings: SpeechModelV2CallWarning[]","warnings: TranscriptionModelV2CallWarning[]","audioData: Uint8Array","embeddings: number[][]"],"sources":["../../src/integrations/ai-sdk.ts"],"sourcesContent":["/**\n * Gerbil AI SDK Provider (V2 Specification)\n *\n * Compatible with AI SDK v5+\n *\n * @example\n * ```ts\n * import { generateText, streamText } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * const { text } = await generateText({\n * model: gerbil(\"qwen3-0.6b\"),\n * prompt: \"Hello world\",\n * });\n * ```\n */\n\nimport type {\n EmbeddingModelV2,\n LanguageModelV2,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Prompt,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SpeechModelV2,\n SpeechModelV2CallOptions,\n SpeechModelV2CallWarning,\n TranscriptionModelV2,\n TranscriptionModelV2CallOptions,\n TranscriptionModelV2CallWarning,\n} from \"@ai-sdk/provider\";\n\nimport { Gerbil } from \"../core/gerbil.js\";\nimport { BUILTIN_MODELS } from \"../core/models.js\";\nimport { WHISPER_MODELS } from \"../core/stt.js\";\nimport { KOKORO_VOICES } from \"../core/tts.js\";\nimport type {\n GerbilModelSettings,\n GerbilProviderSettings,\n ImageInput,\n ModelConfig,\n STTModelConfig,\n} from \"../core/types.js\";\n\n// Simple ID generator\nlet idCounter = 0;\nfunction generateId(): string {\n return `gerbil-${Date.now()}-${(idCounter += 1)}`;\n}\n\n// ============================================\n// Language Model Implementation (V2 Spec)\n// ============================================\n\nclass GerbilLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n\n // Gerbil runs locally, no URL support needed\n readonly supportedUrls: Record<string, RegExp[]> = {};\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilModelSettings;\n private readonly providerSettings: GerbilProviderSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(\n modelId: string,\n settings: GerbilModelSettings,\n providerSettings: GerbilProviderSettings,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.providerSettings = providerSettings;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance?.isLoaded()) {\n return this.instance;\n }\n if (this.loadPromise) {\n await this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n this.loadPromise = this.instance.loadModel(this.modelId, {\n device: this.settings.device ?? this.providerSettings.device ?? \"auto\",\n dtype: this.settings.dtype ?? this.providerSettings.dtype ?? 
\"q4\",\n });\n await this.loadPromise;\n return this.instance;\n }\n\n private convertPrompt(prompt: LanguageModelV2Prompt): {\n system?: string;\n user: string;\n images: ImageInput[];\n } {\n let system: string | undefined;\n let user = \"\";\n const images: ImageInput[] = [];\n\n for (const msg of prompt) {\n if (msg.role === \"system\") {\n system = msg.content;\n } else if (msg.role === \"user\") {\n for (const part of msg.content) {\n if (part.type === \"text\") {\n user += part.text;\n } else if ((part as any).type === \"image\") {\n // AI SDK v5 image part - can be URL, base64, or Uint8Array\n const imgPart = part as any;\n if (imgPart.image instanceof URL) {\n images.push({ source: imgPart.image.toString() });\n } else if (typeof imgPart.image === \"string\") {\n // base64 or URL string\n images.push({ source: imgPart.image });\n } else if (imgPart.image instanceof Uint8Array) {\n // Convert Uint8Array to base64 data URI\n const base64 = btoa(String.fromCharCode(...imgPart.image));\n const mimeType = imgPart.mimeType || \"image/png\";\n images.push({ source: `data:${mimeType};base64,${base64}` });\n }\n }\n }\n } else if (msg.role === \"assistant\") {\n for (const part of msg.content) {\n if (part.type === \"text\") {\n user += `\\n\\nAssistant: ${part.text}`;\n }\n }\n } else if (msg.role === \"tool\") {\n for (const part of msg.content) {\n user += `\\n\\nTool (${part.toolName}): ${JSON.stringify(part)}`;\n }\n }\n }\n\n return { system, user, images };\n }\n\n private mapFinishReason(reason: string): LanguageModelV2FinishReason {\n if (reason === \"stop\") {\n return \"stop\";\n }\n if (reason === \"length\") {\n return \"length\";\n }\n if (reason === \"error\") {\n return \"error\";\n }\n return \"other\";\n }\n\n async doGenerate(options: LanguageModelV2CallOptions) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n const { system, user, images } = this.convertPrompt(options.prompt);\n\n const result = await g.generate(user, {\n maxTokens: options.maxOutputTokens,\n temperature: options.temperature,\n topP: options.topP,\n topK: options.topK,\n system,\n thinking: this.settings.thinking,\n stopSequences: options.stopSequences,\n images: images.length > 0 ? images : undefined,\n });\n\n // Build V2 content array\n const content: LanguageModelV2Content[] = [];\n\n // Add reasoning if thinking mode was enabled\n if (result.thinking) {\n content.push({\n type: \"reasoning\",\n text: result.thinking,\n });\n }\n\n // Add main text response\n content.push({\n type: \"text\",\n text: result.text,\n });\n\n const usage: LanguageModelV2Usage = {\n inputTokens: 0,\n outputTokens: result.tokensGenerated,\n totalTokens: result.tokensGenerated,\n };\n\n return {\n content,\n finishReason: this.mapFinishReason(result.finishReason),\n usage,\n request: { body: { model: this.modelId, prompt: user } },\n warnings,\n };\n }\n\n async doStream(options: LanguageModelV2CallOptions) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n const { system, user, images } = this.convertPrompt(options.prompt);\n\n const streamGen = g.stream(user, {\n maxTokens: options.maxOutputTokens,\n temperature: options.temperature,\n topP: options.topP,\n topK: options.topK,\n system,\n thinking: this.settings.thinking,\n stopSequences: options.stopSequences,\n images: images.length > 0 ? 
images : undefined,\n });\n\n let tokens = 0;\n const textId = generateId();\n\n const stream = new ReadableStream<LanguageModelV2StreamPart>({\n async start(controller) {\n try {\n // V2: Send stream-start event first\n controller.enqueue({\n type: \"stream-start\",\n warnings,\n });\n\n // V2: Send text-start before text deltas\n controller.enqueue({\n type: \"text-start\",\n id: textId,\n });\n\n for await (const chunk of streamGen) {\n tokens += 1;\n // V2: Use 'text-delta' with id and delta\n controller.enqueue({\n type: \"text-delta\",\n id: textId,\n delta: chunk,\n });\n }\n\n // V2: Send text-end after all deltas\n controller.enqueue({\n type: \"text-end\",\n id: textId,\n });\n\n // V2: Send finish event\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\",\n usage: {\n inputTokens: 0,\n outputTokens: tokens,\n totalTokens: tokens,\n },\n });\n controller.close();\n } catch (error) {\n controller.enqueue({ type: \"error\", error });\n controller.close();\n }\n },\n });\n\n return {\n stream,\n request: { body: { model: this.modelId, prompt: user } },\n };\n }\n}\n\n// ============================================\n// Speech Model Implementation (V2 Spec)\n// ============================================\n\n/** Settings for Gerbil speech model */\nexport interface GerbilSpeechSettings {\n /** Default voice ID (default: \"af_heart\") */\n voice?: string;\n /** Speech speed multiplier (default: 1.0) */\n speed?: number;\n}\n\nclass GerbilSpeechModel implements SpeechModelV2 {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilSpeechSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(modelId: string, settings: GerbilSpeechSettings = {}) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance?.isTTSLoaded()) {\n return this.instance;\n }\n if (this.loadPromise) {\n await this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n this.loadPromise = this.instance.ensureTTSLoaded();\n await this.loadPromise;\n return this.instance;\n }\n\n async doGenerate(options: SpeechModelV2CallOptions): Promise<{\n audio: Uint8Array;\n warnings: SpeechModelV2CallWarning[];\n request?: { body?: unknown };\n response: { timestamp: Date; modelId: string };\n }> {\n const warnings: SpeechModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n\n // Determine voice - use options.voice, fall back to settings, then default\n let voice = options.voice || this.settings.voice || \"af_heart\";\n\n // Validate voice exists\n const validVoice = KOKORO_VOICES.find((v) => v.id === voice);\n if (!validVoice) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"voice\",\n details: `Unknown voice \"${voice}\", using default \"af_heart\"`,\n });\n voice = \"af_heart\";\n }\n\n // Determine speed\n const speed = options.speed ?? this.settings.speed ?? 
1.0;\n\n // Handle unsupported options\n if (options.outputFormat && options.outputFormat !== \"wav\" && options.outputFormat !== \"raw\") {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"outputFormat\",\n details: `Gerbil TTS only supports \"wav\" and \"raw\" formats, got \"${options.outputFormat}\"`,\n });\n }\n\n if (options.instructions) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"instructions\",\n details: \"Gerbil TTS does not support instructions parameter\",\n });\n }\n\n // Generate speech\n const result = await g.speak(options.text, { voice, speed });\n\n // Convert Float32Array to WAV format Uint8Array\n const audioData = this.float32ToWav(result.audio, result.sampleRate);\n\n return {\n audio: audioData,\n warnings,\n request: { body: { text: options.text, voice, speed } },\n response: {\n timestamp: new Date(),\n modelId: this.modelId,\n },\n };\n }\n\n /**\n * Convert Float32Array audio to WAV format Uint8Array\n */\n private float32ToWav(audio: Float32Array, sampleRate: number): Uint8Array {\n const buffer = new ArrayBuffer(44 + audio.length * 2);\n const view = new DataView(buffer);\n\n // WAV header\n const writeString = (offset: number, str: string) => {\n for (let i = 0; i < str.length; i++) {\n view.setUint8(offset + i, str.charCodeAt(i));\n }\n };\n\n writeString(0, \"RIFF\");\n view.setUint32(4, 36 + audio.length * 2, true);\n writeString(8, \"WAVE\");\n writeString(12, \"fmt \");\n view.setUint32(16, 16, true); // Subchunk1Size\n view.setUint16(20, 1, true); // AudioFormat (PCM)\n view.setUint16(22, 1, true); // NumChannels (mono)\n view.setUint32(24, sampleRate, true); // SampleRate\n view.setUint32(28, sampleRate * 2, true); // ByteRate\n view.setUint16(32, 2, true); // BlockAlign\n view.setUint16(34, 16, true); // BitsPerSample\n writeString(36, \"data\");\n view.setUint32(40, audio.length * 2, true);\n\n // Audio data (convert float32 to int16)\n for (let i = 0; i < audio.length; i++) {\n const s = Math.max(-1, Math.min(1, audio[i]));\n view.setInt16(44 + i * 2, Math.round(s * 32767), true);\n }\n\n return new Uint8Array(buffer);\n }\n}\n\n// ============================================\n// Transcription Model Implementation (V2 Spec)\n// ============================================\n\n/** Settings for Gerbil transcription model */\nexport interface GerbilTranscriptionSettings {\n /** Default language code (ISO-639-1) for transcription */\n language?: string;\n}\n\nclass GerbilTranscriptionModel implements TranscriptionModelV2 {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilTranscriptionSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(modelId: string, settings: GerbilTranscriptionSettings = {}) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance?.isSTTLoaded()) {\n return this.instance;\n }\n if (this.loadPromise) {\n await this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n this.loadPromise = this.instance.loadSTT(this.modelId);\n await this.loadPromise;\n return this.instance;\n }\n\n async doGenerate(options: TranscriptionModelV2CallOptions): Promise<{\n text: string;\n segments: Array<{\n text: string;\n startSecond: number;\n endSecond: number;\n }>;\n language: string | undefined;\n durationInSeconds: number | undefined;\n warnings: 
TranscriptionModelV2CallWarning[];\n request?: { body?: string };\n response: {\n timestamp: Date;\n modelId: string;\n };\n }> {\n const warnings: TranscriptionModelV2CallWarning[] = [];\n const g = await this.ensureLoaded();\n\n // Convert audio to Uint8Array\n let audioData: Uint8Array;\n if (typeof options.audio === \"string\") {\n // Base64 encoded - decode it\n const binaryString = atob(options.audio);\n audioData = new Uint8Array(binaryString.length);\n for (let i = 0; i < binaryString.length; i++) {\n audioData[i] = binaryString.charCodeAt(i);\n }\n } else {\n audioData = options.audio;\n }\n\n // Check media type - we only support WAV natively\n const mediaType = options.mediaType?.toLowerCase() || \"\";\n if (mediaType && !mediaType.includes(\"wav\") && !mediaType.includes(\"wave\")) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"mediaType\",\n details: `Gerbil STT natively supports WAV format. Got \"${options.mediaType}\". Audio may not decode correctly.`,\n });\n }\n\n // Determine language from provider options or settings\n const providerOpts = options.providerOptions?.gerbil as Record<string, unknown> | undefined;\n const language = (providerOpts?.language as string) || this.settings.language;\n\n // Transcribe with timestamps to get segments\n const result = await g.transcribe(audioData, {\n language,\n timestamps: true,\n });\n\n // Map segments to V2 format\n const segments = (result.segments || []).map((seg) => ({\n text: seg.text,\n startSecond: seg.start,\n endSecond: seg.end,\n }));\n\n return {\n text: result.text,\n segments,\n language: result.language,\n durationInSeconds: result.duration,\n warnings,\n request: {\n body: JSON.stringify({\n model: this.modelId,\n mediaType: options.mediaType,\n language,\n }),\n },\n response: {\n timestamp: new Date(),\n modelId: this.modelId,\n },\n };\n }\n}\n\n// ============================================\n// Embedding Model Implementation (V2 Spec)\n// ============================================\n\n/** Settings for Gerbil embedding model */\nexport interface GerbilEmbeddingSettings {\n /** Normalize embeddings (default: true) */\n normalize?: boolean;\n}\n\n/** Default embedding models */\nexport const EMBEDDING_MODELS = [\n {\n id: \"all-MiniLM-L6-v2\",\n repo: \"Xenova/all-MiniLM-L6-v2\",\n description: \"MiniLM L6 v2 - Fast, 384 dimensions\",\n dimensions: 384,\n },\n {\n id: \"bge-small-en-v1.5\",\n repo: \"Xenova/bge-small-en-v1.5\",\n description: \"BGE Small EN v1.5 - High quality, 384 dimensions\",\n dimensions: 384,\n },\n {\n id: \"gte-small\",\n repo: \"Xenova/gte-small\",\n description: \"GTE Small - General text embeddings, 384 dimensions\",\n dimensions: 384,\n },\n];\n\nclass GerbilEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = \"v2\" as const;\n readonly provider = \"gerbil\";\n readonly modelId: string;\n readonly maxEmbeddingsPerCall = Infinity;\n readonly supportsParallelCalls = false;\n\n private instance: Gerbil | null = null;\n private readonly settings: GerbilEmbeddingSettings;\n private loadPromise: Promise<void> | null = null;\n\n constructor(modelId: string, settings: GerbilEmbeddingSettings = {}) {\n this.modelId = modelId;\n this.settings = settings;\n }\n\n private getRepo(): string {\n const model = EMBEDDING_MODELS.find((m) => m.id === this.modelId);\n return model?.repo || this.modelId;\n }\n\n private async ensureLoaded(): Promise<Gerbil> {\n if (this.instance) {\n return this.instance;\n }\n if (this.loadPromise) {\n await 
this.loadPromise;\n return this.instance!;\n }\n\n this.instance = new Gerbil();\n // Embedding models load lazily on first embed() call\n return this.instance;\n }\n\n async doEmbed(options: { values: string[]; abortSignal?: AbortSignal }): Promise<{\n embeddings: number[][];\n usage?: { tokens: number };\n }> {\n const g = await this.ensureLoaded();\n\n const embeddings: number[][] = [];\n let totalTokens = 0;\n\n for (const value of options.values) {\n if (options.abortSignal?.aborted) {\n throw new Error(\"Embedding aborted\");\n }\n\n const result = await g.embed(value, {\n model: this.getRepo(),\n normalize: this.settings.normalize,\n });\n\n embeddings.push(result.vector);\n // Approximate token count (1 token ≈ 4 chars)\n totalTokens += Math.ceil(value.length / 4);\n }\n\n return {\n embeddings,\n usage: { tokens: totalTokens },\n };\n }\n}\n\n// ============================================\n// Provider Factory\n// ============================================\n\nexport type GerbilProvider = {\n (modelId: string, settings?: GerbilModelSettings): GerbilLanguageModel;\n languageModel(modelId: string, settings?: GerbilModelSettings): GerbilLanguageModel;\n speech(modelId?: string, settings?: GerbilSpeechSettings): GerbilSpeechModel;\n transcription(modelId?: string, settings?: GerbilTranscriptionSettings): GerbilTranscriptionModel;\n embedding(modelId?: string, settings?: GerbilEmbeddingSettings): GerbilEmbeddingModel;\n listModels(): ModelConfig[];\n getModel(modelId: string): ModelConfig | undefined;\n listVoices(): Array<{ id: string; name: string; gender: string; language: string }>;\n listTranscriptionModels(): STTModelConfig[];\n listEmbeddingModels(): typeof EMBEDDING_MODELS;\n /**\n * Preload a model (download without initializing)\n *\n * @param modelId - Model to preload\n * @param options.keepLoaded - Keep model in memory for instant generateText() calls\n */\n preload(\n modelId: string,\n options?: {\n onProgress?: (info: { status: string; progress?: number }) => void;\n keepLoaded?: boolean;\n },\n ): Promise<void>;\n /** Check if a model is cached */\n isCached(modelId: string): Promise<boolean>;\n};\n\n/**\n * Create a Gerbil provider\n *\n * @example\n * ```ts\n * const local = createGerbil({ device: \"gpu\", dtype: \"q4\" });\n *\n * // Text generation\n * const { text } = await generateText({\n * model: local(\"qwen3-0.6b\"),\n * prompt: \"Hello\",\n * });\n *\n * // Speech generation\n * const audio = await generateSpeech({\n * model: local.speech(),\n * text: \"Hello world!\",\n * voice: \"af_heart\",\n * });\n *\n * // Transcription\n * const transcript = await transcribe({\n * model: local.transcription(),\n * audio: audioBuffer,\n * });\n * ```\n */\nexport function createGerbil(options: GerbilProviderSettings = {}): GerbilProvider {\n // Cache model instances so preload + generateText share the same loaded model\n const modelCache = new Map<string, GerbilLanguageModel>();\n\n const createModel = (modelId: string, settings: GerbilModelSettings = {}) => {\n // Return cached instance if available (for keepLoaded support)\n const cached = modelCache.get(modelId);\n if (cached) return cached;\n\n return new GerbilLanguageModel(modelId, settings, options);\n };\n\n const createSpeechModel = (modelId = \"kokoro-82m\", settings: GerbilSpeechSettings = {}) =>\n new GerbilSpeechModel(modelId, settings);\n\n const createTranscriptionModel = (\n modelId = \"whisper-tiny.en\",\n settings: GerbilTranscriptionSettings = {},\n ) => new GerbilTranscriptionModel(modelId, 
settings);\n\n const createEmbeddingModel = (\n modelId = \"all-MiniLM-L6-v2\",\n settings: GerbilEmbeddingSettings = {},\n ) => new GerbilEmbeddingModel(modelId, settings);\n\n const provider = ((modelId: string, settings?: GerbilModelSettings) =>\n createModel(modelId, settings ?? {})) as GerbilProvider;\n\n provider.languageModel = createModel;\n provider.speech = createSpeechModel;\n provider.transcription = createTranscriptionModel;\n provider.embedding = createEmbeddingModel;\n provider.listModels = () => Object.values(BUILTIN_MODELS);\n provider.getModel = (id: string) => BUILTIN_MODELS[id];\n provider.listVoices = () =>\n KOKORO_VOICES.map((v) => ({\n id: v.id,\n name: v.name,\n gender: v.gender,\n language: v.language,\n }));\n provider.listTranscriptionModels = () => WHISPER_MODELS;\n provider.listEmbeddingModels = () => EMBEDDING_MODELS;\n\n // Preload and cache checking\n provider.preload = async (modelId, opts) => {\n const keepLoaded = (opts as any)?.keepLoaded ?? false;\n\n if (keepLoaded) {\n // Create model instance, load it, and cache for later gerbil() calls\n const model = new GerbilLanguageModel(modelId, {}, options);\n await (model as any).ensureLoaded(); // Trigger load\n modelCache.set(modelId, model);\n } else {\n // Just download to disk cache\n const g = new Gerbil();\n await g.preloadModel(modelId, opts);\n }\n };\n\n provider.isCached = async (modelId) => {\n // Check memory cache first\n if (modelCache.has(modelId)) return true;\n // Then check disk cache\n const g = new Gerbil();\n return g.isModelCached(modelId);\n };\n\n return provider;\n}\n\n/**\n * Default Gerbil provider\n *\n * @example Text Generation\n * ```ts\n * import { generateText } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * const { text } = await generateText({\n * model: gerbil(\"qwen3-0.6b\"),\n * prompt: \"Hello\",\n * });\n * ```\n *\n * @example Speech Generation\n * ```ts\n * import { experimental_generateSpeech as generateSpeech } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * const audio = await generateSpeech({\n * model: gerbil.speech(),\n * text: \"Hello world!\",\n * voice: \"af_heart\", // Or \"bf_emma\", \"am_fenrir\", etc.\n * });\n *\n * // Access audio data\n * const audioData = audio.audioData; // Uint8Array (WAV format)\n * ```\n *\n * @example Transcription\n * ```ts\n * import { experimental_transcribe as transcribe } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n * import { readFile } from \"fs/promises\";\n *\n * const transcript = await transcribe({\n * model: gerbil.transcription(), // whisper-tiny.en by default\n * audio: await readFile(\"audio.wav\"),\n * });\n *\n * console.log(transcript.text);\n * console.log(transcript.segments); // Timestamped segments\n * ```\n *\n * @example Embeddings\n * ```ts\n * import { embed, embedMany } from \"ai\";\n * import { gerbil } from \"gerbil/ai\";\n *\n * // Single embedding\n * const { embedding } = await embed({\n * model: gerbil.embedding(), // all-MiniLM-L6-v2 by default\n * value: \"Hello world\",\n * });\n *\n * // Multiple embeddings\n * const { embeddings } = await embedMany({\n * model: gerbil.embedding(),\n * values: [\"Hello\", \"World\", \"How are you?\"],\n * });\n * ```\n */\nexport const gerbil = createGerbil();\n\nexport default 
gerbil;\n"],"mappings":";;;;;;;AAgDA,IAAI,YAAY;AAChB,SAAS,aAAqB;AAC5B,QAAO,UAAU,KAAK,KAAK,CAAC,GAAI,aAAa;;AAO/C,IAAM,sBAAN,MAAqD;CACnD,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CAGT,AAAS,gBAA0C,EAAE;CAErD,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YACE,SACA,UACA,kBACA;AACA,OAAK,UAAU;AACf,OAAK,WAAW;AAChB,OAAK,mBAAmB;;CAG1B,MAAc,eAAgC;AAC5C,MAAI,KAAK,UAAU,UAAU,CAC3B,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAC5B,OAAK,cAAc,KAAK,SAAS,UAAU,KAAK,SAAS;GACvD,QAAQ,KAAK,SAAS,UAAU,KAAK,iBAAiB,UAAU;GAChE,OAAO,KAAK,SAAS,SAAS,KAAK,iBAAiB,SAAS;GAC9D,CAAC;AACF,QAAM,KAAK;AACX,SAAO,KAAK;;CAGd,AAAQ,cAAc,QAIpB;EACA,IAAIA;EACJ,IAAI,OAAO;EACX,MAAMC,SAAuB,EAAE;AAE/B,OAAK,MAAM,OAAO,OAChB,KAAI,IAAI,SAAS,SACf,UAAS,IAAI;WACJ,IAAI,SAAS,QACtB;QAAK,MAAM,QAAQ,IAAI,QACrB,KAAI,KAAK,SAAS,OAChB,SAAQ,KAAK;YACH,KAAa,SAAS,SAAS;IAEzC,MAAM,UAAU;AAChB,QAAI,QAAQ,iBAAiB,IAC3B,QAAO,KAAK,EAAE,QAAQ,QAAQ,MAAM,UAAU,EAAE,CAAC;aACxC,OAAO,QAAQ,UAAU,SAElC,QAAO,KAAK,EAAE,QAAQ,QAAQ,OAAO,CAAC;aAC7B,QAAQ,iBAAiB,YAAY;KAE9C,MAAM,SAAS,KAAK,OAAO,aAAa,GAAG,QAAQ,MAAM,CAAC;KAC1D,MAAM,WAAW,QAAQ,YAAY;AACrC,YAAO,KAAK,EAAE,QAAQ,QAAQ,SAAS,UAAU,UAAU,CAAC;;;aAIzD,IAAI,SAAS,aACtB;QAAK,MAAM,QAAQ,IAAI,QACrB,KAAI,KAAK,SAAS,OAChB,SAAQ,kBAAkB,KAAK;aAG1B,IAAI,SAAS,OACtB,MAAK,MAAM,QAAQ,IAAI,QACrB,SAAQ,aAAa,KAAK,SAAS,KAAK,KAAK,UAAU,KAAK;AAKlE,SAAO;GAAE;GAAQ;GAAM;GAAQ;;CAGjC,AAAQ,gBAAgB,QAA6C;AACnE,MAAI,WAAW,OACb,QAAO;AAET,MAAI,WAAW,SACb,QAAO;AAET,MAAI,WAAW,QACb,QAAO;AAET,SAAO;;CAGT,MAAM,WAAW,SAAqC;EACpD,MAAMC,WAAyC,EAAE;EACjD,MAAM,IAAI,MAAM,KAAK,cAAc;EACnC,MAAM,EAAE,QAAQ,MAAM,WAAW,KAAK,cAAc,QAAQ,OAAO;EAEnE,MAAM,SAAS,MAAM,EAAE,SAAS,MAAM;GACpC,WAAW,QAAQ;GACnB,aAAa,QAAQ;GACrB,MAAM,QAAQ;GACd,MAAM,QAAQ;GACd;GACA,UAAU,KAAK,SAAS;GACxB,eAAe,QAAQ;GACvB,QAAQ,OAAO,SAAS,IAAI,SAAS;GACtC,CAAC;EAGF,MAAMC,UAAoC,EAAE;AAG5C,MAAI,OAAO,SACT,SAAQ,KAAK;GACX,MAAM;GACN,MAAM,OAAO;GACd,CAAC;AAIJ,UAAQ,KAAK;GACX,MAAM;GACN,MAAM,OAAO;GACd,CAAC;EAEF,MAAMC,QAA8B;GAClC,aAAa;GACb,cAAc,OAAO;GACrB,aAAa,OAAO;GACrB;AAED,SAAO;GACL;GACA,cAAc,KAAK,gBAAgB,OAAO,aAAa;GACvD;GACA,SAAS,EAAE,MAAM;IAAE,OAAO,KAAK;IAAS,QAAQ;IAAM,EAAE;GACxD;GACD;;CAGH,MAAM,SAAS,SAAqC;EAClD,MAAMF,WAAyC,EAAE;EACjD,MAAM,IAAI,MAAM,KAAK,cAAc;EACnC,MAAM,EAAE,QAAQ,MAAM,WAAW,KAAK,cAAc,QAAQ,OAAO;EAEnE,MAAM,YAAY,EAAE,OAAO,MAAM;GAC/B,WAAW,QAAQ;GACnB,aAAa,QAAQ;GACrB,MAAM,QAAQ;GACd,MAAM,QAAQ;GACd;GACA,UAAU,KAAK,SAAS;GACxB,eAAe,QAAQ;GACvB,QAAQ,OAAO,SAAS,IAAI,SAAS;GACtC,CAAC;EAEF,IAAI,SAAS;EACb,MAAM,SAAS,YAAY;AAmD3B,SAAO;GACL,QAlDa,IAAI,eAA0C,EAC3D,MAAM,MAAM,YAAY;AACtB,QAAI;AAEF,gBAAW,QAAQ;MACjB,MAAM;MACN;MACD,CAAC;AAGF,gBAAW,QAAQ;MACjB,MAAM;MACN,IAAI;MACL,CAAC;AAEF,gBAAW,MAAM,SAAS,WAAW;AACnC,gBAAU;AAEV,iBAAW,QAAQ;OACjB,MAAM;OACN,IAAI;OACJ,OAAO;OACR,CAAC;;AAIJ,gBAAW,QAAQ;MACjB,MAAM;MACN,IAAI;MACL,CAAC;AAGF,gBAAW,QAAQ;MACjB,MAAM;MACN,cAAc;MACd,OAAO;OACL,aAAa;OACb,cAAc;OACd,aAAa;OACd;MACF,CAAC;AACF,gBAAW,OAAO;aACX,OAAO;AACd,gBAAW,QAAQ;MAAE,MAAM;MAAS;MAAO,CAAC;AAC5C,gBAAW,OAAO;;MAGvB,CAAC;GAIA,SAAS,EAAE,MAAM;IAAE,OAAO,KAAK;IAAS,QAAQ;IAAM,EAAE;GACzD;;;AAgBL,IAAM,oBAAN,MAAiD;CAC/C,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CAET,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YAAY,SAAiB,WAAiC,EAAE,EAAE;AAChE,OAAK,UAAU;AACf,OAAK,WAAW;;CAGlB,MAAc,eAAgC;AAC5C,MAAI,KAAK,UAAU,aAAa,CAC9B,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAC5B,OAAK,cAAc,KAAK,SAAS,iBAAiB;AAClD,QAAM,KAAK;AACX,SAAO,KAAK;;CAGd,MAAM,WAAW,SAKd;EACD,MAAMG,WAAuC,EAAE;EAC/C,MAAM,IAAI,MAAM,KAAK,cAAc;EAGnC,IAAI,QAAQ,QAAQ,SAAS,KAAK,SAAS,SAAS;AAIpD,MAAI,CADe,cAAc,MAAM,MAAM,EAAE,OAAO,MAAM,EAC3C;AACf,YAAS,K
AAK;IACZ,MAAM;IACN,SAAS;IACT,SAAS,kBAAkB,MAAM;IAClC,CAAC;AACF,WAAQ;;EAIV,MAAM,QAAQ,QAAQ,SAAS,KAAK,SAAS,SAAS;AAGtD,MAAI,QAAQ,gBAAgB,QAAQ,iBAAiB,SAAS,QAAQ,iBAAiB,MACrF,UAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,SAAS,0DAA0D,QAAQ,aAAa;GACzF,CAAC;AAGJ,MAAI,QAAQ,aACV,UAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,SAAS;GACV,CAAC;EAIJ,MAAM,SAAS,MAAM,EAAE,MAAM,QAAQ,MAAM;GAAE;GAAO;GAAO,CAAC;AAK5D,SAAO;GACL,OAHgB,KAAK,aAAa,OAAO,OAAO,OAAO,WAAW;GAIlE;GACA,SAAS,EAAE,MAAM;IAAE,MAAM,QAAQ;IAAM;IAAO;IAAO,EAAE;GACvD,UAAU;IACR,2BAAW,IAAI,MAAM;IACrB,SAAS,KAAK;IACf;GACF;;;;;CAMH,AAAQ,aAAa,OAAqB,YAAgC;EACxE,MAAM,yBAAS,IAAI,YAAY,KAAK,MAAM,SAAS,EAAE;EACrD,MAAM,OAAO,IAAI,SAAS,OAAO;EAGjC,MAAM,eAAe,QAAgB,QAAgB;AACnD,QAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,MAAK,SAAS,SAAS,GAAG,IAAI,WAAW,EAAE,CAAC;;AAIhD,cAAY,GAAG,OAAO;AACtB,OAAK,UAAU,GAAG,KAAK,MAAM,SAAS,GAAG,KAAK;AAC9C,cAAY,GAAG,OAAO;AACtB,cAAY,IAAI,OAAO;AACvB,OAAK,UAAU,IAAI,IAAI,KAAK;AAC5B,OAAK,UAAU,IAAI,GAAG,KAAK;AAC3B,OAAK,UAAU,IAAI,GAAG,KAAK;AAC3B,OAAK,UAAU,IAAI,YAAY,KAAK;AACpC,OAAK,UAAU,IAAI,aAAa,GAAG,KAAK;AACxC,OAAK,UAAU,IAAI,GAAG,KAAK;AAC3B,OAAK,UAAU,IAAI,IAAI,KAAK;AAC5B,cAAY,IAAI,OAAO;AACvB,OAAK,UAAU,IAAI,MAAM,SAAS,GAAG,KAAK;AAG1C,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;GACrC,MAAM,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,CAAC;AAC7C,QAAK,SAAS,KAAK,IAAI,GAAG,KAAK,MAAM,IAAI,MAAM,EAAE,KAAK;;AAGxD,SAAO,IAAI,WAAW,OAAO;;;AAcjC,IAAM,2BAAN,MAA+D;CAC7D,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CAET,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YAAY,SAAiB,WAAwC,EAAE,EAAE;AACvE,OAAK,UAAU;AACf,OAAK,WAAW;;CAGlB,MAAc,eAAgC;AAC5C,MAAI,KAAK,UAAU,aAAa,CAC9B,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAC5B,OAAK,cAAc,KAAK,SAAS,QAAQ,KAAK,QAAQ;AACtD,QAAM,KAAK;AACX,SAAO,KAAK;;CAGd,MAAM,WAAW,SAed;EACD,MAAMC,WAA8C,EAAE;EACtD,MAAM,IAAI,MAAM,KAAK,cAAc;EAGnC,IAAIC;AACJ,MAAI,OAAO,QAAQ,UAAU,UAAU;GAErC,MAAM,eAAe,KAAK,QAAQ,MAAM;AACxC,eAAY,IAAI,WAAW,aAAa,OAAO;AAC/C,QAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,WAAU,KAAK,aAAa,WAAW,EAAE;QAG3C,aAAY,QAAQ;EAItB,MAAM,YAAY,QAAQ,WAAW,aAAa,IAAI;AACtD,MAAI,aAAa,CAAC,UAAU,SAAS,MAAM,IAAI,CAAC,UAAU,SAAS,OAAO,CACxE,UAAS,KAAK;GACZ,MAAM;GACN,SAAS;GACT,SAAS,iDAAiD,QAAQ,UAAU;GAC7E,CAAC;EAKJ,MAAM,YADe,QAAQ,iBAAiB,SACd,YAAuB,KAAK,SAAS;EAGrE,MAAM,SAAS,MAAM,EAAE,WAAW,WAAW;GAC3C;GACA,YAAY;GACb,CAAC;EAGF,MAAM,YAAY,OAAO,YAAY,EAAE,EAAE,KAAK,SAAS;GACrD,MAAM,IAAI;GACV,aAAa,IAAI;GACjB,WAAW,IAAI;GAChB,EAAE;AAEH,SAAO;GACL,MAAM,OAAO;GACb;GACA,UAAU,OAAO;GACjB,mBAAmB,OAAO;GAC1B;GACA,SAAS,EACP,MAAM,KAAK,UAAU;IACnB,OAAO,KAAK;IACZ,WAAW,QAAQ;IACnB;IACD,CAAC,EACH;GACD,UAAU;IACR,2BAAW,IAAI,MAAM;IACrB,SAAS,KAAK;IACf;GACF;;;;AAeL,MAAa,mBAAmB;CAC9B;EACE,IAAI;EACJ,MAAM;EACN,aAAa;EACb,YAAY;EACb;CACD;EACE,IAAI;EACJ,MAAM;EACN,aAAa;EACb,YAAY;EACb;CACD;EACE,IAAI;EACJ,MAAM;EACN,aAAa;EACb,YAAY;EACb;CACF;AAED,IAAM,uBAAN,MAA+D;CAC7D,AAAS,uBAAuB;CAChC,AAAS,WAAW;CACpB,AAAS;CACT,AAAS,uBAAuB;CAChC,AAAS,wBAAwB;CAEjC,AAAQ,WAA0B;CAClC,AAAiB;CACjB,AAAQ,cAAoC;CAE5C,YAAY,SAAiB,WAAoC,EAAE,EAAE;AACnE,OAAK,UAAU;AACf,OAAK,WAAW;;CAGlB,AAAQ,UAAkB;AAExB,SADc,iBAAiB,MAAM,MAAM,EAAE,OAAO,KAAK,QAAQ,EACnD,QAAQ,KAAK;;CAG7B,MAAc,eAAgC;AAC5C,MAAI,KAAK,SACP,QAAO,KAAK;AAEd,MAAI,KAAK,aAAa;AACpB,SAAM,KAAK;AACX,UAAO,KAAK;;AAGd,OAAK,WAAW,IAAI,QAAQ;AAE5B,SAAO,KAAK;;CAGd,MAAM,QAAQ,SAGX;EACD,MAAM,IAAI,MAAM,KAAK,cAAc;EAEnC,MAAMC,aAAyB,EAAE;EACjC,IAAI,cAAc;AAElB,OAAK,MAAM,SAAS,QAAQ,QAAQ;AAClC,OAAI,QAAQ,aAAa,QACvB,OAAM,IAAI,MAAM,oBAAoB;GAGtC,MAAM,SAAS,MAAM,EAAE,MAAM,OAAO;IAClC,OAAO,KAAK,SAAS;IACrB,WAAW,KAAK,SAAS;IAC1B,CAAC;AAEF,cAAW,KAAK,OAAO,OAAO;AAE9B,kBAAe,KAAK,KAAK,MAAM,SAAS,EAAE;;AAG5C,SAAO;GACL;GACA,OAAO,E
AAE,QAAQ,aAAa;GAC/B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+DL,SAAgB,aAAa,UAAkC,EAAE,EAAkB;CAEjF,MAAM,6BAAa,IAAI,KAAkC;CAEzD,MAAM,eAAe,SAAiB,WAAgC,EAAE,KAAK;EAE3E,MAAM,SAAS,WAAW,IAAI,QAAQ;AACtC,MAAI,OAAQ,QAAO;AAEnB,SAAO,IAAI,oBAAoB,SAAS,UAAU,QAAQ;;CAG5D,MAAM,qBAAqB,UAAU,cAAc,WAAiC,EAAE,KACpF,IAAI,kBAAkB,SAAS,SAAS;CAE1C,MAAM,4BACJ,UAAU,mBACV,WAAwC,EAAE,KACvC,IAAI,yBAAyB,SAAS,SAAS;CAEpD,MAAM,wBACJ,UAAU,oBACV,WAAoC,EAAE,KACnC,IAAI,qBAAqB,SAAS,SAAS;CAEhD,MAAM,aAAa,SAAiB,aAClC,YAAY,SAAS,YAAY,EAAE,CAAC;AAEtC,UAAS,gBAAgB;AACzB,UAAS,SAAS;AAClB,UAAS,gBAAgB;AACzB,UAAS,YAAY;AACrB,UAAS,mBAAmB,OAAO,OAAO,eAAe;AACzD,UAAS,YAAY,OAAe,eAAe;AACnD,UAAS,mBACP,cAAc,KAAK,OAAO;EACxB,IAAI,EAAE;EACN,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,UAAU,EAAE;EACb,EAAE;AACL,UAAS,gCAAgC;AACzC,UAAS,4BAA4B;AAGrC,UAAS,UAAU,OAAO,SAAS,SAAS;AAG1C,MAFoB,MAAc,cAAc,OAEhC;GAEd,MAAM,QAAQ,IAAI,oBAAoB,SAAS,EAAE,EAAE,QAAQ;AAC3D,SAAO,MAAc,cAAc;AACnC,cAAW,IAAI,SAAS,MAAM;QAI9B,OADU,IAAI,QAAQ,CACd,aAAa,SAAS,KAAK;;AAIvC,UAAS,WAAW,OAAO,YAAY;AAErC,MAAI,WAAW,IAAI,QAAQ,CAAE,QAAO;AAGpC,SADU,IAAI,QAAQ,CACb,cAAc,QAAQ;;AAGjC,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiET,MAAa,SAAS,cAAc;AAEpC,qBAAe"}
@@ -1,4 +1,4 @@
- import { O as TranscribeOptions, S as SpeakOptions, c as GerbilConfig, d as ImageInput, o as GenerateOptions } from "../types-CiTc7ez3.mjs";
+ import { T as SpeakOptions, c as GerbilConfig, d as ImageInput, j as TranscribeOptions, o as GenerateOptions } from "../types-evP8RShr.mjs";
 
  //#region src/integrations/langchain.d.ts
 
@@ -1,4 +1,5 @@
- import { t as Gerbil } from "../gerbil-DoDGHe6Z.mjs";
+ import { t as Gerbil } from "../gerbil-BZklpDhM.mjs";
+ import "../chrome-backend-CORwaIyC.mjs";
  import "../utils-CZBZ8dgR.mjs";
 
  //#region src/integrations/langchain.ts
@@ -1 +1 @@
- {"version":3,"file":"langchain.mjs","names":["instance: Gerbil | null","loadPromise: Promise<void> | null","results: string[]"],"sources":["../../src/integrations/langchain.ts"],"sourcesContent":["/**\n * Gerbil LangChain Integration\n *\n * @example Text Generation\n * ```ts\n * import { GerbilLLM } from \"gerbil/langchain\";\n * const llm = new GerbilLLM({ model: \"qwen3-0.6b\" });\n * const result = await llm.invoke(\"Hello!\");\n * ```\n *\n * @example Embeddings\n * ```ts\n * import { GerbilEmbeddings } from \"gerbil/langchain\";\n * const embeddings = new GerbilEmbeddings();\n * const vectors = await embeddings.embedDocuments([\"Hello\", \"World\"]);\n * ```\n *\n * @example Text-to-Speech\n * ```ts\n * import { GerbilTTS } from \"gerbil/langchain\";\n * const tts = new GerbilTTS({ voice: \"af_heart\" });\n * const audio = await tts.speak(\"Hello world!\");\n * ```\n *\n * @example Speech-to-Text\n * ```ts\n * import { GerbilSTT } from \"gerbil/langchain\";\n * const stt = new GerbilSTT({ model: \"whisper-tiny.en\" });\n * const text = await stt.transcribe(audioBuffer);\n * ```\n */\n\nimport { Gerbil } from \"../core/gerbil.js\";\nimport type {\n GenerateOptions,\n GerbilConfig,\n ImageInput,\n SpeakOptions,\n TranscribeOptions,\n} from \"../core/types.js\";\n\n// Singleton Gerbil instance\nlet instance: Gerbil | null = null;\nlet loadPromise: Promise<void> | null = null;\n\nasync function getInstance(model: string): Promise<Gerbil> {\n if (!instance) {\n instance = new Gerbil();\n }\n if (!(instance.isLoaded() || loadPromise)) {\n loadPromise = instance.loadModel(model);\n }\n if (loadPromise) {\n await loadPromise;\n loadPromise = null;\n }\n return instance;\n}\n\n/**\n * Gerbil LLM for LangChain\n *\n * Supports text generation with optional vision (images) input.\n */\nexport class GerbilLLM {\n private readonly model: string;\n private readonly options: GenerateOptions;\n\n constructor(config: GerbilConfig & GenerateOptions = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n this.options = config;\n }\n\n get _llmType(): string {\n return \"gerbil\";\n }\n\n /**\n * Generate text from a prompt\n */\n async invoke(prompt: string, options?: GenerateOptions): Promise<string> {\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, { ...this.options, ...options });\n return result.text;\n }\n\n /**\n * Generate text with images (vision)\n *\n * @example\n * ```ts\n * const llm = new GerbilLLM({ model: \"ministral-3b\" });\n * const result = await llm.invokeWithImages(\"Describe this image\", [\n * { source: \"https://example.com/photo.jpg\" }\n * ]);\n * ```\n */\n async invokeWithImages(\n prompt: string,\n images: ImageInput[],\n options?: GenerateOptions,\n ): Promise<string> {\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, { ...this.options, ...options, images });\n return result.text;\n }\n\n /**\n * Stream text generation\n */\n async *stream(prompt: string, options?: GenerateOptions): AsyncGenerator<string> {\n const g = await getInstance(this.model);\n yield* g.stream(prompt, { ...this.options, ...options });\n }\n\n /**\n * Batch generate text for multiple prompts\n */\n async batch(prompts: string[], options?: GenerateOptions): Promise<string[]> {\n const results: string[] = [];\n for (const prompt of prompts) {\n results.push(await this.invoke(prompt, options));\n }\n return results;\n }\n\n /**\n * Check if the model supports vision input\n */\n async supportsVision(): Promise<boolean> {\n const 
g = await getInstance(this.model);\n return g.supportsVision();\n }\n\n // LangChain compatibility methods\n async call(prompt: string): Promise<string> {\n return this.invoke(prompt);\n }\n\n async predict(text: string): Promise<string> {\n return this.invoke(text);\n }\n\n async generate(prompts: string[]): Promise<{ generations: { text: string }[][] }> {\n const results = await this.batch(prompts);\n return {\n generations: results.map((text) => [{ text }]),\n };\n }\n}\n\n/**\n * Gerbil Embeddings for LangChain\n */\nexport class GerbilEmbeddings {\n private readonly model: string;\n\n constructor(config: { model?: string } = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n }\n\n async embedQuery(text: string): Promise<number[]> {\n const g = await getInstance(this.model);\n const result = await g.embed(text);\n return result.vector;\n }\n\n async embedDocuments(documents: string[]): Promise<number[][]> {\n const g = await getInstance(this.model);\n const results = await g.embedBatch(documents);\n return results.map((r) => r.vector);\n }\n}\n\n/**\n * Gerbil Text-to-Speech for LangChain\n *\n * Note: LangChain doesn't have a standard TTS interface, so this is a\n * utility class for use in LangChain pipelines.\n *\n * @example\n * ```ts\n * import { GerbilTTS } from \"gerbil/langchain\";\n *\n * const tts = new GerbilTTS({ voice: \"af_heart\" });\n * const result = await tts.speak(\"Hello world!\");\n * // result.audio = Float32Array, result.sampleRate = 24000\n *\n * // List voices\n * const voices = await tts.listVoices();\n * ```\n */\nexport class GerbilTTS {\n private readonly voice: string;\n private readonly speed: number;\n private gerbil: Gerbil | null = null;\n\n constructor(config: { voice?: string; speed?: number } = {}) {\n this.voice = config.voice || \"af_heart\";\n this.speed = config.speed || 1.0;\n }\n\n private async getInstance(): Promise<Gerbil> {\n if (!this.gerbil) {\n this.gerbil = new Gerbil();\n }\n await this.gerbil.ensureTTSLoaded();\n return this.gerbil;\n }\n\n /**\n * Generate speech from text\n */\n async speak(\n text: string,\n options?: SpeakOptions,\n ): Promise<{ audio: Float32Array; sampleRate: number; duration: number }> {\n const g = await this.getInstance();\n const result = await g.speak(text, {\n voice: options?.voice || this.voice,\n speed: options?.speed || this.speed,\n });\n return {\n audio: result.audio,\n sampleRate: result.sampleRate,\n duration: result.duration,\n };\n }\n\n /**\n * Stream speech generation for long text\n */\n async *speakStream(\n text: string,\n options?: SpeakOptions,\n ): AsyncGenerator<{ samples: Float32Array; index: number; isFinal: boolean }> {\n const g = await this.getInstance();\n yield* g.speakStream(text, {\n voice: options?.voice || this.voice,\n speed: options?.speed || this.speed,\n });\n }\n\n /**\n * List available voices\n */\n async listVoices(): Promise<\n Array<{ id: string; name: string; gender: string; language: string }>\n > {\n const g = await this.getInstance();\n return g.listVoices();\n }\n}\n\n/**\n * Gerbil Speech-to-Text for LangChain\n *\n * Note: LangChain doesn't have a standard STT interface, so this is a\n * utility class for use in LangChain pipelines.\n *\n * @example\n * ```ts\n * import { GerbilSTT } from \"gerbil/langchain\";\n * import { readFileSync } from \"fs\";\n *\n * const stt = new GerbilSTT({ model: \"whisper-tiny.en\" });\n * const audio = new Uint8Array(readFileSync(\"audio.wav\"));\n * const result = await stt.transcribe(audio);\n * 
console.log(result.text);\n * ```\n */\nexport class GerbilSTT {\n private readonly model: string;\n private gerbil: Gerbil | null = null;\n\n constructor(config: { model?: string } = {}) {\n this.model = config.model || \"whisper-tiny.en\";\n }\n\n private async getInstance(): Promise<Gerbil> {\n if (!this.gerbil) {\n this.gerbil = new Gerbil();\n }\n await this.gerbil.loadSTT(this.model);\n return this.gerbil;\n }\n\n /**\n * Transcribe audio to text\n *\n * @param audio - WAV file as Uint8Array or Float32Array at 16kHz\n */\n async transcribe(\n audio: Uint8Array | Float32Array,\n options?: TranscribeOptions,\n ): Promise<{\n text: string;\n language?: string;\n duration?: number;\n segments?: Array<{ text: string; start: number; end: number }>;\n }> {\n const g = await this.getInstance();\n const result = await g.transcribe(audio, options);\n return {\n text: result.text,\n language: result.language,\n duration: result.duration,\n segments: result.segments,\n };\n }\n\n /**\n * List available STT models\n */\n async listModels(): Promise<Array<{ id: string; size: string; multilingual: boolean }>> {\n const g = await this.getInstance();\n const models = await g.listSTTModels();\n return models.map((m) => ({\n id: m.id,\n size: m.size,\n multilingual: m.multilingual,\n }));\n }\n}\n\nexport default { GerbilLLM, GerbilEmbeddings, GerbilTTS, GerbilSTT };\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CA,IAAIA,WAA0B;AAC9B,IAAIC,cAAoC;AAExC,eAAe,YAAY,OAAgC;AACzD,KAAI,CAAC,SACH,YAAW,IAAI,QAAQ;AAEzB,KAAI,EAAE,SAAS,UAAU,IAAI,aAC3B,eAAc,SAAS,UAAU,MAAM;AAEzC,KAAI,aAAa;AACf,QAAM;AACN,gBAAc;;AAEhB,QAAO;;;;;;;AAQT,IAAa,YAAb,MAAuB;CACrB,AAAiB;CACjB,AAAiB;CAEjB,YAAY,SAAyC,EAAE,EAAE;AACvD,OAAK,QAAQ,OAAO,SAAS;AAC7B,OAAK,UAAU;;CAGjB,IAAI,WAAmB;AACrB,SAAO;;;;;CAMT,MAAM,OAAO,QAAgB,SAA4C;AAGvE,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC,EAC1D;;;;;;;;;;;;;CAchB,MAAM,iBACJ,QACA,QACA,SACiB;AAGjB,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS;GAAQ,CAAC,EAClE;;;;;CAMhB,OAAO,OAAO,QAAgB,SAAmD;AAE/E,UADU,MAAM,YAAY,KAAK,MAAM,EAC9B,OAAO,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC;;;;;CAM1D,MAAM,MAAM,SAAmB,SAA8C;EAC3E,MAAMC,UAAoB,EAAE;AAC5B,OAAK,MAAM,UAAU,QACnB,SAAQ,KAAK,MAAM,KAAK,OAAO,QAAQ,QAAQ,CAAC;AAElD,SAAO;;;;;CAMT,MAAM,iBAAmC;AAEvC,UADU,MAAM,YAAY,KAAK,MAAM,EAC9B,gBAAgB;;CAI3B,MAAM,KAAK,QAAiC;AAC1C,SAAO,KAAK,OAAO,OAAO;;CAG5B,MAAM,QAAQ,MAA+B;AAC3C,SAAO,KAAK,OAAO,KAAK;;CAG1B,MAAM,SAAS,SAAmE;AAEhF,SAAO,EACL,cAFc,MAAM,KAAK,MAAM,QAAQ,EAElB,KAAK,SAAS,CAAC,EAAE,MAAM,CAAC,CAAC,EAC/C;;;;;;AAOL,IAAa,mBAAb,MAA8B;CAC5B,AAAiB;CAEjB,YAAY,SAA6B,EAAE,EAAE;AAC3C,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAM,WAAW,MAAiC;AAGhD,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,MAAM,KAAK,EACpB;;CAGhB,MAAM,eAAe,WAA0C;AAG7D,UADgB,OADN,MAAM,YAAY,KAAK,MAAM,EACf,WAAW,UAAU,EAC9B,KAAK,MAAM,EAAE,OAAO;;;;;;;;;;;;;;;;;;;;;AAsBvC,IAAa,YAAb,MAAuB;CACrB,AAAiB;CACjB,AAAiB;CACjB,AAAQ,SAAwB;CAEhC,YAAY,SAA6C,EAAE,EAAE;AAC3D,OAAK,QAAQ,OAAO,SAAS;AAC7B,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAc,cAA+B;AAC3C,MAAI,CAAC,KAAK,OACR,MAAK,SAAS,IAAI,QAAQ;AAE5B,QAAM,KAAK,OAAO,iBAAiB;AACnC,SAAO,KAAK;;;;;CAMd,MAAM,MACJ,MACA,SACwE;EAExE,MAAM,SAAS,OADL,MAAM,KAAK,aAAa,EACX,MAAM,MAAM;GACjC,OAAO,SAAS,SAAS,KAAK;GAC9B,OAAO,SAAS,SAAS,KAAK;GAC/B,CAAC;AACF,SAAO;GACL,OAAO,OAAO;GACd,YAAY,OAAO;GACnB,UAAU,OAAO;GAClB;;;;;CAMH,OAAO,YACL,MACA,SAC4E;AAE5E,UADU,MAAM,KAAK,aAAa,EACzB,YAAY,MAAM;GACzB,OAAO,SAAS,SAAS,KAAK;GAC9B,OAAO,SAAS,SAAS,KAAK;GAC/B,CAAC;;;;;CAMJ,MAAM,aAEJ;AAEA,UADU,MAAM,KAAK,aAAa,EACzB,YAAY;;;;;;;;;;;;;;;;;;;;AAqBzB,IAAa,YAAb,MAAuB;CACrB,A
AAiB;CACjB,AAAQ,SAAwB;CAEhC,YAAY,SAA6B,EAAE,EAAE;AAC3C,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAc,cAA+B;AAC3C,MAAI,CAAC,KAAK,OACR,MAAK,SAAS,IAAI,QAAQ;AAE5B,QAAM,KAAK,OAAO,QAAQ,KAAK,MAAM;AACrC,SAAO,KAAK;;;;;;;CAQd,MAAM,WACJ,OACA,SAMC;EAED,MAAM,SAAS,OADL,MAAM,KAAK,aAAa,EACX,WAAW,OAAO,QAAQ;AACjD,SAAO;GACL,MAAM,OAAO;GACb,UAAU,OAAO;GACjB,UAAU,OAAO;GACjB,UAAU,OAAO;GAClB;;;;;CAMH,MAAM,aAAkF;AAGtF,UADe,OADL,MAAM,KAAK,aAAa,EACX,eAAe,EACxB,KAAK,OAAO;GACxB,IAAI,EAAE;GACN,MAAM,EAAE;GACR,cAAc,EAAE;GACjB,EAAE;;;AAIP,wBAAe;CAAE;CAAW;CAAkB;CAAW;CAAW"}
+ {"version":3,"file":"langchain.mjs","names":["instance: Gerbil | null","loadPromise: Promise<void> | null","results: string[]"],"sources":["../../src/integrations/langchain.ts"],"sourcesContent":["/**\n * Gerbil LangChain Integration\n *\n * @example Text Generation\n * ```ts\n * import { GerbilLLM } from \"gerbil/langchain\";\n * const llm = new GerbilLLM({ model: \"qwen3-0.6b\" });\n * const result = await llm.invoke(\"Hello!\");\n * ```\n *\n * @example Embeddings\n * ```ts\n * import { GerbilEmbeddings } from \"gerbil/langchain\";\n * const embeddings = new GerbilEmbeddings();\n * const vectors = await embeddings.embedDocuments([\"Hello\", \"World\"]);\n * ```\n *\n * @example Text-to-Speech\n * ```ts\n * import { GerbilTTS } from \"gerbil/langchain\";\n * const tts = new GerbilTTS({ voice: \"af_heart\" });\n * const audio = await tts.speak(\"Hello world!\");\n * ```\n *\n * @example Speech-to-Text\n * ```ts\n * import { GerbilSTT } from \"gerbil/langchain\";\n * const stt = new GerbilSTT({ model: \"whisper-tiny.en\" });\n * const text = await stt.transcribe(audioBuffer);\n * ```\n */\n\nimport { Gerbil } from \"../core/gerbil.js\";\nimport type {\n GenerateOptions,\n GerbilConfig,\n ImageInput,\n SpeakOptions,\n TranscribeOptions,\n} from \"../core/types.js\";\n\n// Singleton Gerbil instance\nlet instance: Gerbil | null = null;\nlet loadPromise: Promise<void> | null = null;\n\nasync function getInstance(model: string): Promise<Gerbil> {\n if (!instance) {\n instance = new Gerbil();\n }\n if (!(instance.isLoaded() || loadPromise)) {\n loadPromise = instance.loadModel(model);\n }\n if (loadPromise) {\n await loadPromise;\n loadPromise = null;\n }\n return instance;\n}\n\n/**\n * Gerbil LLM for LangChain\n *\n * Supports text generation with optional vision (images) input.\n */\nexport class GerbilLLM {\n private readonly model: string;\n private readonly options: GenerateOptions;\n\n constructor(config: GerbilConfig & GenerateOptions = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n this.options = config;\n }\n\n get _llmType(): string {\n return \"gerbil\";\n }\n\n /**\n * Generate text from a prompt\n */\n async invoke(prompt: string, options?: GenerateOptions): Promise<string> {\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, { ...this.options, ...options });\n return result.text;\n }\n\n /**\n * Generate text with images (vision)\n *\n * @example\n * ```ts\n * const llm = new GerbilLLM({ model: \"ministral-3b\" });\n * const result = await llm.invokeWithImages(\"Describe this image\", [\n * { source: \"https://example.com/photo.jpg\" }\n * ]);\n * ```\n */\n async invokeWithImages(\n prompt: string,\n images: ImageInput[],\n options?: GenerateOptions,\n ): Promise<string> {\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, { ...this.options, ...options, images });\n return result.text;\n }\n\n /**\n * Stream text generation\n */\n async *stream(prompt: string, options?: GenerateOptions): AsyncGenerator<string> {\n const g = await getInstance(this.model);\n yield* g.stream(prompt, { ...this.options, ...options });\n }\n\n /**\n * Batch generate text for multiple prompts\n */\n async batch(prompts: string[], options?: GenerateOptions): Promise<string[]> {\n const results: string[] = [];\n for (const prompt of prompts) {\n results.push(await this.invoke(prompt, options));\n }\n return results;\n }\n\n /**\n * Check if the model supports vision input\n */\n async supportsVision(): Promise<boolean> {\n const 
g = await getInstance(this.model);\n return g.supportsVision();\n }\n\n // LangChain compatibility methods\n async call(prompt: string): Promise<string> {\n return this.invoke(prompt);\n }\n\n async predict(text: string): Promise<string> {\n return this.invoke(text);\n }\n\n async generate(prompts: string[]): Promise<{ generations: { text: string }[][] }> {\n const results = await this.batch(prompts);\n return {\n generations: results.map((text) => [{ text }]),\n };\n }\n}\n\n/**\n * Gerbil Embeddings for LangChain\n */\nexport class GerbilEmbeddings {\n private readonly model: string;\n\n constructor(config: { model?: string } = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n }\n\n async embedQuery(text: string): Promise<number[]> {\n const g = await getInstance(this.model);\n const result = await g.embed(text);\n return result.vector;\n }\n\n async embedDocuments(documents: string[]): Promise<number[][]> {\n const g = await getInstance(this.model);\n const results = await g.embedBatch(documents);\n return results.map((r) => r.vector);\n }\n}\n\n/**\n * Gerbil Text-to-Speech for LangChain\n *\n * Note: LangChain doesn't have a standard TTS interface, so this is a\n * utility class for use in LangChain pipelines.\n *\n * @example\n * ```ts\n * import { GerbilTTS } from \"gerbil/langchain\";\n *\n * const tts = new GerbilTTS({ voice: \"af_heart\" });\n * const result = await tts.speak(\"Hello world!\");\n * // result.audio = Float32Array, result.sampleRate = 24000\n *\n * // List voices\n * const voices = await tts.listVoices();\n * ```\n */\nexport class GerbilTTS {\n private readonly voice: string;\n private readonly speed: number;\n private gerbil: Gerbil | null = null;\n\n constructor(config: { voice?: string; speed?: number } = {}) {\n this.voice = config.voice || \"af_heart\";\n this.speed = config.speed || 1.0;\n }\n\n private async getInstance(): Promise<Gerbil> {\n if (!this.gerbil) {\n this.gerbil = new Gerbil();\n }\n await this.gerbil.ensureTTSLoaded();\n return this.gerbil;\n }\n\n /**\n * Generate speech from text\n */\n async speak(\n text: string,\n options?: SpeakOptions,\n ): Promise<{ audio: Float32Array; sampleRate: number; duration: number }> {\n const g = await this.getInstance();\n const result = await g.speak(text, {\n voice: options?.voice || this.voice,\n speed: options?.speed || this.speed,\n });\n return {\n audio: result.audio,\n sampleRate: result.sampleRate,\n duration: result.duration,\n };\n }\n\n /**\n * Stream speech generation for long text\n */\n async *speakStream(\n text: string,\n options?: SpeakOptions,\n ): AsyncGenerator<{ samples: Float32Array; index: number; isFinal: boolean }> {\n const g = await this.getInstance();\n yield* g.speakStream(text, {\n voice: options?.voice || this.voice,\n speed: options?.speed || this.speed,\n });\n }\n\n /**\n * List available voices\n */\n async listVoices(): Promise<\n Array<{ id: string; name: string; gender: string; language: string }>\n > {\n const g = await this.getInstance();\n return g.listVoices();\n }\n}\n\n/**\n * Gerbil Speech-to-Text for LangChain\n *\n * Note: LangChain doesn't have a standard STT interface, so this is a\n * utility class for use in LangChain pipelines.\n *\n * @example\n * ```ts\n * import { GerbilSTT } from \"gerbil/langchain\";\n * import { readFileSync } from \"fs\";\n *\n * const stt = new GerbilSTT({ model: \"whisper-tiny.en\" });\n * const audio = new Uint8Array(readFileSync(\"audio.wav\"));\n * const result = await stt.transcribe(audio);\n * 
console.log(result.text);\n * ```\n */\nexport class GerbilSTT {\n private readonly model: string;\n private gerbil: Gerbil | null = null;\n\n constructor(config: { model?: string } = {}) {\n this.model = config.model || \"whisper-tiny.en\";\n }\n\n private async getInstance(): Promise<Gerbil> {\n if (!this.gerbil) {\n this.gerbil = new Gerbil();\n }\n await this.gerbil.loadSTT(this.model);\n return this.gerbil;\n }\n\n /**\n * Transcribe audio to text\n *\n * @param audio - WAV file as Uint8Array or Float32Array at 16kHz\n */\n async transcribe(\n audio: Uint8Array | Float32Array,\n options?: TranscribeOptions,\n ): Promise<{\n text: string;\n language?: string;\n duration?: number;\n segments?: Array<{ text: string; start: number; end: number }>;\n }> {\n const g = await this.getInstance();\n const result = await g.transcribe(audio, options);\n return {\n text: result.text,\n language: result.language,\n duration: result.duration,\n segments: result.segments,\n };\n }\n\n /**\n * List available STT models\n */\n async listModels(): Promise<Array<{ id: string; size: string; multilingual: boolean }>> {\n const g = await this.getInstance();\n const models = await g.listSTTModels();\n return models.map((m) => ({\n id: m.id,\n size: m.size,\n multilingual: m.multilingual,\n }));\n }\n}\n\nexport default { GerbilLLM, GerbilEmbeddings, GerbilTTS, GerbilSTT };\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CA,IAAIA,WAA0B;AAC9B,IAAIC,cAAoC;AAExC,eAAe,YAAY,OAAgC;AACzD,KAAI,CAAC,SACH,YAAW,IAAI,QAAQ;AAEzB,KAAI,EAAE,SAAS,UAAU,IAAI,aAC3B,eAAc,SAAS,UAAU,MAAM;AAEzC,KAAI,aAAa;AACf,QAAM;AACN,gBAAc;;AAEhB,QAAO;;;;;;;AAQT,IAAa,YAAb,MAAuB;CACrB,AAAiB;CACjB,AAAiB;CAEjB,YAAY,SAAyC,EAAE,EAAE;AACvD,OAAK,QAAQ,OAAO,SAAS;AAC7B,OAAK,UAAU;;CAGjB,IAAI,WAAmB;AACrB,SAAO;;;;;CAMT,MAAM,OAAO,QAAgB,SAA4C;AAGvE,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC,EAC1D;;;;;;;;;;;;;CAchB,MAAM,iBACJ,QACA,QACA,SACiB;AAGjB,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS;GAAQ,CAAC,EAClE;;;;;CAMhB,OAAO,OAAO,QAAgB,SAAmD;AAE/E,UADU,MAAM,YAAY,KAAK,MAAM,EAC9B,OAAO,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC;;;;;CAM1D,MAAM,MAAM,SAAmB,SAA8C;EAC3E,MAAMC,UAAoB,EAAE;AAC5B,OAAK,MAAM,UAAU,QACnB,SAAQ,KAAK,MAAM,KAAK,OAAO,QAAQ,QAAQ,CAAC;AAElD,SAAO;;;;;CAMT,MAAM,iBAAmC;AAEvC,UADU,MAAM,YAAY,KAAK,MAAM,EAC9B,gBAAgB;;CAI3B,MAAM,KAAK,QAAiC;AAC1C,SAAO,KAAK,OAAO,OAAO;;CAG5B,MAAM,QAAQ,MAA+B;AAC3C,SAAO,KAAK,OAAO,KAAK;;CAG1B,MAAM,SAAS,SAAmE;AAEhF,SAAO,EACL,cAFc,MAAM,KAAK,MAAM,QAAQ,EAElB,KAAK,SAAS,CAAC,EAAE,MAAM,CAAC,CAAC,EAC/C;;;;;;AAOL,IAAa,mBAAb,MAA8B;CAC5B,AAAiB;CAEjB,YAAY,SAA6B,EAAE,EAAE;AAC3C,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAM,WAAW,MAAiC;AAGhD,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,MAAM,KAAK,EACpB;;CAGhB,MAAM,eAAe,WAA0C;AAG7D,UADgB,OADN,MAAM,YAAY,KAAK,MAAM,EACf,WAAW,UAAU,EAC9B,KAAK,MAAM,EAAE,OAAO;;;;;;;;;;;;;;;;;;;;;AAsBvC,IAAa,YAAb,MAAuB;CACrB,AAAiB;CACjB,AAAiB;CACjB,AAAQ,SAAwB;CAEhC,YAAY,SAA6C,EAAE,EAAE;AAC3D,OAAK,QAAQ,OAAO,SAAS;AAC7B,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAc,cAA+B;AAC3C,MAAI,CAAC,KAAK,OACR,MAAK,SAAS,IAAI,QAAQ;AAE5B,QAAM,KAAK,OAAO,iBAAiB;AACnC,SAAO,KAAK;;;;;CAMd,MAAM,MACJ,MACA,SACwE;EAExE,MAAM,SAAS,OADL,MAAM,KAAK,aAAa,EACX,MAAM,MAAM;GACjC,OAAO,SAAS,SAAS,KAAK;GAC9B,OAAO,SAAS,SAAS,KAAK;GAC/B,CAAC;AACF,SAAO;GACL,OAAO,OAAO;GACd,YAAY,OAAO;GACnB,UAAU,OAAO;GAClB;;;;;CAMH,OAAO,YACL,MACA,SAC4E;AAE5E,UADU,MAAM,KAAK,aAAa,EACzB,YAAY,MAAM;GACzB,OAAO,SAAS,SAAS,KAAK;GAC9B,OAAO,SAAS,SAAS,KAAK;GAC/B,CAAC;;;;;CAMJ,MAAM,aAEJ;AAEA,UADU,MAAM,KAAK,aAAa,EACzB,YAAY;;;;;;;;;;;;;;;;;;;;AAqBzB,IAAa,YAAb,MAAuB;CACrB,
AAAiB;CACjB,AAAQ,SAAwB;CAEhC,YAAY,SAA6B,EAAE,EAAE;AAC3C,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAc,cAA+B;AAC3C,MAAI,CAAC,KAAK,OACR,MAAK,SAAS,IAAI,QAAQ;AAE5B,QAAM,KAAK,OAAO,QAAQ,KAAK,MAAM;AACrC,SAAO,KAAK;;;;;;;CAQd,MAAM,WACJ,OACA,SAMC;EAED,MAAM,SAAS,OADL,MAAM,KAAK,aAAa,EACX,WAAW,OAAO,QAAQ;AACjD,SAAO;GACL,MAAM,OAAO;GACb,UAAU,OAAO;GACjB,UAAU,OAAO;GACjB,UAAU,OAAO;GAClB;;;;;CAMH,MAAM,aAAkF;AAGtF,UADe,OADL,MAAM,KAAK,aAAa,EACX,eAAe,EACxB,KAAK,OAAO;GACxB,IAAI,EAAE;GACN,MAAM,EAAE;GACR,cAAc,EAAE;GACjB,EAAE;;;AAIP,wBAAe;CAAE;CAAW;CAAkB;CAAW;CAAW"}
@@ -1,4 +1,4 @@
- import { c as GerbilConfig, o as GenerateOptions } from "../types-CiTc7ez3.mjs";
+ import { c as GerbilConfig, o as GenerateOptions } from "../types-evP8RShr.mjs";
 
  //#region src/integrations/llamaindex.d.ts
 
@@ -1,4 +1,5 @@
- import { t as Gerbil } from "../gerbil-DoDGHe6Z.mjs";
+ import { t as Gerbil } from "../gerbil-BZklpDhM.mjs";
+ import "../chrome-backend-CORwaIyC.mjs";
  import "../utils-CZBZ8dgR.mjs";
 
  //#region src/integrations/llamaindex.ts
@@ -1 +1 @@
- {"version":3,"file":"llamaindex.mjs","names":["instance: Gerbil | null","loadPromise: Promise<void> | null"],"sources":["../../src/integrations/llamaindex.ts"],"sourcesContent":["/**\n * Gerbil LlamaIndex Integration\n *\n * @example\n * ```ts\n * import { GerbilLLM, GerbilEmbedding } from \"gerbil/llamaindex\";\n *\n * const llm = new GerbilLLM({ model: \"qwen3-0.6b\" });\n * const embedModel = new GerbilEmbedding();\n *\n * const index = await VectorStoreIndex.fromDocuments(documents, {\n * llm,\n * embedModel,\n * });\n * ```\n */\n\nimport { Gerbil } from \"../core/gerbil.js\";\nimport type { GenerateOptions, GerbilConfig } from \"../core/types.js\";\n\n// Singleton Gerbil instance\nlet instance: Gerbil | null = null;\nlet loadPromise: Promise<void> | null = null;\n\nasync function getInstance(model: string): Promise<Gerbil> {\n if (!instance) {\n instance = new Gerbil();\n }\n if (!(instance.isLoaded() || loadPromise)) {\n loadPromise = instance.loadModel(model);\n }\n if (loadPromise) {\n await loadPromise;\n loadPromise = null;\n }\n return instance;\n}\n\n/**\n * Gerbil LLM for LlamaIndex\n */\nexport class GerbilLLM {\n private readonly model: string;\n private readonly options: GenerateOptions;\n\n constructor(config: GerbilConfig & GenerateOptions = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n this.options = config;\n }\n\n async complete(prompt: string, options?: GenerateOptions): Promise<{ text: string }> {\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, { ...this.options, ...options });\n return { text: result.text };\n }\n\n async chat(\n messages: { role: string; content: string }[],\n ): Promise<{ message: { content: string } }> {\n // Convert chat messages to a single prompt\n const prompt = messages.map((m) => `${m.role}: ${m.content}`).join(\"\\n\");\n\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, this.options);\n\n return {\n message: { content: result.text },\n };\n }\n\n async *stream(prompt: string, options?: GenerateOptions): AsyncGenerator<{ delta: string }> {\n const g = await getInstance(this.model);\n for await (const chunk of g.stream(prompt, { ...this.options, ...options })) {\n yield { delta: chunk };\n }\n }\n}\n\n/**\n * Gerbil Embedding Model for LlamaIndex\n */\nexport class GerbilEmbedding {\n private readonly model: string;\n\n constructor(config: { model?: string } = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n }\n\n async getTextEmbedding(text: string): Promise<number[]> {\n const g = await getInstance(this.model);\n const result = await g.embed(text);\n return result.vector;\n }\n\n async getTextEmbeddings(texts: string[]): Promise<number[][]> {\n const g = await getInstance(this.model);\n const results = await g.embedBatch(texts);\n return results.map((r) => r.vector);\n }\n\n async getQueryEmbedding(query: string): Promise<number[]> {\n return this.getTextEmbedding(query);\n }\n}\n\nexport default { GerbilLLM, GerbilEmbedding 
};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAqBA,IAAIA,WAA0B;AAC9B,IAAIC,cAAoC;AAExC,eAAe,YAAY,OAAgC;AACzD,KAAI,CAAC,SACH,YAAW,IAAI,QAAQ;AAEzB,KAAI,EAAE,SAAS,UAAU,IAAI,aAC3B,eAAc,SAAS,UAAU,MAAM;AAEzC,KAAI,aAAa;AACf,QAAM;AACN,gBAAc;;AAEhB,QAAO;;;;;AAMT,IAAa,YAAb,MAAuB;CACrB,AAAiB;CACjB,AAAiB;CAEjB,YAAY,SAAyC,EAAE,EAAE;AACvD,OAAK,QAAQ,OAAO,SAAS;AAC7B,OAAK,UAAU;;CAGjB,MAAM,SAAS,QAAgB,SAAsD;AAGnF,SAAO,EAAE,OADM,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC,EAClD,MAAM;;CAG9B,MAAM,KACJ,UAC2C;EAE3C,MAAM,SAAS,SAAS,KAAK,MAAM,GAAG,EAAE,KAAK,IAAI,EAAE,UAAU,CAAC,KAAK,KAAK;AAKxE,SAAO,EACL,SAAS,EAAE,UAHE,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ,KAAK,QAAQ,EAGxB,MAAM,EAClC;;CAGH,OAAO,OAAO,QAAgB,SAA8D;EAC1F,MAAM,IAAI,MAAM,YAAY,KAAK,MAAM;AACvC,aAAW,MAAM,SAAS,EAAE,OAAO,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC,CACzE,OAAM,EAAE,OAAO,OAAO;;;;;;AAQ5B,IAAa,kBAAb,MAA6B;CAC3B,AAAiB;CAEjB,YAAY,SAA6B,EAAE,EAAE;AAC3C,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAM,iBAAiB,MAAiC;AAGtD,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,MAAM,KAAK,EACpB;;CAGhB,MAAM,kBAAkB,OAAsC;AAG5D,UADgB,OADN,MAAM,YAAY,KAAK,MAAM,EACf,WAAW,MAAM,EAC1B,KAAK,MAAM,EAAE,OAAO;;CAGrC,MAAM,kBAAkB,OAAkC;AACxD,SAAO,KAAK,iBAAiB,MAAM;;;AAIvC,yBAAe;CAAE;CAAW;CAAiB"}
+ {"version":3,"file":"llamaindex.mjs","names":["instance: Gerbil | null","loadPromise: Promise<void> | null"],"sources":["../../src/integrations/llamaindex.ts"],"sourcesContent":["/**\n * Gerbil LlamaIndex Integration\n *\n * @example\n * ```ts\n * import { GerbilLLM, GerbilEmbedding } from \"gerbil/llamaindex\";\n *\n * const llm = new GerbilLLM({ model: \"qwen3-0.6b\" });\n * const embedModel = new GerbilEmbedding();\n *\n * const index = await VectorStoreIndex.fromDocuments(documents, {\n * llm,\n * embedModel,\n * });\n * ```\n */\n\nimport { Gerbil } from \"../core/gerbil.js\";\nimport type { GenerateOptions, GerbilConfig } from \"../core/types.js\";\n\n// Singleton Gerbil instance\nlet instance: Gerbil | null = null;\nlet loadPromise: Promise<void> | null = null;\n\nasync function getInstance(model: string): Promise<Gerbil> {\n if (!instance) {\n instance = new Gerbil();\n }\n if (!(instance.isLoaded() || loadPromise)) {\n loadPromise = instance.loadModel(model);\n }\n if (loadPromise) {\n await loadPromise;\n loadPromise = null;\n }\n return instance;\n}\n\n/**\n * Gerbil LLM for LlamaIndex\n */\nexport class GerbilLLM {\n private readonly model: string;\n private readonly options: GenerateOptions;\n\n constructor(config: GerbilConfig & GenerateOptions = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n this.options = config;\n }\n\n async complete(prompt: string, options?: GenerateOptions): Promise<{ text: string }> {\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, { ...this.options, ...options });\n return { text: result.text };\n }\n\n async chat(\n messages: { role: string; content: string }[],\n ): Promise<{ message: { content: string } }> {\n // Convert chat messages to a single prompt\n const prompt = messages.map((m) => `${m.role}: ${m.content}`).join(\"\\n\");\n\n const g = await getInstance(this.model);\n const result = await g.generate(prompt, this.options);\n\n return {\n message: { content: result.text },\n };\n }\n\n async *stream(prompt: string, options?: GenerateOptions): AsyncGenerator<{ delta: string }> {\n const g = await getInstance(this.model);\n for await (const chunk of g.stream(prompt, { ...this.options, ...options })) {\n yield { delta: chunk };\n }\n }\n}\n\n/**\n * Gerbil Embedding Model for LlamaIndex\n */\nexport class GerbilEmbedding {\n private readonly model: string;\n\n constructor(config: { model?: string } = {}) {\n this.model = config.model || \"qwen3-0.6b\";\n }\n\n async getTextEmbedding(text: string): Promise<number[]> {\n const g = await getInstance(this.model);\n const result = await g.embed(text);\n return result.vector;\n }\n\n async getTextEmbeddings(texts: string[]): Promise<number[][]> {\n const g = await getInstance(this.model);\n const results = await g.embedBatch(texts);\n return results.map((r) => r.vector);\n }\n\n async getQueryEmbedding(query: string): Promise<number[]> {\n return this.getTextEmbedding(query);\n }\n}\n\nexport default { GerbilLLM, GerbilEmbedding 
};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAqBA,IAAIA,WAA0B;AAC9B,IAAIC,cAAoC;AAExC,eAAe,YAAY,OAAgC;AACzD,KAAI,CAAC,SACH,YAAW,IAAI,QAAQ;AAEzB,KAAI,EAAE,SAAS,UAAU,IAAI,aAC3B,eAAc,SAAS,UAAU,MAAM;AAEzC,KAAI,aAAa;AACf,QAAM;AACN,gBAAc;;AAEhB,QAAO;;;;;AAMT,IAAa,YAAb,MAAuB;CACrB,AAAiB;CACjB,AAAiB;CAEjB,YAAY,SAAyC,EAAE,EAAE;AACvD,OAAK,QAAQ,OAAO,SAAS;AAC7B,OAAK,UAAU;;CAGjB,MAAM,SAAS,QAAgB,SAAsD;AAGnF,SAAO,EAAE,OADM,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC,EAClD,MAAM;;CAG9B,MAAM,KACJ,UAC2C;EAE3C,MAAM,SAAS,SAAS,KAAK,MAAM,GAAG,EAAE,KAAK,IAAI,EAAE,UAAU,CAAC,KAAK,KAAK;AAKxE,SAAO,EACL,SAAS,EAAE,UAHE,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,SAAS,QAAQ,KAAK,QAAQ,EAGxB,MAAM,EAClC;;CAGH,OAAO,OAAO,QAAgB,SAA8D;EAC1F,MAAM,IAAI,MAAM,YAAY,KAAK,MAAM;AACvC,aAAW,MAAM,SAAS,EAAE,OAAO,QAAQ;GAAE,GAAG,KAAK;GAAS,GAAG;GAAS,CAAC,CACzE,OAAM,EAAE,OAAO,OAAO;;;;;;AAQ5B,IAAa,kBAAb,MAA6B;CAC3B,AAAiB;CAEjB,YAAY,SAA6B,EAAE,EAAE;AAC3C,OAAK,QAAQ,OAAO,SAAS;;CAG/B,MAAM,iBAAiB,MAAiC;AAGtD,UADe,OADL,MAAM,YAAY,KAAK,MAAM,EAChB,MAAM,KAAK,EACpB;;CAGhB,MAAM,kBAAkB,OAAsC;AAG5D,UADgB,OADN,MAAM,YAAY,KAAK,MAAM,EACf,WAAW,MAAM,EAC1B,KAAK,MAAM,EAAE,OAAO;;CAGrC,MAAM,kBAAkB,OAAkC;AACxD,SAAO,KAAK,iBAAiB,MAAM;;;AAIvC,yBAAe;CAAE;CAAW;CAAiB"}
@@ -1,5 +1,5 @@
- import { c as GerbilConfig } from "../types-CiTc7ez3.mjs";
- import { t as Gerbil } from "../gerbil-qOTe1nl2.mjs";
+ import { c as GerbilConfig } from "../types-evP8RShr.mjs";
+ import { t as Gerbil } from "../gerbil-DJygY0sJ.mjs";
 
  //#region src/integrations/mcp.d.ts
 
@@ -1,7 +1,8 @@
- import "../gerbil-DoDGHe6Z.mjs";
+ import "../gerbil-BZklpDhM.mjs";
+ import "../chrome-backend-CORwaIyC.mjs";
  import "../utils-CZBZ8dgR.mjs";
- import "../one-liner-DxnNs_JK.mjs";
- import "../skills-DulrOPeP.mjs";
- import { n as mcp_default, r as startMCPServer, t as createMCPServer } from "../mcp-kzDDWIoS.mjs";
+ import "../one-liner-mH5SKPvT.mjs";
+ import "../skills-CPB_9YfF.mjs";
+ import { n as mcp_default, r as startMCPServer, t as createMCPServer } from "../mcp-ZCC5OR7B.mjs";
 
  export { createMCPServer, mcp_default as default, startMCPServer };
@@ -1,5 +1,5 @@
- import { t as Gerbil } from "./gerbil-DoDGHe6Z.mjs";
- import { a as summarize, d as explain, m as commit, s as review, t as translate } from "./skills-DulrOPeP.mjs";
+ import { t as Gerbil } from "./gerbil-BZklpDhM.mjs";
+ import { a as summarize, d as explain, m as commit, s as review, t as translate } from "./skills-CPB_9YfF.mjs";
 
  //#region src/integrations/mcp.ts
  /**
@@ -318,4 +318,4 @@ var mcp_default = {
 
  //#endregion
  export { mcp_default as n, startMCPServer as r, createMCPServer as t };
- //# sourceMappingURL=mcp-kzDDWIoS.mjs.map
+ //# sourceMappingURL=mcp-ZCC5OR7B.mjs.map