@ai-sdk/groq 2.0.0-canary.8 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -20,7 +20,7 @@ import {
   parseProviderOptions,
   postJsonToApi
 } from "@ai-sdk/provider-utils";
-import { z as z3 } from "zod";
+import { z as z3 } from "zod/v4";
 
 // src/convert-to-groq-chat-messages.ts
 import {
@@ -80,7 +80,7 @@ function convertToGroqChatMessages(prompt) {
                 type: "function",
                 function: {
                   name: part.toolName,
-                  arguments: JSON.stringify(part.args)
+                  arguments: JSON.stringify(part.input)
                 }
               });
               break;
@@ -96,10 +96,23 @@ function convertToGroqChatMessages(prompt) {
       }
       case "tool": {
         for (const toolResponse of content) {
+          const output = toolResponse.output;
+          let contentValue;
+          switch (output.type) {
+            case "text":
+            case "error-text":
+              contentValue = output.value;
+              break;
+            case "content":
+            case "json":
+            case "error-json":
+              contentValue = JSON.stringify(output.value);
+              break;
+          }
           messages.push({
             role: "tool",
             tool_call_id: toolResponse.toolCallId,
-            content: JSON.stringify(toolResponse.result)
+            content: contentValue
           });
         }
         break;
@@ -127,22 +140,28 @@ function getResponseMetadata({
 }
 
 // src/groq-chat-options.ts
-import { z } from "zod";
+import { z } from "zod/v4";
 var groqProviderOptions = z.object({
-  reasoningFormat: z.enum(["parsed", "raw", "hidden"]).nullish(),
+  reasoningFormat: z.enum(["parsed", "raw", "hidden"]).optional(),
   /**
    * Whether to enable parallel function calling during tool use. Default to true.
    */
-  parallelToolCalls: z.boolean().nullish(),
+  parallelToolCalls: z.boolean().optional(),
   /**
    * A unique identifier representing your end-user, which can help OpenAI to
    * monitor and detect abuse. Learn more.
    */
-  user: z.string().nullish()
+  user: z.string().optional(),
+  /**
+   * Whether to use structured outputs.
+   *
+   * @default true
+   */
+  structuredOutputs: z.boolean().optional()
 });
 
 // src/groq-error.ts
-import { z as z2 } from "zod";
+import { z as z2 } from "zod/v4";
 import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
 var groqErrorDataSchema = z2.object({
   error: z2.object({
@@ -178,7 +197,7 @@ function prepareTools({
         function: {
           name: tool.name,
           description: tool.description,
-          parameters: tool.parameters
+          parameters: tool.inputSchema
         }
       });
     }
@@ -231,21 +250,18 @@ function mapGroqFinishReason(finishReason) {
 
 // src/groq-chat-language-model.ts
 var GroqChatLanguageModel = class {
-  constructor(modelId, settings, config) {
+  constructor(modelId, config) {
     this.specificationVersion = "v2";
-    this.supportsStructuredOutputs = false;
-    this.defaultObjectGenerationMode = "json";
+    this.supportedUrls = {
+      "image/*": [/^https?:\/\/.*$/]
+    };
     this.modelId = modelId;
-    this.settings = settings;
     this.config = config;
   }
   get provider() {
     return this.config.provider;
   }
-  get supportsImageUrls() {
-    return !this.settings.downloadImages;
-  }
-  getArgs({
+  async getArgs({
     prompt,
     maxOutputTokens,
     temperature,
@@ -261,25 +277,27 @@ var GroqChatLanguageModel = class {
     toolChoice,
     providerOptions
   }) {
+    var _a, _b;
     const warnings = [];
+    const groqOptions = await parseProviderOptions({
+      provider: "groq",
+      providerOptions,
+      schema: groqProviderOptions
+    });
+    const structuredOutputs = (_a = groqOptions == null ? void 0 : groqOptions.structuredOutputs) != null ? _a : true;
     if (topK != null) {
       warnings.push({
         type: "unsupported-setting",
         setting: "topK"
       });
     }
-    if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
+    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !structuredOutputs) {
       warnings.push({
         type: "unsupported-setting",
         setting: "responseFormat",
-        details: "JSON response format schema is not supported"
+        details: "JSON response format schema is only supported with structuredOutputs"
       });
     }
-    const groqOptions = parseProviderOptions({
-      provider: "groq",
-      providerOptions,
-      schema: groqProviderOptions
-    });
     const {
       tools: groqTools,
       toolChoice: groqToolChoice,
@@ -301,10 +319,14 @@ var GroqChatLanguageModel = class {
         stop: stopSequences,
         seed,
         // response format:
-        response_format: (
-          // json object response format is not supported for streaming:
-          stream === false && (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0
-        ),
+        response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && responseFormat.schema != null ? {
+          type: "json_schema",
+          json_schema: {
+            schema: responseFormat.schema,
+            name: (_b = responseFormat.name) != null ? _b : "response",
+            description: responseFormat.description
+          }
+        } : { type: "json_object" } : void 0,
         // provider options:
         reasoning_format: groqOptions == null ? void 0 : groqOptions.reasoningFormat,
         // messages:
@@ -317,8 +339,11 @@ var GroqChatLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e;
-    const { args, warnings } = this.getArgs({ ...options, stream: false });
+    var _a, _b, _c, _d, _e, _f, _g;
+    const { args, warnings } = await this.getArgs({
+      ...options,
+      stream: false
+    });
     const body = JSON.stringify(args);
     const {
       responseHeaders,
@@ -339,29 +364,35 @@ var GroqChatLanguageModel = class {
       fetch: this.config.fetch
     });
     const choice = response.choices[0];
-    return {
-      text: choice.message.content != null ? { type: "text", text: choice.message.content } : void 0,
-      reasoning: choice.message.reasoning ? [
-        {
-          type: "reasoning",
-          reasoningType: "text",
-          text: choice.message.reasoning
-        }
-      ] : void 0,
-      toolCalls: (_a = choice.message.tool_calls) == null ? void 0 : _a.map((toolCall) => {
-        var _a2;
-        return {
+    const content = [];
+    const text = choice.message.content;
+    if (text != null && text.length > 0) {
+      content.push({ type: "text", text });
+    }
+    const reasoning = choice.message.reasoning;
+    if (reasoning != null && reasoning.length > 0) {
+      content.push({
+        type: "reasoning",
+        text: reasoning
+      });
+    }
+    if (choice.message.tool_calls != null) {
+      for (const toolCall of choice.message.tool_calls) {
+        content.push({
           type: "tool-call",
-          toolCallType: "function",
-          toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
+          toolCallId: (_a = toolCall.id) != null ? _a : generateId(),
           toolName: toolCall.function.name,
-          args: toolCall.function.arguments
-        };
-      }),
+          input: toolCall.function.arguments
+        });
+      }
+    }
+    return {
+      content,
       finishReason: mapGroqFinishReason(choice.finish_reason),
       usage: {
         inputTokens: (_c = (_b = response.usage) == null ? void 0 : _b.prompt_tokens) != null ? _c : void 0,
-        outputTokens: (_e = (_d = response.usage) == null ? void 0 : _d.completion_tokens) != null ? _e : void 0
+        outputTokens: (_e = (_d = response.usage) == null ? void 0 : _d.completion_tokens) != null ? _e : void 0,
+        totalTokens: (_g = (_f = response.usage) == null ? void 0 : _f.total_tokens) != null ? _g : void 0
       },
       response: {
         ...getResponseMetadata(response),
@@ -373,7 +404,7 @@ var GroqChatLanguageModel = class {
     };
   }
   async doStream(options) {
-    const { args, warnings } = this.getArgs({ ...options, stream: true });
+    const { args, warnings } = await this.getArgs({ ...options, stream: true });
     const body = JSON.stringify({ ...args, stream: true });
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
@@ -394,15 +425,24 @@ var GroqChatLanguageModel = class {
     let finishReason = "unknown";
     const usage = {
       inputTokens: void 0,
-      outputTokens: void 0
+      outputTokens: void 0,
+      totalTokens: void 0
     };
     let isFirstChunk = true;
+    let isActiveText = false;
+    let isActiveReasoning = false;
     let providerMetadata;
     return {
       stream: response.pipeThrough(
         new TransformStream({
+          start(controller) {
+            controller.enqueue({ type: "stream-start", warnings });
+          },
          transform(chunk, controller) {
-            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o;
+            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+            if (options.includeRawChunks) {
+              controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+            }
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
@@ -424,6 +464,7 @@ var GroqChatLanguageModel = class {
             if (((_a = value.x_groq) == null ? void 0 : _a.usage) != null) {
               usage.inputTokens = (_b = value.x_groq.usage.prompt_tokens) != null ? _b : void 0;
               usage.outputTokens = (_c = value.x_groq.usage.completion_tokens) != null ? _c : void 0;
+              usage.totalTokens = (_d = value.x_groq.usage.total_tokens) != null ? _d : void 0;
             }
             const choice = value.choices[0];
             if ((choice == null ? void 0 : choice.finish_reason) != null) {
@@ -434,16 +475,28 @@ var GroqChatLanguageModel = class {
             }
             const delta = choice.delta;
             if (delta.reasoning != null && delta.reasoning.length > 0) {
+              if (!isActiveReasoning) {
+                controller.enqueue({
+                  type: "reasoning-start",
+                  id: "reasoning-0"
+                });
+                isActiveReasoning = true;
+              }
               controller.enqueue({
-                type: "reasoning",
-                reasoningType: "text",
-                text: delta.reasoning
+                type: "reasoning-delta",
+                id: "reasoning-0",
+                delta: delta.reasoning
               });
             }
             if (delta.content != null && delta.content.length > 0) {
+              if (!isActiveText) {
+                controller.enqueue({ type: "text-start", id: "txt-0" });
+                isActiveText = true;
+              }
               controller.enqueue({
-                type: "text",
-                text: delta.content
+                type: "text-delta",
+                id: "txt-0",
+                delta: delta.content
               });
             }
             if (delta.tool_calls != null) {
@@ -462,39 +515,45 @@ var GroqChatLanguageModel = class {
                     message: `Expected 'id' to be a string.`
                   });
                 }
-                if (((_d = toolCallDelta.function) == null ? void 0 : _d.name) == null) {
+                if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
                   throw new InvalidResponseDataError({
                     data: toolCallDelta,
                     message: `Expected 'function.name' to be a string.`
                   });
                 }
+                controller.enqueue({
+                  type: "tool-input-start",
+                  id: toolCallDelta.id,
+                  toolName: toolCallDelta.function.name
+                });
                 toolCalls[index] = {
                   id: toolCallDelta.id,
                   type: "function",
                   function: {
                     name: toolCallDelta.function.name,
-                    arguments: (_e = toolCallDelta.function.arguments) != null ? _e : ""
+                    arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
                   },
                   hasFinished: false
                 };
                 const toolCall2 = toolCalls[index];
-                if (((_f = toolCall2.function) == null ? void 0 : _f.name) != null && ((_g = toolCall2.function) == null ? void 0 : _g.arguments) != null) {
+                if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
                   if (toolCall2.function.arguments.length > 0) {
                     controller.enqueue({
-                      type: "tool-call-delta",
-                      toolCallType: "function",
-                      toolCallId: toolCall2.id,
-                      toolName: toolCall2.function.name,
-                      argsTextDelta: toolCall2.function.arguments
+                      type: "tool-input-delta",
+                      id: toolCall2.id,
+                      delta: toolCall2.function.arguments
                     });
                   }
                   if (isParsableJson(toolCall2.function.arguments)) {
+                    controller.enqueue({
+                      type: "tool-input-end",
+                      id: toolCall2.id
+                    });
                     controller.enqueue({
                       type: "tool-call",
-                      toolCallType: "function",
-                      toolCallId: (_h = toolCall2.id) != null ? _h : generateId(),
+                      toolCallId: (_i = toolCall2.id) != null ? _i : generateId(),
                       toolName: toolCall2.function.name,
-                      args: toolCall2.function.arguments
+                      input: toolCall2.function.arguments
                     });
                     toolCall2.hasFinished = true;
                   }
@@ -505,23 +564,24 @@ var GroqChatLanguageModel = class {
                 if (toolCall.hasFinished) {
                   continue;
                 }
-                if (((_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null) {
-                  toolCall.function.arguments += (_k = (_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null ? _k : "";
+                if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
+                  toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
                 }
                 controller.enqueue({
-                  type: "tool-call-delta",
-                  toolCallType: "function",
-                  toolCallId: toolCall.id,
-                  toolName: toolCall.function.name,
-                  argsTextDelta: (_l = toolCallDelta.function.arguments) != null ? _l : ""
+                  type: "tool-input-delta",
+                  id: toolCall.id,
+                  delta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
                 });
-                if (((_m = toolCall.function) == null ? void 0 : _m.name) != null && ((_n = toolCall.function) == null ? void 0 : _n.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+                if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+                  controller.enqueue({
+                    type: "tool-input-end",
+                    id: toolCall.id
+                  });
                   controller.enqueue({
                     type: "tool-call",
-                    toolCallType: "function",
-                    toolCallId: (_o = toolCall.id) != null ? _o : generateId(),
+                    toolCallId: (_p = toolCall.id) != null ? _p : generateId(),
                     toolName: toolCall.function.name,
-                    args: toolCall.function.arguments
+                    input: toolCall.function.arguments
                   });
                   toolCall.hasFinished = true;
                 }
@@ -529,6 +589,12 @@ var GroqChatLanguageModel = class {
             }
           },
           flush(controller) {
+            if (isActiveReasoning) {
+              controller.enqueue({ type: "reasoning-end", id: "reasoning-0" });
+            }
+            if (isActiveText) {
+              controller.enqueue({ type: "text-end", id: "txt-0" });
+            }
             controller.enqueue({
               type: "finish",
               finishReason,
@@ -538,9 +604,8 @@ var GroqChatLanguageModel = class {
           }
         })
       ),
-      response: { headers: responseHeaders },
-      warnings,
-      request: { body }
+      request: { body },
+      response: { headers: responseHeaders }
     };
   }
 };
@@ -570,7 +635,8 @@ var groqChatResponseSchema = z3.object({
   ),
   usage: z3.object({
     prompt_tokens: z3.number().nullish(),
-    completion_tokens: z3.number().nullish()
+    completion_tokens: z3.number().nullish(),
+    total_tokens: z3.number().nullish()
   }).nullish()
 });
 var groqChatChunkSchema = z3.union([
@@ -602,13 +668,140 @@ var groqChatChunkSchema = z3.union([
     x_groq: z3.object({
       usage: z3.object({
         prompt_tokens: z3.number().nullish(),
-        completion_tokens: z3.number().nullish()
+        completion_tokens: z3.number().nullish(),
+        total_tokens: z3.number().nullish()
       }).nullish()
     }).nullish()
   }),
   groqErrorDataSchema
 ]);
 
+// src/groq-transcription-model.ts
+import {
+  combineHeaders as combineHeaders2,
+  convertBase64ToUint8Array,
+  createJsonResponseHandler as createJsonResponseHandler2,
+  parseProviderOptions as parseProviderOptions2,
+  postFormDataToApi
+} from "@ai-sdk/provider-utils";
+import { z as z4 } from "zod/v4";
+var groqProviderOptionsSchema = z4.object({
+  language: z4.string().nullish(),
+  prompt: z4.string().nullish(),
+  responseFormat: z4.string().nullish(),
+  temperature: z4.number().min(0).max(1).nullish(),
+  timestampGranularities: z4.array(z4.string()).nullish()
+});
+var GroqTranscriptionModel = class {
+  constructor(modelId, config) {
+    this.modelId = modelId;
+    this.config = config;
+    this.specificationVersion = "v2";
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  async getArgs({
+    audio,
+    mediaType,
+    providerOptions
+  }) {
+    var _a, _b, _c, _d, _e;
+    const warnings = [];
+    const groqOptions = await parseProviderOptions2({
+      provider: "groq",
+      providerOptions,
+      schema: groqProviderOptionsSchema
+    });
+    const formData = new FormData();
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
+    formData.append("model", this.modelId);
+    formData.append("file", new File([blob], "audio", { type: mediaType }));
+    if (groqOptions) {
+      const transcriptionModelOptions = {
+        language: (_a = groqOptions.language) != null ? _a : void 0,
+        prompt: (_b = groqOptions.prompt) != null ? _b : void 0,
+        response_format: (_c = groqOptions.responseFormat) != null ? _c : void 0,
+        temperature: (_d = groqOptions.temperature) != null ? _d : void 0,
+        timestamp_granularities: (_e = groqOptions.timestampGranularities) != null ? _e : void 0
+      };
+      for (const key in transcriptionModelOptions) {
+        const value = transcriptionModelOptions[key];
+        if (value !== void 0) {
+          formData.append(key, String(value));
+        }
+      }
+    }
+    return {
+      formData,
+      warnings
+    };
+  }
+  async doGenerate(options) {
+    var _a, _b, _c, _d, _e;
+    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+    const { formData, warnings } = await this.getArgs(options);
+    const {
+      value: response,
+      responseHeaders,
+      rawValue: rawResponse
+    } = await postFormDataToApi({
+      url: this.config.url({
+        path: "/audio/transcriptions",
+        modelId: this.modelId
+      }),
+      headers: combineHeaders2(this.config.headers(), options.headers),
+      formData,
+      failedResponseHandler: groqFailedResponseHandler,
+      successfulResponseHandler: createJsonResponseHandler2(
+        groqTranscriptionResponseSchema
+      ),
+      abortSignal: options.abortSignal,
+      fetch: this.config.fetch
+    });
+    return {
+      text: response.text,
+      segments: (_e = (_d = response.segments) == null ? void 0 : _d.map((segment) => ({
+        text: segment.text,
+        startSecond: segment.start,
+        endSecond: segment.end
+      }))) != null ? _e : [],
+      language: response.language,
+      durationInSeconds: response.duration,
+      warnings,
+      response: {
+        timestamp: currentDate,
+        modelId: this.modelId,
+        headers: responseHeaders,
+        body: rawResponse
+      }
+    };
+  }
+};
+var groqTranscriptionResponseSchema = z4.object({
+  task: z4.string(),
+  language: z4.string(),
+  duration: z4.number(),
+  text: z4.string(),
+  segments: z4.array(
+    z4.object({
+      id: z4.number(),
+      seek: z4.number(),
+      start: z4.number(),
+      end: z4.number(),
+      text: z4.string(),
+      tokens: z4.array(z4.number()),
+      temperature: z4.number(),
+      avg_logprob: z4.number(),
+      compression_ratio: z4.number(),
+      no_speech_prob: z4.number()
+    })
+  ),
+  x_groq: z4.object({
+    id: z4.string()
+  })
+});
+
 // src/groq-provider.ts
 function createGroq(options = {}) {
   var _a;
@@ -621,22 +814,30 @@ function createGroq(options = {}) {
     })}`,
     ...options.headers
   });
-  const createChatModel = (modelId, settings = {}) => new GroqChatLanguageModel(modelId, settings, {
+  const createChatModel = (modelId) => new GroqChatLanguageModel(modelId, {
     provider: "groq.chat",
     url: ({ path }) => `${baseURL}${path}`,
     headers: getHeaders,
     fetch: options.fetch
   });
-  const createLanguageModel = (modelId, settings) => {
+  const createLanguageModel = (modelId) => {
     if (new.target) {
       throw new Error(
         "The Groq model function cannot be called with the new keyword."
       );
     }
-    return createChatModel(modelId, settings);
+    return createChatModel(modelId);
+  };
+  const createTranscriptionModel = (modelId) => {
+    return new GroqTranscriptionModel(modelId, {
+      provider: "groq.transcription",
+      url: ({ path }) => `${baseURL}${path}`,
+      headers: getHeaders,
+      fetch: options.fetch
+    });
   };
-  const provider = function(modelId, settings) {
-    return createLanguageModel(modelId, settings);
+  const provider = function(modelId) {
+    return createLanguageModel(modelId);
   };
   provider.languageModel = createLanguageModel;
   provider.chat = createChatModel;
@@ -646,6 +847,7 @@ function createGroq(options = {}) {
   provider.imageModel = (modelId) => {
     throw new NoSuchModelError({ modelId, modelType: "imageModel" });
   };
+  provider.transcription = createTranscriptionModel;
   return provider;
 }
 var groq = createGroq();
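
Usage note (not part of the published diff): the sketch below illustrates the 2.0.0 provider surface introduced above. It is a minimal example; the model ids are illustrative placeholders, while createGroq, the "groq" providerOptions key, structuredOutputs, and provider.transcription are taken directly from the code in this diff.

// Minimal usage sketch. Model ids below are placeholders.
import { createGroq } from "@ai-sdk/groq";

const groq = createGroq(); // reads GROQ_API_KEY from the environment

// 2.0.0 drops the per-model settings argument: pass only the model id.
const chatModel = groq("llama-3.3-70b-versatile");

// Provider options are validated against groqProviderOptions under the "groq" key.
// structuredOutputs is new in 2.0.0 and defaults to true; when a JSON response
// format with a schema is requested, the model sends response_format: json_schema
// instead of json_object.
const providerOptions = {
  groq: {
    reasoningFormat: "parsed",
    parallelToolCalls: true,
    structuredOutputs: false
  }
};

// New in 2.0.0: transcription models (POST /audio/transcriptions),
// exposed as provider.transcription(modelId).
const transcriptionModel = groq.transcription("whisper-large-v3");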