@llmgateway/ai-sdk-provider 1.0.0 → 1.0.2

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
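The change visible below is a switch of the published bundle from CommonJS output (require/module.exports, index.cjs.map) to ESM output (import/export, index.mjs.map); the exported API (LLMGateway, createLLMGateway, llmgateway) and the default baseURL https://api.llmgateway.io/v1 are unchanged. A minimal consumer sketch under that assumption follows — the variable name and the option value passed here are illustrative, not taken from the diff:

// Assumed consumer code, not part of the package diff.
import { createLLMGateway } from "@llmgateway/ai-sdk-provider";

// apiKey falls back to the LLMGATEWAY_API_KEY environment variable,
// baseURL defaults to https://api.llmgateway.io/v1, and compatibility
// defaults to "compatible" (the bundled `llmgateway` instance uses "strict").
const gateway = createLLMGateway({ compatibility: "strict" });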
@@ -1,9 +1,6 @@
- "use strict";
  var __defProp = Object.defineProperty;
  var __defProps = Object.defineProperties;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
- var __getOwnPropNames = Object.getOwnPropertyNames;
  var __getOwnPropSymbols = Object.getOwnPropertySymbols;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __propIsEnum = Object.prototype.propertyIsEnumerable;
@@ -32,65 +29,53 @@ var __objRest = (source, exclude) => {
  }
  return target;
  };
- var __export = (target, all) => {
- for (var name in all)
- __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
- if (from && typeof from === "object" || typeof from === "function") {
- for (let key of __getOwnPropNames(from))
- if (!__hasOwnProp.call(to, key) && key !== except)
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
- }
- return to;
- };
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
- // src/index.ts
- var index_exports = {};
- __export(index_exports, {
- LLMGateway: () => LLMGateway,
- createLLMGateway: () => createLLMGateway,
- llmgateway: () => llmgateway
- });
- module.exports = __toCommonJS(index_exports);

  // src/llmgateway-facade.ts
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
+ import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";

  // src/schemas/reasoning-details.ts
- var import_zod = require("zod");
- var ReasoningDetailSummarySchema = import_zod.z.object({
- type: import_zod.z.literal("reasoning.summary" /* Summary */),
- summary: import_zod.z.string()
+ import { z } from "zod";
+ var ReasoningDetailSummarySchema = z.object({
+ type: z.literal("reasoning.summary" /* Summary */),
+ summary: z.string()
  });
- var ReasoningDetailEncryptedSchema = import_zod.z.object({
- type: import_zod.z.literal("reasoning.encrypted" /* Encrypted */),
- data: import_zod.z.string()
+ var ReasoningDetailEncryptedSchema = z.object({
+ type: z.literal("reasoning.encrypted" /* Encrypted */),
+ data: z.string()
  });
- var ReasoningDetailTextSchema = import_zod.z.object({
- type: import_zod.z.literal("reasoning.text" /* Text */),
- text: import_zod.z.string().nullish(),
- signature: import_zod.z.string().nullish()
+ var ReasoningDetailTextSchema = z.object({
+ type: z.literal("reasoning.text" /* Text */),
+ text: z.string().nullish(),
+ signature: z.string().nullish()
  });
- var ReasoningDetailUnionSchema = import_zod.z.union([
+ var ReasoningDetailUnionSchema = z.union([
  ReasoningDetailSummarySchema,
  ReasoningDetailEncryptedSchema,
  ReasoningDetailTextSchema
  ]);
- var ReasoningDetailsWithUnknownSchema = import_zod.z.union([
+ var ReasoningDetailsWithUnknownSchema = z.union([
  ReasoningDetailUnionSchema,
- import_zod.z.unknown().transform(() => null)
+ z.unknown().transform(() => null)
  ]);
- var ReasoningDetailArraySchema = import_zod.z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));
+ var ReasoningDetailArraySchema = z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));

  // src/llmgateway-chat-language-model.ts
- var import_provider = require("@ai-sdk/provider");
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
- var import_zod3 = require("zod");
+ import {
+ InvalidResponseDataError,
+ UnsupportedFunctionalityError
+ } from "@ai-sdk/provider";
+ import {
+ combineHeaders,
+ createEventSourceResponseHandler,
+ createJsonResponseHandler,
+ generateId,
+ isParsableJson,
+ postJsonToApi
+ } from "@ai-sdk/provider-utils";
+ import { z as z3 } from "zod";

  // src/convert-to-llmgateway-chat-messages.ts
- var import_provider_utils = require("@ai-sdk/provider-utils");
+ import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
  function getCacheControl(providerMetadata) {
  var _a, _b, _c;
  const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
@@ -136,7 +121,7 @@ function convertToLLMGatewayChatMessages(prompt) {
  return {
  type: "image_url",
  image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(
+ url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
  part.image
  )}`
  },
@@ -150,7 +135,7 @@ function convertToLLMGatewayChatMessages(prompt) {
  filename: String(
  (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.llmgateway) == null ? void 0 : _d.filename
  ),
- file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.data)}` : `data:${part.mimeType};base64,${part.data}`
+ file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
  },
  cache_control: cacheControl
  };
@@ -276,17 +261,17 @@ function mapLLMGatewayFinishReason(finishReason) {
  }

  // src/llmgateway-error.ts
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
- var import_zod2 = require("zod");
- var LLMGatewayErrorResponseSchema = import_zod2.z.object({
- error: import_zod2.z.object({
- message: import_zod2.z.string(),
- type: import_zod2.z.string(),
- param: import_zod2.z.any().nullable(),
- code: import_zod2.z.string().nullable()
+ import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+ import { z as z2 } from "zod";
+ var LLMGatewayErrorResponseSchema = z2.object({
+ error: z2.object({
+ message: z2.string(),
+ type: z2.string(),
+ param: z2.any().nullable(),
+ code: z2.string().nullable()
  })
  });
- var llmgatewayFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
+ var llmgatewayFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: LLMGatewayErrorResponseSchema,
  errorToMessage: (data) => data.error.message
  });
@@ -377,7 +362,7 @@ var LLMGatewayChatLanguageModel = class {
  // Handle all non-text types with a single default case
  default: {
  const _exhaustiveCheck = type;
- throw new import_provider.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError({
  functionality: `${_exhaustiveCheck} mode`
  });
  }
@@ -386,15 +371,15 @@ var LLMGatewayChatLanguageModel = class {
  async doGenerate(options) {
  var _b, _c, _d, _e, _f, _g, _h, _i, _j;
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+ const { responseHeaders, value: response } = await postJsonToApi({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ headers: combineHeaders(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
+ successfulResponseHandler: createJsonResponseHandler(
  LLMGatewayNonStreamChatCompletionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -484,7 +469,7 @@ var LLMGatewayChatLanguageModel = class {
  var _a2;
  return {
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  };
@@ -500,12 +485,12 @@ var LLMGatewayChatLanguageModel = class {
  async doStream(options) {
  var _a, _c;
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+ const { responseHeaders, value: response } = await postJsonToApi({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ headers: combineHeaders(this.config.headers(), options.headers),
  body: __spreadProps(__spreadValues({}, args), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
@@ -514,7 +499,7 @@ var LLMGatewayChatLanguageModel = class {
  }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
  }),
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
+ successfulResponseHandler: createEventSourceResponseHandler(
  LLMGatewayStreamChatCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -655,19 +640,19 @@ var LLMGatewayChatLanguageModel = class {
  const index = toolCallDelta.index;
  if (toolCalls[index] == null) {
  if (toolCallDelta.type !== "function") {
- throw new import_provider.InvalidResponseDataError({
+ throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function' type.`
  });
  }
  if (toolCallDelta.id == null) {
- throw new import_provider.InvalidResponseDataError({
+ throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'id' to be a string.`
  });
  }
  if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
- throw new import_provider.InvalidResponseDataError({
+ throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
  });
@@ -685,7 +670,7 @@ var LLMGatewayChatLanguageModel = class {
  if (toolCall2 == null) {
  throw new Error("Tool call is missing");
  }
- if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
+ if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
@@ -696,7 +681,7 @@ var LLMGatewayChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -718,11 +703,11 @@ var LLMGatewayChatLanguageModel = class {
  toolName: toolCall.function.name,
  argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
  });
- if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+ if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -739,10 +724,10 @@ var LLMGatewayChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
  toolName: toolCall.function.name,
  // Coerce invalid arguments to an empty JSON object
- args: (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
+ args: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
  });
  toolCall.sent = true;
  }
@@ -770,97 +755,97 @@ var LLMGatewayChatLanguageModel = class {
  };
  }
  };
- var LLMGatewayChatCompletionBaseResponseSchema = import_zod3.z.object({
- id: import_zod3.z.string().optional(),
- model: import_zod3.z.string().optional(),
- usage: import_zod3.z.object({
- prompt_tokens: import_zod3.z.number(),
- prompt_tokens_details: import_zod3.z.object({
- cached_tokens: import_zod3.z.number()
+ var LLMGatewayChatCompletionBaseResponseSchema = z3.object({
+ id: z3.string().optional(),
+ model: z3.string().optional(),
+ usage: z3.object({
+ prompt_tokens: z3.number(),
+ prompt_tokens_details: z3.object({
+ cached_tokens: z3.number()
  }).nullish(),
- completion_tokens: import_zod3.z.number(),
- completion_tokens_details: import_zod3.z.object({
- reasoning_tokens: import_zod3.z.number()
+ completion_tokens: z3.number(),
+ completion_tokens_details: z3.object({
+ reasoning_tokens: z3.number()
  }).nullish(),
- total_tokens: import_zod3.z.number(),
- cost: import_zod3.z.number().optional()
+ total_tokens: z3.number(),
+ cost: z3.number().optional()
  }).nullish()
  });
  var LLMGatewayNonStreamChatCompletionResponseSchema = LLMGatewayChatCompletionBaseResponseSchema.extend({
- choices: import_zod3.z.array(
- import_zod3.z.object({
- message: import_zod3.z.object({
- role: import_zod3.z.literal("assistant"),
- content: import_zod3.z.string().nullable().optional(),
- reasoning: import_zod3.z.string().nullable().optional(),
+ choices: z3.array(
+ z3.object({
+ message: z3.object({
+ role: z3.literal("assistant"),
+ content: z3.string().nullable().optional(),
+ reasoning: z3.string().nullable().optional(),
  reasoning_details: ReasoningDetailArraySchema.nullish(),
- tool_calls: import_zod3.z.array(
- import_zod3.z.object({
- id: import_zod3.z.string().optional().nullable(),
- type: import_zod3.z.literal("function"),
- function: import_zod3.z.object({
- name: import_zod3.z.string(),
- arguments: import_zod3.z.string()
+ tool_calls: z3.array(
+ z3.object({
+ id: z3.string().optional().nullable(),
+ type: z3.literal("function"),
+ function: z3.object({
+ name: z3.string(),
+ arguments: z3.string()
  })
  })
  ).optional()
  }),
- index: import_zod3.z.number(),
- logprobs: import_zod3.z.object({
- content: import_zod3.z.array(
- import_zod3.z.object({
- token: import_zod3.z.string(),
- logprob: import_zod3.z.number(),
- top_logprobs: import_zod3.z.array(
- import_zod3.z.object({
- token: import_zod3.z.string(),
- logprob: import_zod3.z.number()
+ index: z3.number(),
+ logprobs: z3.object({
+ content: z3.array(
+ z3.object({
+ token: z3.string(),
+ logprob: z3.number(),
+ top_logprobs: z3.array(
+ z3.object({
+ token: z3.string(),
+ logprob: z3.number()
  })
  )
  })
  ).nullable()
  }).nullable().optional(),
- finish_reason: import_zod3.z.string().optional().nullable()
+ finish_reason: z3.string().optional().nullable()
  })
  )
  });
- var LLMGatewayStreamChatCompletionChunkSchema = import_zod3.z.union([
+ var LLMGatewayStreamChatCompletionChunkSchema = z3.union([
  LLMGatewayChatCompletionBaseResponseSchema.extend({
- choices: import_zod3.z.array(
- import_zod3.z.object({
- delta: import_zod3.z.object({
- role: import_zod3.z.enum(["assistant"]).optional(),
- content: import_zod3.z.string().nullish(),
- reasoning: import_zod3.z.string().nullish().optional(),
+ choices: z3.array(
+ z3.object({
+ delta: z3.object({
+ role: z3.enum(["assistant"]).optional(),
+ content: z3.string().nullish(),
+ reasoning: z3.string().nullish().optional(),
  reasoning_details: ReasoningDetailArraySchema.nullish(),
- tool_calls: import_zod3.z.array(
- import_zod3.z.object({
- index: import_zod3.z.number(),
- id: import_zod3.z.string().nullish(),
- type: import_zod3.z.literal("function").optional(),
- function: import_zod3.z.object({
- name: import_zod3.z.string().nullish(),
- arguments: import_zod3.z.string().nullish()
+ tool_calls: z3.array(
+ z3.object({
+ index: z3.number(),
+ id: z3.string().nullish(),
+ type: z3.literal("function").optional(),
+ function: z3.object({
+ name: z3.string().nullish(),
+ arguments: z3.string().nullish()
  })
  })
  ).nullish()
  }).nullish(),
- logprobs: import_zod3.z.object({
- content: import_zod3.z.array(
- import_zod3.z.object({
- token: import_zod3.z.string(),
- logprob: import_zod3.z.number(),
- top_logprobs: import_zod3.z.array(
- import_zod3.z.object({
- token: import_zod3.z.string(),
- logprob: import_zod3.z.number()
+ logprobs: z3.object({
+ content: z3.array(
+ z3.object({
+ token: z3.string(),
+ logprob: z3.number(),
+ top_logprobs: z3.array(
+ z3.object({
+ token: z3.string(),
+ logprob: z3.number()
  })
  )
  })
  ).nullable()
  }).nullish(),
- finish_reason: import_zod3.z.string().nullable().optional(),
- index: import_zod3.z.number()
+ finish_reason: z3.string().nullable().optional(),
+ index: z3.number()
  })
  )
  }),
@@ -918,12 +903,20 @@ function prepareToolsAndToolChoice(mode) {
  }

  // src/llmgateway-completion-language-model.ts
- var import_provider3 = require("@ai-sdk/provider");
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
- var import_zod4 = require("zod");
+ import { UnsupportedFunctionalityError as UnsupportedFunctionalityError3 } from "@ai-sdk/provider";
+ import {
+ combineHeaders as combineHeaders2,
+ createEventSourceResponseHandler as createEventSourceResponseHandler2,
+ createJsonResponseHandler as createJsonResponseHandler2,
+ postJsonToApi as postJsonToApi2
+ } from "@ai-sdk/provider-utils";
+ import { z as z4 } from "zod";

  // src/convert-to-llmgateway-completion-prompt.ts
- var import_provider2 = require("@ai-sdk/provider");
+ import {
+ InvalidPromptError,
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+ } from "@ai-sdk/provider";
  function convertToLLMGatewayCompletionPrompt({
  prompt,
  inputFormat,
@@ -943,7 +936,7 @@ function convertToLLMGatewayCompletionPrompt({
  for (const { role, content } of prompt) {
  switch (role) {
  case "system": {
- throw new import_provider2.InvalidPromptError({
+ throw new InvalidPromptError({
  message: "Unexpected system message in prompt: ${content}",
  prompt
  });
@@ -955,12 +948,12 @@ function convertToLLMGatewayCompletionPrompt({
  return part.text;
  }
  case "image": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "images"
  });
  }
  case "file": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "file attachments"
  });
  }
@@ -985,22 +978,22 @@ ${userMessage}
  return part.text;
  }
  case "tool-call": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "tool-call messages"
  });
  }
  case "reasoning": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "reasoning messages"
  });
  }
  case "redacted-reasoning": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "redacted reasoning messages"
  });
  }
  case "file": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "file attachments"
  });
  }
@@ -1019,7 +1012,7 @@ ${assistantMessage}
  break;
  }
  case "tool": {
- throw new import_provider2.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError2({
  functionality: "tool messages"
  });
  }
@@ -1112,31 +1105,31 @@ var LLMGatewayCompletionLanguageModel = class {
  switch (type) {
  case "regular": {
  if ((_b = mode.tools) == null ? void 0 : _b.length) {
- throw new import_provider3.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError3({
  functionality: "tools"
  });
  }
  if (mode.toolChoice) {
- throw new import_provider3.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError3({
  functionality: "toolChoice"
  });
  }
  return baseArgs;
  }
  case "object-json": {
- throw new import_provider3.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError3({
  functionality: "object-json mode"
  });
  }
  case "object-tool": {
- throw new import_provider3.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError3({
  functionality: "object-tool mode"
  });
  }
  // Handle all non-text types with a single default case
  default: {
  const _exhaustiveCheck = type;
- throw new import_provider3.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError3({
  functionality: `${_exhaustiveCheck} mode`
  });
  }
@@ -1145,15 +1138,15 @@ var LLMGatewayCompletionLanguageModel = class {
  async doGenerate(options) {
  var _b, _c, _d, _e, _f;
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
+ const { responseHeaders, value: response } = await postJsonToApi2({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
+ headers: combineHeaders2(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
+ successfulResponseHandler: createJsonResponseHandler2(
  LLMGatewayCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1187,19 +1180,19 @@ var LLMGatewayCompletionLanguageModel = class {
  }
  async doStream(options) {
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
+ const { responseHeaders, value: response } = await postJsonToApi2({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
+ headers: combineHeaders2(this.config.headers(), options.headers),
  body: __spreadProps(__spreadValues({}, this.getArgs(options)), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  }),
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
+ successfulResponseHandler: createEventSourceResponseHandler2(
  LLMGatewayCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1269,27 +1262,27 @@ var LLMGatewayCompletionLanguageModel = class {
  };
  }
  };
- var LLMGatewayCompletionChunkSchema = import_zod4.z.union([
- import_zod4.z.object({
- id: import_zod4.z.string().optional(),
- model: import_zod4.z.string().optional(),
- choices: import_zod4.z.array(
- import_zod4.z.object({
- text: import_zod4.z.string(),
- reasoning: import_zod4.z.string().nullish().optional(),
+ var LLMGatewayCompletionChunkSchema = z4.union([
+ z4.object({
+ id: z4.string().optional(),
+ model: z4.string().optional(),
+ choices: z4.array(
+ z4.object({
+ text: z4.string(),
+ reasoning: z4.string().nullish().optional(),
  reasoning_details: ReasoningDetailArraySchema.nullish(),
- finish_reason: import_zod4.z.string().nullish(),
- index: import_zod4.z.number(),
- logprobs: import_zod4.z.object({
- tokens: import_zod4.z.array(import_zod4.z.string()),
- token_logprobs: import_zod4.z.array(import_zod4.z.number()),
- top_logprobs: import_zod4.z.array(import_zod4.z.record(import_zod4.z.string(), import_zod4.z.number())).nullable()
+ finish_reason: z4.string().nullish(),
+ index: z4.number(),
+ logprobs: z4.object({
+ tokens: z4.array(z4.string()),
+ token_logprobs: z4.array(z4.number()),
+ top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullable()
  }).nullable().optional()
  })
  ),
- usage: import_zod4.z.object({
- prompt_tokens: import_zod4.z.number(),
- completion_tokens: import_zod4.z.number()
+ usage: z4.object({
+ prompt_tokens: z4.number(),
+ completion_tokens: z4.number()
  }).optional().nullable()
  }),
  LLMGatewayErrorResponseSchema
@@ -1302,7 +1295,7 @@ var LLMGateway = class {
  */
  constructor(options = {}) {
  var _a, _b;
- this.baseURL = (_b = (0, import_provider_utils5.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
+ this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
  this.apiKey = options.apiKey;
  this.headers = options.headers;
  }
@@ -1310,7 +1303,7 @@ var LLMGateway = class {
  return {
  baseURL: this.baseURL,
  headers: () => __spreadValues({
- Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
+ Authorization: `Bearer ${loadApiKey({
  apiKey: this.apiKey,
  environmentVariableName: "LLMGATEWAY_API_KEY",
  description: "LLMGateway"
@@ -1337,13 +1330,13 @@ var LLMGateway = class {
  };

  // src/llmgateway-provider.ts
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
+ import { loadApiKey as loadApiKey2, withoutTrailingSlash as withoutTrailingSlash2 } from "@ai-sdk/provider-utils";
  function createLLMGateway(options = {}) {
  var _a, _b, _c;
- const baseURL = (_b = (0, import_provider_utils6.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
+ const baseURL = (_b = withoutTrailingSlash2((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
  const compatibility = (_c = options.compatibility) != null ? _c : "compatible";
  const getHeaders = () => __spreadValues({
- Authorization: `Bearer ${(0, import_provider_utils6.loadApiKey)({
+ Authorization: `Bearer ${loadApiKey2({
  apiKey: options.apiKey,
  environmentVariableName: "LLMGATEWAY_API_KEY",
  description: "LLMGateway"
@@ -1389,10 +1382,9 @@ var llmgateway = createLLMGateway({
  compatibility: "strict"
  // strict for LLMGateway API
  });
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
+ export {
  LLMGateway,
  createLLMGateway,
  llmgateway
- });
- //# sourceMappingURL=index.cjs.map
+ };
+ //# sourceMappingURL=index.mjs.map