@llmgateway/ai-sdk-provider 1.0.0 → 1.0.1

This diff shows the changes between publicly released versions of this package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in those registries.
package/dist/index.js CHANGED
@@ -1,6 +1,9 @@
+ "use strict";
  var __defProp = Object.defineProperty;
  var __defProps = Object.defineProperties;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
  var __getOwnPropSymbols = Object.getOwnPropertySymbols;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __propIsEnum = Object.prototype.propertyIsEnumerable;
@@ -29,53 +32,65 @@ var __objRest = (source, exclude) => {
  }
  return target;
  };
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var index_exports = {};
+ __export(index_exports, {
+ LLMGateway: () => LLMGateway,
+ createLLMGateway: () => createLLMGateway,
+ llmgateway: () => llmgateway
+ });
+ module.exports = __toCommonJS(index_exports);

  // src/llmgateway-facade.ts
- import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");

  // src/schemas/reasoning-details.ts
- import { z } from "zod";
- var ReasoningDetailSummarySchema = z.object({
- type: z.literal("reasoning.summary" /* Summary */),
- summary: z.string()
+ var import_zod = require("zod");
+ var ReasoningDetailSummarySchema = import_zod.z.object({
+ type: import_zod.z.literal("reasoning.summary" /* Summary */),
+ summary: import_zod.z.string()
  });
- var ReasoningDetailEncryptedSchema = z.object({
- type: z.literal("reasoning.encrypted" /* Encrypted */),
- data: z.string()
+ var ReasoningDetailEncryptedSchema = import_zod.z.object({
+ type: import_zod.z.literal("reasoning.encrypted" /* Encrypted */),
+ data: import_zod.z.string()
  });
- var ReasoningDetailTextSchema = z.object({
- type: z.literal("reasoning.text" /* Text */),
- text: z.string().nullish(),
- signature: z.string().nullish()
+ var ReasoningDetailTextSchema = import_zod.z.object({
+ type: import_zod.z.literal("reasoning.text" /* Text */),
+ text: import_zod.z.string().nullish(),
+ signature: import_zod.z.string().nullish()
  });
- var ReasoningDetailUnionSchema = z.union([
+ var ReasoningDetailUnionSchema = import_zod.z.union([
  ReasoningDetailSummarySchema,
  ReasoningDetailEncryptedSchema,
  ReasoningDetailTextSchema
  ]);
- var ReasoningDetailsWithUnknownSchema = z.union([
+ var ReasoningDetailsWithUnknownSchema = import_zod.z.union([
  ReasoningDetailUnionSchema,
- z.unknown().transform(() => null)
+ import_zod.z.unknown().transform(() => null)
  ]);
- var ReasoningDetailArraySchema = z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));
+ var ReasoningDetailArraySchema = import_zod.z.array(ReasoningDetailsWithUnknownSchema).transform((d) => d.filter((d2) => !!d2));

  // src/llmgateway-chat-language-model.ts
- import {
- InvalidResponseDataError,
- UnsupportedFunctionalityError
- } from "@ai-sdk/provider";
- import {
- combineHeaders,
- createEventSourceResponseHandler,
- createJsonResponseHandler,
- generateId,
- isParsableJson,
- postJsonToApi
- } from "@ai-sdk/provider-utils";
- import { z as z3 } from "zod";
+ var import_provider = require("@ai-sdk/provider");
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_zod3 = require("zod");

  // src/convert-to-llmgateway-chat-messages.ts
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
+ var import_provider_utils = require("@ai-sdk/provider-utils");
  function getCacheControl(providerMetadata) {
  var _a, _b, _c;
  const anthropic = providerMetadata == null ? void 0 : providerMetadata.anthropic;
@@ -121,7 +136,7 @@ function convertToLLMGatewayChatMessages(prompt) {
  return {
  type: "image_url",
  image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${convertUint8ArrayToBase64(
+ url: part.image instanceof URL ? part.image.toString() : `data:${(_b2 = part.mimeType) != null ? _b2 : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(
  part.image
  )}`
  },
@@ -135,7 +150,7 @@ function convertToLLMGatewayChatMessages(prompt) {
  filename: String(
  (_d = (_c2 = part.providerMetadata) == null ? void 0 : _c2.llmgateway) == null ? void 0 : _d.filename
  ),
- file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${convertUint8ArrayToBase64(part.data)}` : `data:${part.mimeType};base64,${part.data}`
+ file_data: part.data instanceof Uint8Array ? `data:${part.mimeType};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.data)}` : `data:${part.mimeType};base64,${part.data}`
  },
  cache_control: cacheControl
  };
@@ -261,17 +276,17 @@ function mapLLMGatewayFinishReason(finishReason) {
  }

  // src/llmgateway-error.ts
- import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
- import { z as z2 } from "zod";
- var LLMGatewayErrorResponseSchema = z2.object({
- error: z2.object({
- message: z2.string(),
- type: z2.string(),
- param: z2.any().nullable(),
- code: z2.string().nullable()
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
+ var import_zod2 = require("zod");
+ var LLMGatewayErrorResponseSchema = import_zod2.z.object({
+ error: import_zod2.z.object({
+ message: import_zod2.z.string(),
+ type: import_zod2.z.string(),
+ param: import_zod2.z.any().nullable(),
+ code: import_zod2.z.string().nullable()
  })
  });
- var llmgatewayFailedResponseHandler = createJsonErrorResponseHandler({
+ var llmgatewayFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
  errorSchema: LLMGatewayErrorResponseSchema,
  errorToMessage: (data) => data.error.message
  });
@@ -362,7 +377,7 @@ var LLMGatewayChatLanguageModel = class {
  // Handle all non-text types with a single default case
  default: {
  const _exhaustiveCheck = type;
- throw new UnsupportedFunctionalityError({
+ throw new import_provider.UnsupportedFunctionalityError({
  functionality: `${_exhaustiveCheck} mode`
  });
  }
@@ -371,15 +386,15 @@ var LLMGatewayChatLanguageModel = class {
  async doGenerate(options) {
  var _b, _c, _d, _e, _f, _g, _h, _i, _j;
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await postJsonToApi({
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: combineHeaders(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: createJsonResponseHandler(
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
  LLMGatewayNonStreamChatCompletionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -469,7 +484,7 @@ var LLMGatewayChatLanguageModel = class {
  var _a2;
  return {
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  };
@@ -485,12 +500,12 @@ var LLMGatewayChatLanguageModel = class {
  async doStream(options) {
  var _a, _c;
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await postJsonToApi({
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: combineHeaders(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body: __spreadProps(__spreadValues({}, args), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
@@ -499,7 +514,7 @@ var LLMGatewayChatLanguageModel = class {
  }, ((_a = this.settings.usage) == null ? void 0 : _a.include) ? { include_usage: true } : {}) : void 0
  }),
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: createEventSourceResponseHandler(
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
  LLMGatewayStreamChatCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -640,19 +655,19 @@ var LLMGatewayChatLanguageModel = class {
  const index = toolCallDelta.index;
  if (toolCalls[index] == null) {
  if (toolCallDelta.type !== "function") {
- throw new InvalidResponseDataError({
+ throw new import_provider.InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function' type.`
  });
  }
  if (toolCallDelta.id == null) {
- throw new InvalidResponseDataError({
+ throw new import_provider.InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'id' to be a string.`
  });
  }
  if (((_c2 = toolCallDelta.function) == null ? void 0 : _c2.name) == null) {
- throw new InvalidResponseDataError({
+ throw new import_provider.InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
  });
@@ -670,7 +685,7 @@ var LLMGatewayChatLanguageModel = class {
  if (toolCall2 == null) {
  throw new Error("Tool call is missing");
  }
- if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && isParsableJson(toolCall2.function.arguments)) {
+ if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call-delta",
  toolCallType: "function",
@@ -681,7 +696,7 @@ var LLMGatewayChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
+ toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -703,11 +718,11 @@ var LLMGatewayChatLanguageModel = class {
  toolName: toolCall.function.name,
  argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
  });
- if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+ if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
+ toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -724,10 +739,10 @@ var LLMGatewayChatLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
  toolName: toolCall.function.name,
  // Coerce invalid arguments to an empty JSON object
- args: isParsableJson(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
+ args: (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments) ? toolCall.function.arguments : "{}"
  });
  toolCall.sent = true;
  }
@@ -755,97 +770,97 @@ var LLMGatewayChatLanguageModel = class {
  };
  }
  };
- var LLMGatewayChatCompletionBaseResponseSchema = z3.object({
- id: z3.string().optional(),
- model: z3.string().optional(),
- usage: z3.object({
- prompt_tokens: z3.number(),
- prompt_tokens_details: z3.object({
- cached_tokens: z3.number()
+ var LLMGatewayChatCompletionBaseResponseSchema = import_zod3.z.object({
+ id: import_zod3.z.string().optional(),
+ model: import_zod3.z.string().optional(),
+ usage: import_zod3.z.object({
+ prompt_tokens: import_zod3.z.number(),
+ prompt_tokens_details: import_zod3.z.object({
+ cached_tokens: import_zod3.z.number()
  }).nullish(),
- completion_tokens: z3.number(),
- completion_tokens_details: z3.object({
- reasoning_tokens: z3.number()
+ completion_tokens: import_zod3.z.number(),
+ completion_tokens_details: import_zod3.z.object({
+ reasoning_tokens: import_zod3.z.number()
  }).nullish(),
- total_tokens: z3.number(),
- cost: z3.number().optional()
+ total_tokens: import_zod3.z.number(),
+ cost: import_zod3.z.number().optional()
  }).nullish()
  });
  var LLMGatewayNonStreamChatCompletionResponseSchema = LLMGatewayChatCompletionBaseResponseSchema.extend({
- choices: z3.array(
- z3.object({
- message: z3.object({
- role: z3.literal("assistant"),
- content: z3.string().nullable().optional(),
- reasoning: z3.string().nullable().optional(),
+ choices: import_zod3.z.array(
+ import_zod3.z.object({
+ message: import_zod3.z.object({
+ role: import_zod3.z.literal("assistant"),
+ content: import_zod3.z.string().nullable().optional(),
+ reasoning: import_zod3.z.string().nullable().optional(),
  reasoning_details: ReasoningDetailArraySchema.nullish(),
- tool_calls: z3.array(
- z3.object({
- id: z3.string().optional().nullable(),
- type: z3.literal("function"),
- function: z3.object({
- name: z3.string(),
- arguments: z3.string()
+ tool_calls: import_zod3.z.array(
+ import_zod3.z.object({
+ id: import_zod3.z.string().optional().nullable(),
+ type: import_zod3.z.literal("function"),
+ function: import_zod3.z.object({
+ name: import_zod3.z.string(),
+ arguments: import_zod3.z.string()
  })
  })
  ).optional()
  }),
- index: z3.number(),
- logprobs: z3.object({
- content: z3.array(
- z3.object({
- token: z3.string(),
- logprob: z3.number(),
- top_logprobs: z3.array(
- z3.object({
- token: z3.string(),
- logprob: z3.number()
+ index: import_zod3.z.number(),
+ logprobs: import_zod3.z.object({
+ content: import_zod3.z.array(
+ import_zod3.z.object({
+ token: import_zod3.z.string(),
+ logprob: import_zod3.z.number(),
+ top_logprobs: import_zod3.z.array(
+ import_zod3.z.object({
+ token: import_zod3.z.string(),
+ logprob: import_zod3.z.number()
  })
  )
  })
  ).nullable()
  }).nullable().optional(),
- finish_reason: z3.string().optional().nullable()
+ finish_reason: import_zod3.z.string().optional().nullable()
  })
  )
  });
- var LLMGatewayStreamChatCompletionChunkSchema = z3.union([
+ var LLMGatewayStreamChatCompletionChunkSchema = import_zod3.z.union([
  LLMGatewayChatCompletionBaseResponseSchema.extend({
- choices: z3.array(
- z3.object({
- delta: z3.object({
- role: z3.enum(["assistant"]).optional(),
- content: z3.string().nullish(),
- reasoning: z3.string().nullish().optional(),
+ choices: import_zod3.z.array(
+ import_zod3.z.object({
+ delta: import_zod3.z.object({
+ role: import_zod3.z.enum(["assistant"]).optional(),
+ content: import_zod3.z.string().nullish(),
+ reasoning: import_zod3.z.string().nullish().optional(),
  reasoning_details: ReasoningDetailArraySchema.nullish(),
- tool_calls: z3.array(
- z3.object({
- index: z3.number(),
- id: z3.string().nullish(),
- type: z3.literal("function").optional(),
- function: z3.object({
- name: z3.string().nullish(),
- arguments: z3.string().nullish()
+ tool_calls: import_zod3.z.array(
+ import_zod3.z.object({
+ index: import_zod3.z.number(),
+ id: import_zod3.z.string().nullish(),
+ type: import_zod3.z.literal("function").optional(),
+ function: import_zod3.z.object({
+ name: import_zod3.z.string().nullish(),
+ arguments: import_zod3.z.string().nullish()
  })
  })
  ).nullish()
  }).nullish(),
- logprobs: z3.object({
- content: z3.array(
- z3.object({
- token: z3.string(),
- logprob: z3.number(),
- top_logprobs: z3.array(
- z3.object({
- token: z3.string(),
- logprob: z3.number()
+ logprobs: import_zod3.z.object({
+ content: import_zod3.z.array(
+ import_zod3.z.object({
+ token: import_zod3.z.string(),
+ logprob: import_zod3.z.number(),
+ top_logprobs: import_zod3.z.array(
+ import_zod3.z.object({
+ token: import_zod3.z.string(),
+ logprob: import_zod3.z.number()
  })
  )
  })
  ).nullable()
  }).nullish(),
- finish_reason: z3.string().nullable().optional(),
- index: z3.number()
+ finish_reason: import_zod3.z.string().nullable().optional(),
+ index: import_zod3.z.number()
  })
  )
  }),
@@ -903,20 +918,12 @@ function prepareToolsAndToolChoice(mode) {
  }

  // src/llmgateway-completion-language-model.ts
- import { UnsupportedFunctionalityError as UnsupportedFunctionalityError3 } from "@ai-sdk/provider";
- import {
- combineHeaders as combineHeaders2,
- createEventSourceResponseHandler as createEventSourceResponseHandler2,
- createJsonResponseHandler as createJsonResponseHandler2,
- postJsonToApi as postJsonToApi2
- } from "@ai-sdk/provider-utils";
- import { z as z4 } from "zod";
+ var import_provider3 = require("@ai-sdk/provider");
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
+ var import_zod4 = require("zod");

  // src/convert-to-llmgateway-completion-prompt.ts
- import {
- InvalidPromptError,
- UnsupportedFunctionalityError as UnsupportedFunctionalityError2
- } from "@ai-sdk/provider";
+ var import_provider2 = require("@ai-sdk/provider");
  function convertToLLMGatewayCompletionPrompt({
  prompt,
  inputFormat,
@@ -936,7 +943,7 @@ function convertToLLMGatewayCompletionPrompt({
  for (const { role, content } of prompt) {
  switch (role) {
  case "system": {
- throw new InvalidPromptError({
+ throw new import_provider2.InvalidPromptError({
  message: "Unexpected system message in prompt: ${content}",
  prompt
  });
@@ -948,12 +955,12 @@ function convertToLLMGatewayCompletionPrompt({
  return part.text;
  }
  case "image": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "images"
  });
  }
  case "file": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "file attachments"
  });
  }
@@ -978,22 +985,22 @@ ${userMessage}
  return part.text;
  }
  case "tool-call": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "tool-call messages"
  });
  }
  case "reasoning": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "reasoning messages"
  });
  }
  case "redacted-reasoning": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "redacted reasoning messages"
  });
  }
  case "file": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "file attachments"
  });
  }
@@ -1012,7 +1019,7 @@ ${assistantMessage}
  break;
  }
  case "tool": {
- throw new UnsupportedFunctionalityError2({
+ throw new import_provider2.UnsupportedFunctionalityError({
  functionality: "tool messages"
  });
  }
@@ -1105,31 +1112,31 @@ var LLMGatewayCompletionLanguageModel = class {
  switch (type) {
  case "regular": {
  if ((_b = mode.tools) == null ? void 0 : _b.length) {
- throw new UnsupportedFunctionalityError3({
+ throw new import_provider3.UnsupportedFunctionalityError({
  functionality: "tools"
  });
  }
  if (mode.toolChoice) {
- throw new UnsupportedFunctionalityError3({
+ throw new import_provider3.UnsupportedFunctionalityError({
  functionality: "toolChoice"
  });
  }
  return baseArgs;
  }
  case "object-json": {
- throw new UnsupportedFunctionalityError3({
+ throw new import_provider3.UnsupportedFunctionalityError({
  functionality: "object-json mode"
  });
  }
  case "object-tool": {
- throw new UnsupportedFunctionalityError3({
+ throw new import_provider3.UnsupportedFunctionalityError({
  functionality: "object-tool mode"
  });
  }
  // Handle all non-text types with a single default case
  default: {
  const _exhaustiveCheck = type;
- throw new UnsupportedFunctionalityError3({
+ throw new import_provider3.UnsupportedFunctionalityError({
  functionality: `${_exhaustiveCheck} mode`
  });
  }
@@ -1138,15 +1145,15 @@ var LLMGatewayCompletionLanguageModel = class {
  async doGenerate(options) {
  var _b, _c, _d, _e, _f;
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await postJsonToApi2({
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: combineHeaders2(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: createJsonResponseHandler2(
+ successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
  LLMGatewayCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1180,19 +1187,19 @@ var LLMGatewayCompletionLanguageModel = class {
  }
  async doStream(options) {
  const args = this.getArgs(options);
- const { responseHeaders, value: response } = await postJsonToApi2({
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: combineHeaders2(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
  body: __spreadProps(__spreadValues({}, this.getArgs(options)), {
  stream: true,
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  }),
  failedResponseHandler: llmgatewayFailedResponseHandler,
- successfulResponseHandler: createEventSourceResponseHandler2(
+ successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
  LLMGatewayCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1262,27 +1269,27 @@ var LLMGatewayCompletionLanguageModel = class {
  };
  }
  };
- var LLMGatewayCompletionChunkSchema = z4.union([
- z4.object({
- id: z4.string().optional(),
- model: z4.string().optional(),
- choices: z4.array(
- z4.object({
- text: z4.string(),
- reasoning: z4.string().nullish().optional(),
+ var LLMGatewayCompletionChunkSchema = import_zod4.z.union([
+ import_zod4.z.object({
+ id: import_zod4.z.string().optional(),
+ model: import_zod4.z.string().optional(),
+ choices: import_zod4.z.array(
+ import_zod4.z.object({
+ text: import_zod4.z.string(),
+ reasoning: import_zod4.z.string().nullish().optional(),
  reasoning_details: ReasoningDetailArraySchema.nullish(),
- finish_reason: z4.string().nullish(),
- index: z4.number(),
- logprobs: z4.object({
- tokens: z4.array(z4.string()),
- token_logprobs: z4.array(z4.number()),
- top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullable()
+ finish_reason: import_zod4.z.string().nullish(),
+ index: import_zod4.z.number(),
+ logprobs: import_zod4.z.object({
+ tokens: import_zod4.z.array(import_zod4.z.string()),
+ token_logprobs: import_zod4.z.array(import_zod4.z.number()),
+ top_logprobs: import_zod4.z.array(import_zod4.z.record(import_zod4.z.string(), import_zod4.z.number())).nullable()
  }).nullable().optional()
  })
  ),
- usage: z4.object({
- prompt_tokens: z4.number(),
- completion_tokens: z4.number()
+ usage: import_zod4.z.object({
+ prompt_tokens: import_zod4.z.number(),
+ completion_tokens: import_zod4.z.number()
  }).optional().nullable()
  }),
  LLMGatewayErrorResponseSchema
@@ -1295,7 +1302,7 @@ var LLMGateway = class {
  */
  constructor(options = {}) {
  var _a, _b;
- this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
+ this.baseURL = (_b = (0, import_provider_utils5.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
  this.apiKey = options.apiKey;
  this.headers = options.headers;
  }
@@ -1303,7 +1310,7 @@ var LLMGateway = class {
  return {
  baseURL: this.baseURL,
  headers: () => __spreadValues({
- Authorization: `Bearer ${loadApiKey({
+ Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
  apiKey: this.apiKey,
  environmentVariableName: "LLMGATEWAY_API_KEY",
  description: "LLMGateway"
@@ -1330,13 +1337,13 @@ var LLMGateway = class {
  };

  // src/llmgateway-provider.ts
- import { loadApiKey as loadApiKey2, withoutTrailingSlash as withoutTrailingSlash2 } from "@ai-sdk/provider-utils";
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
  function createLLMGateway(options = {}) {
  var _a, _b, _c;
- const baseURL = (_b = withoutTrailingSlash2((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
+ const baseURL = (_b = (0, import_provider_utils6.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseURL)) != null ? _b : "https://api.llmgateway.io/v1";
  const compatibility = (_c = options.compatibility) != null ? _c : "compatible";
  const getHeaders = () => __spreadValues({
- Authorization: `Bearer ${loadApiKey2({
+ Authorization: `Bearer ${(0, import_provider_utils6.loadApiKey)({
  apiKey: options.apiKey,
  environmentVariableName: "LLMGATEWAY_API_KEY",
  description: "LLMGateway"
@@ -1382,9 +1389,10 @@ var llmgateway = createLLMGateway({
  compatibility: "strict"
  // strict for LLMGateway API
  });
- export {
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
  LLMGateway,
  createLLMGateway,
  llmgateway
- };
+ });
  //# sourceMappingURL=index.js.map