@ai-sdk/openai-compatible 1.0.0-canary.2 → 1.0.0-canary.4

This diff compares the published contents of two publicly available package versions as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,25 @@
 # @ai-sdk/openai-compatible
 
+## 1.0.0-canary.4
+
+### Patch Changes
+
+- Updated dependencies [d1a1aa1]
+  - @ai-sdk/provider@2.0.0-canary.3
+  - @ai-sdk/provider-utils@3.0.0-canary.4
+
+## 1.0.0-canary.3
+
+### Patch Changes
+
+- Updated dependencies [a166433]
+- Updated dependencies [abf9a79]
+- Updated dependencies [9f95b35]
+- Updated dependencies [0a87932]
+- Updated dependencies [6dc848c]
+  - @ai-sdk/provider-utils@3.0.0-canary.3
+  - @ai-sdk/provider@2.0.0-canary.2
+
 ## 1.0.0-canary.2
 
 ### Patch Changes
package/dist/index.js CHANGED
@@ -30,12 +30,11 @@ module.exports = __toCommonJS(src_exports);
 
 // src/openai-compatible-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var import_provider_utils2 = require("@ai-sdk/provider-utils");
+var import_provider_utils = require("@ai-sdk/provider-utils");
 var import_zod2 = require("zod");
 
 // src/convert-to-openai-compatible-chat-messages.ts
 var import_provider = require("@ai-sdk/provider");
-var import_provider_utils = require("@ai-sdk/provider-utils");
 function getOpenAIMetadata(message) {
   var _a, _b;
   return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
@@ -61,25 +60,26 @@ function convertToOpenAICompatibleChatMessages(prompt) {
 messages.push({
   role: "user",
   content: content.map((part) => {
-    var _a;
     const partMetadata = getOpenAIMetadata(part);
     switch (part.type) {
       case "text": {
         return { type: "text", text: part.text, ...partMetadata };
       }
-      case "image": {
-        return {
-          type: "image_url",
-          image_url: {
-            url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`
-          },
-          ...partMetadata
-        };
-      }
       case "file": {
-        throw new import_provider.UnsupportedFunctionalityError({
-          functionality: "File content parts in user messages"
-        });
+        if (part.mediaType.startsWith("image/")) {
+          const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+          return {
+            type: "image_url",
+            image_url: {
+              url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`
+            },
+            ...partMetadata
+          };
+        } else {
+          throw new import_provider.UnsupportedFunctionalityError({
+            functionality: `file part media type ${part.mediaType}`
+          });
+        }
       }
     }
   }),
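In canary.3/4 the chat converter no longer has a dedicated `image` part: images arrive as `file` parts carrying a `mediaType` and `data`, and only `image/*` media types are accepted. A minimal TypeScript sketch of that mapping, using a hypothetical `UserFilePart` type in place of the SDK's prompt types:

```ts
// Sketch only: mirrors the converter logic in the hunk above.
// `UserFilePart` is a hypothetical stand-in for the SDK's file content part type.
interface UserFilePart {
  type: "file";
  mediaType: string;   // e.g. "image/png", or the wildcard "image/*"
  data: URL | string;  // a URL, or base64-encoded file data
}

function toImageUrlContent(part: UserFilePart) {
  if (!part.mediaType.startsWith("image/")) {
    // non-image file parts are rejected, as in the converter above
    throw new Error(`file part media type ${part.mediaType} is not supported`);
  }
  // the wildcard "image/*" is normalized to "image/jpeg"
  const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
  return {
    type: "image_url" as const,
    image_url: {
      url:
        part.data instanceof URL
          ? part.data.toString()
          : `data:${mediaType};base64,${part.data}`,
    },
  };
}

// Example: a base64-encoded PNG becomes a data URL.
// toImageUrlContent({ type: "file", mediaType: "image/png", data: "iVBORw0KG..." })
//   -> { type: "image_url", image_url: { url: "data:image/png;base64,iVBORw0KG..." } }
```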
@@ -235,7 +235,7 @@ function prepareTools({
     default: {
       const _exhaustiveCheck = type;
       throw new import_provider2.UnsupportedFunctionalityError({
-        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
@@ -254,7 +254,7 @@ var OpenAICompatibleChatLanguageModel = class {
     this.chunkSchema = createOpenAICompatibleChatChunkSchema(
       errorStructure.errorSchema
     );
-    this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(errorStructure);
+    this.failedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(errorStructure);
     this.supportsStructuredOutputs = (_b = config.supportsStructuredOutputs) != null ? _b : false;
   }
   get defaultObjectGenerationMode() {
@@ -341,15 +341,15 @@ var OpenAICompatibleChatLanguageModel = class {
       responseHeaders,
       value: responseBody,
       rawValue: rawResponse
-    } = await (0, import_provider_utils2.postJsonToApi)({
+    } = await (0, import_provider_utils.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
+      headers: (0, import_provider_utils.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
+      successfulResponseHandler: (0, import_provider_utils.createJsonResponseHandler)(
         OpenAICompatibleChatResponseSchema
       ),
       abortSignal: options.abortSignal,
@@ -384,7 +384,7 @@ var OpenAICompatibleChatLanguageModel = class {
         var _a2;
         return {
           toolCallType: "function",
-          toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
+          toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils.generateId)(),
           toolName: toolCall.function.name,
           args: toolCall.function.arguments
         };
@@ -396,10 +396,13 @@ var OpenAICompatibleChatLanguageModel = class {
       },
       providerMetadata,
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders, body: rawResponse },
-      response: getResponseMetadata(responseBody),
-      warnings,
-      request: { body }
+      request: { body },
+      response: {
+        ...getResponseMetadata(responseBody),
+        headers: responseHeaders,
+        body: rawResponse
+      },
+      warnings
     };
   }
   async doStream(options) {
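The `doGenerate` result no longer has a separate `rawResponse`: response headers and the raw body now sit on `response` next to the metadata from `getResponseMetadata`. A hedged consumer-side sketch (the `model` and `options` values are placeholder assumptions):

```ts
// Sketch only: `model` stands in for an OpenAICompatibleChatLanguageModel
// instance and `options` for valid call options; both are placeholders.
declare const model: { doGenerate(options: unknown): Promise<any> };
declare const options: unknown;

const result = await model.doGenerate(options);

// canary.2: headers and the raw body lived on result.rawResponse,
//           response metadata on result.response.
// canary.4: result.response carries metadata, headers, and the raw body together.
const { headers, body, ...metadata } = result.response;
console.log(headers);  // HTTP response headers
console.log(body);     // the raw response value (rawValue from postJsonToApi)
console.log(metadata); // spread of getResponseMetadata(responseBody)
```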
@@ -453,25 +456,25 @@ var OpenAICompatibleChatLanguageModel = class {
       return {
         stream: simulatedStream,
         rawCall: result.rawCall,
-        rawResponse: result.rawResponse,
+        response: result.response,
         warnings: result.warnings
       };
     }
     const { args, warnings } = this.getArgs({ ...options });
     const body = JSON.stringify({ ...args, stream: true });
     const metadataExtractor = (_a = this.config.metadataExtractor) == null ? void 0 : _a.createStreamExtractor();
-    const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
+    const { responseHeaders, value: response } = await (0, import_provider_utils.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
+      headers: (0, import_provider_utils.combineHeaders)(this.config.headers(), options.headers),
       body: {
         ...args,
         stream: true
       },
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
+      successfulResponseHandler: (0, import_provider_utils.createEventSourceResponseHandler)(
         this.chunkSchema
       ),
       abortSignal: options.abortSignal,
@@ -605,11 +608,11 @@ var OpenAICompatibleChatLanguageModel = class {
               argsTextDelta: toolCall2.function.arguments
             });
           }
-          if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
+          if ((0, import_provider_utils.isParsableJson)(toolCall2.function.arguments)) {
             controller.enqueue({
               type: "tool-call",
               toolCallType: "function",
-              toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils2.generateId)(),
+              toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils.generateId)(),
               toolName: toolCall2.function.name,
               args: toolCall2.function.arguments
             });
@@ -632,11 +635,11 @@ var OpenAICompatibleChatLanguageModel = class {
             toolName: toolCall.function.name,
             argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
           });
-          if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
+          if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils.isParsableJson)(toolCall.function.arguments)) {
             controller.enqueue({
               type: "tool-call",
               toolCallType: "function",
-              toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils2.generateId)(),
+              toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils.generateId)(),
               toolName: toolCall.function.name,
               args: toolCall.function.arguments
             });
@@ -676,9 +679,9 @@ var OpenAICompatibleChatLanguageModel = class {
         })
       ),
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
-      warnings,
-      request: { body }
+      request: { body },
+      response: { headers: responseHeaders },
+      warnings
     };
   }
 };
@@ -752,7 +755,7 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_zod2.z.union
 ]);
 
 // src/openai-compatible-completion-language-model.ts
-var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
 var import_zod3 = require("zod");
 
 // src/convert-to-openai-compatible-completion-prompt.ts
@@ -787,13 +790,8 @@ function convertToOpenAICompatibleCompletionPrompt({
         case "text": {
           return part.text;
         }
-        case "image": {
-          throw new import_provider4.UnsupportedFunctionalityError({
-            functionality: "images"
-          });
-        }
       }
-    }).join("");
+    }).filter(Boolean).join("");
     text += `${user}:
 ${userMessage}
 
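With the `image` case gone, non-text user parts no longer throw in the completion prompt converter: the `map` returns `undefined` for them and `.filter(Boolean)` drops them before joining. A small sketch of the resulting behaviour, with simplified stand-in part types:

```ts
// Sketch only: simplified stand-ins for the SDK's content part types.
type SimplePart =
  | { type: "text"; text: string }
  | { type: "file"; mediaType: string; data: string };

// Mirrors the `.map(...).filter(Boolean).join("")` pattern above: text parts
// are kept, anything else maps to undefined and is filtered out.
function userMessageText(parts: SimplePart[]): string {
  return parts
    .map((part) => {
      switch (part.type) {
        case "text":
          return part.text;
      }
    })
    .filter(Boolean)
    .join("");
}

// Example: the file part is silently dropped instead of throwing.
userMessageText([
  { type: "text", text: "Describe this: " },
  { type: "file", mediaType: "image/png", data: "..." },
  { type: "text", text: "in one sentence." },
]); // "Describe this: in one sentence."
```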
@@ -853,7 +851,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
     this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(
       errorStructure.errorSchema
     );
-    this.failedResponseHandler = (0, import_provider_utils3.createJsonErrorResponseHandler)(errorStructure);
+    this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(errorStructure);
   }
   get provider() {
     return this.config.provider;
@@ -928,15 +926,15 @@ var OpenAICompatibleCompletionLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0, import_provider_utils3.postJsonToApi)({
+    } = await (0, import_provider_utils2.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
+      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
         openaiCompatibleCompletionResponseSchema
       ),
       abortSignal: options.abortSignal,
@@ -952,10 +950,13 @@ var OpenAICompatibleCompletionLanguageModel = class {
       },
       finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders, body: rawResponse },
-      response: getResponseMetadata(response),
-      warnings,
-      request: { body: JSON.stringify(args) }
+      request: { body: JSON.stringify(args) },
+      response: {
+        ...getResponseMetadata(response),
+        headers: responseHeaders,
+        body: rawResponse
+      },
+      warnings
     };
   }
   async doStream(options) {
@@ -964,15 +965,15 @@ var OpenAICompatibleCompletionLanguageModel = class {
       ...args,
       stream: true
     };
-    const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
+    const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
+      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
         this.chunkSchema
       ),
       abortSignal: options.abortSignal,
@@ -1036,9 +1037,9 @@ var OpenAICompatibleCompletionLanguageModel = class {
         })
       ),
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
-      warnings,
-      request: { body: JSON.stringify(body) }
+      request: { body: JSON.stringify(body) },
+      response: { headers: responseHeaders },
+      warnings
     };
   }
 };
@@ -1079,7 +1080,7 @@ var createOpenAICompatibleCompletionChunkSchema = (errorSchema) => import_zod3.z
 
 // src/openai-compatible-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
-var import_provider_utils4 = require("@ai-sdk/provider-utils");
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_zod4 = require("zod");
 var OpenAICompatibleEmbeddingModel = class {
   constructor(modelId, settings, config) {
@@ -1113,12 +1114,12 @@ var OpenAICompatibleEmbeddingModel = class {
         values
       });
     }
-    const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
+    const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
         path: "/embeddings",
         modelId: this.modelId
       }),
-      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
+      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), headers),
       body: {
         model: this.modelId,
         input: values,
@@ -1126,10 +1127,10 @@ var OpenAICompatibleEmbeddingModel = class {
         dimensions: this.settings.dimensions,
         user: this.settings.user
       },
-      failedResponseHandler: (0, import_provider_utils4.createJsonErrorResponseHandler)(
+      failedResponseHandler: (0, import_provider_utils3.createJsonErrorResponseHandler)(
         (_a = this.config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure
       ),
-      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
+      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
         openaiTextEmbeddingResponseSchema
       ),
       abortSignal,
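For the embedding model only the import alias changes; the request it builds is the same. Per the two hunks above, the JSON body posted to `/embeddings` includes `model`, `input`, `dimensions`, and `user` (one body line falls between the hunks and is not shown here). A sketch of that shape with placeholder values:

```ts
// Sketch only: placeholder values; field names follow the hunks above, and any
// body fields outside the shown hunks are omitted.
const embeddingRequestBody = {
  model: "example-embedding-model",       // this.modelId
  input: ["first value", "second value"], // the values passed in
  dimensions: 1536,                       // this.settings.dimensions (optional)
  user: "user-123",                       // this.settings.user (optional)
};
```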
@@ -1148,7 +1149,7 @@ var openaiTextEmbeddingResponseSchema = import_zod4.z.object({
 });
 
 // src/openai-compatible-image-model.ts
-var import_provider_utils5 = require("@ai-sdk/provider-utils");
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_zod5 = require("zod");
 var OpenAICompatibleImageModel = class {
   constructor(modelId, settings, config) {
@@ -1187,12 +1188,12 @@ var OpenAICompatibleImageModel = class {
       warnings.push({ type: "unsupported-setting", setting: "seed" });
     }
     const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
+    const { value: response, responseHeaders } = await (0, import_provider_utils4.postJsonToApi)({
       url: this.config.url({
         path: "/images/generations",
         modelId: this.modelId
       }),
-      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
       body: {
         model: this.modelId,
         prompt,
@@ -1202,10 +1203,10 @@ var OpenAICompatibleImageModel = class {
         response_format: "b64_json",
         ...this.settings.user ? { user: this.settings.user } : {}
       },
-      failedResponseHandler: (0, import_provider_utils5.createJsonErrorResponseHandler)(
+      failedResponseHandler: (0, import_provider_utils4.createJsonErrorResponseHandler)(
         (_e = this.config.errorStructure) != null ? _e : defaultOpenAICompatibleErrorStructure
       ),
-      successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
+      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
         openaiCompatibleImageResponseSchema
       ),
       abortSignal,
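The image model is the same story: only the alias changes, and it still posts to `/images/generations` with `response_format: "b64_json"` and a conditional `user`. A hedged sketch of an equivalent raw request (URL, key, model id, and prompt are placeholders; body fields that fall outside the shown hunks are omitted):

```ts
// Sketch only: placeholder URL, key, model id, and prompt; fields not visible
// in the hunks above are left out.
const settings = { user: "user-123" as string | undefined };

const res = await fetch("https://api.example.com/v1/images/generations", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer <EXAMPLE_API_KEY>",
  },
  body: JSON.stringify({
    model: "example-image-model",                      // this.modelId
    prompt: "a watercolor painting of a lighthouse",
    response_format: "b64_json",                       // images return base64-encoded
    ...(settings.user ? { user: settings.user } : {}), // only sent when set
  }),
});
```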
@@ -1227,9 +1228,9 @@ var openaiCompatibleImageResponseSchema = import_zod5.z.object({
 });
 
 // src/openai-compatible-provider.ts
-var import_provider_utils6 = require("@ai-sdk/provider-utils");
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
 function createOpenAICompatible(options) {
-  const baseURL = (0, import_provider_utils6.withoutTrailingSlash)(options.baseURL);
+  const baseURL = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL);
   const providerName = options.name;
   const getHeaders = () => ({
     ...options.apiKey && { Authorization: `Bearer ${options.apiKey}` },
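The provider factory itself only changes its import alias: it still strips a trailing slash from `baseURL` and adds an `Authorization: Bearer …` header when `apiKey` is set. A hedged usage sketch (provider name, base URL, API key, and model id are placeholders):

```ts
import { createOpenAICompatible } from "@ai-sdk/openai-compatible";

// Sketch only: all identifiers below are placeholder assumptions.
const example = createOpenAICompatible({
  name: "example-provider",
  // the trailing slash is removed by withoutTrailingSlash above
  baseURL: "https://api.example.com/v1/",
  apiKey: "<EXAMPLE_API_KEY>", // becomes `Authorization: Bearer <EXAMPLE_API_KEY>`
});

const chatModel = example.chatModel("example-chat-model-id");
```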