@ai-sdk/openai-compatible 1.0.0-canary.1 → 1.0.0-canary.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -0
- package/dist/index.js +137 -214
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +102 -181
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +15 -15
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +15 -15
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,26 @@
 # @ai-sdk/openai-compatible
 
+## 1.0.0-canary.3
+
+### Patch Changes
+
+- Updated dependencies [a166433]
+- Updated dependencies [abf9a79]
+- Updated dependencies [9f95b35]
+- Updated dependencies [0a87932]
+- Updated dependencies [6dc848c]
+  - @ai-sdk/provider-utils@3.0.0-canary.3
+  - @ai-sdk/provider@2.0.0-canary.2
+
+## 1.0.0-canary.2
+
+### Patch Changes
+
+- Updated dependencies [c57e248]
+- Updated dependencies [33f4a6a]
+  - @ai-sdk/provider@2.0.0-canary.1
+  - @ai-sdk/provider-utils@3.0.0-canary.2
+
 ## 1.0.0-canary.1
 
 ### Patch Changes
package/dist/index.js
CHANGED
@@ -30,15 +30,14 @@ module.exports = __toCommonJS(src_exports);
 
 // src/openai-compatible-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
+var import_provider_utils = require("@ai-sdk/provider-utils");
 var import_zod2 = require("zod");
 
 // src/convert-to-openai-compatible-chat-messages.ts
 var import_provider = require("@ai-sdk/provider");
-var import_provider_utils = require("@ai-sdk/provider-utils");
 function getOpenAIMetadata(message) {
   var _a, _b;
-  return (_b = (_a = message == null ? void 0 : message.
+  return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
 }
 function convertToOpenAICompatibleChatMessages(prompt) {
   const messages = [];
@@ -61,25 +60,26 @@ function convertToOpenAICompatibleChatMessages(prompt) {
         messages.push({
           role: "user",
           content: content.map((part) => {
-            var _a;
             const partMetadata = getOpenAIMetadata(part);
             switch (part.type) {
               case "text": {
                 return { type: "text", text: part.text, ...partMetadata };
               }
-              case "image": {
-                return {
-                  type: "image_url",
-                  image_url: {
-                    url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`
-                  },
-                  ...partMetadata
-                };
-              }
               case "file": {
-
-
-
+                if (part.mediaType.startsWith("image/")) {
+                  const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+                  return {
+                    type: "image_url",
+                    image_url: {
+                      url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`
+                    },
+                    ...partMetadata
+                  };
+                } else {
+                  throw new import_provider.UnsupportedFunctionalityError({
+                    functionality: `file part media type ${part.mediaType}`
+                  });
+                }
               }
             }
           }),
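Aside from the diff itself: the removed `image` branch and the new `file` branch above amount to a small conversion change. A minimal TypeScript sketch of what the new branch does, assuming a part shaped like `{ type: 'file', mediaType, data }` where `data` is already a base64 string or a `URL`; the type alias and function name below are illustrative, not part of the package:

```ts
// Illustrative sketch of the new file-part handling shown in the hunk above.
type FilePart = { type: "file"; mediaType: string; data: string | URL };

function fileToImageUrlContent(part: FilePart) {
  if (!part.mediaType.startsWith("image/")) {
    // Non-image file parts are rejected (UnsupportedFunctionalityError in the dist code).
    throw new Error(`file part media type ${part.mediaType}`);
  }
  // "image/*" falls back to JPEG, exactly as the added line does.
  const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
  return {
    type: "image_url" as const,
    image_url: {
      url:
        part.data instanceof URL
          ? part.data.toString()
          : `data:${mediaType};base64,${part.data}`,
    },
  };
}
```

Compared with the removed `image` branch, there is no `convertUint8ArrayToBase64` call any more, which suggests the data already arrives base64-encoded or as a URL by the time it reaches this converter.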
@@ -191,16 +191,14 @@ var defaultOpenAICompatibleErrorStructure = {
 // src/openai-compatible-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
 function prepareTools({
-
-
+  tools,
+  toolChoice
 }) {
-
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   if (tools == null) {
-    return { tools: void 0,
+    return { tools: void 0, toolChoice: void 0, toolWarnings };
   }
-  const toolChoice = mode.toolChoice;
   const openaiCompatTools = [];
   for (const tool of tools) {
     if (tool.type === "provider-defined") {
@@ -217,29 +215,27 @@ function prepareTools({
     }
   }
   if (toolChoice == null) {
-    return { tools: openaiCompatTools,
+    return { tools: openaiCompatTools, toolChoice: void 0, toolWarnings };
   }
   const type = toolChoice.type;
   switch (type) {
     case "auto":
     case "none":
     case "required":
-      return { tools: openaiCompatTools,
+      return { tools: openaiCompatTools, toolChoice: type, toolWarnings };
     case "tool":
       return {
         tools: openaiCompatTools,
-
+        toolChoice: {
           type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
+          function: { name: toolChoice.toolName }
         },
         toolWarnings
       };
     default: {
       const _exhaustiveCheck = type;
       throw new import_provider2.UnsupportedFunctionalityError({
-        functionality: `
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
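Editorial note, not part of the published diff: `prepareTools` now receives `tools` and `toolChoice` directly instead of reading them off a `mode` object, and it returns a `toolChoice` field rather than `tool_choice`. A hedged sketch of a call against the new shape; the tool definition is illustrative and assumes the AI SDK's function-tool format:

```ts
// Illustrative call; only the parameter and return names are taken from the hunk above.
const { tools, toolChoice, toolWarnings } = prepareTools({
  tools: [
    {
      type: "function",
      name: "getWeather",
      description: "Look up the weather for a city",
      parameters: { type: "object", properties: { city: { type: "string" } } },
    },
  ],
  toolChoice: { type: "tool", toolName: "getWeather" },
});
// Per the "tool" case above, toolChoice comes back as
// { type: "function", function: { name: "getWeather" } }.
```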
@@ -258,7 +254,7 @@ var OpenAICompatibleChatLanguageModel = class {
     this.chunkSchema = createOpenAICompatibleChatChunkSchema(
       errorStructure.errorSchema
     );
-    this.failedResponseHandler = (0,
+    this.failedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)(errorStructure);
     this.supportsStructuredOutputs = (_b = config.supportsStructuredOutputs) != null ? _b : false;
   }
   get defaultObjectGenerationMode() {
@@ -271,7 +267,6 @@ var OpenAICompatibleChatLanguageModel = class {
     return this.config.provider.split(".")[0].trim();
   }
   getArgs({
-    mode,
     prompt,
     maxTokens,
     temperature,
@@ -279,19 +274,17 @@ var OpenAICompatibleChatLanguageModel = class {
     topK,
     frequencyPenalty,
     presencePenalty,
-
+    providerOptions,
     stopSequences,
     responseFormat,
-    seed
+    seed,
+    toolChoice,
+    tools
   }) {
-    var _a
-    const type = mode.type;
+    var _a;
     const warnings = [];
     if (topK != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "topK"
-      });
+      warnings.push({ type: "unsupported-setting", setting: "topK" });
     }
     if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
       warnings.push({
@@ -300,85 +293,45 @@ var OpenAICompatibleChatLanguageModel = class {
         details: "JSON response format schema is only supported with structuredOutputs"
       });
     }
-    const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    const {
+      tools: openaiTools,
+      toolChoice: openaiToolChoice,
+      toolWarnings
+    } = prepareTools({
+      tools,
+      toolChoice
+    });
+    return {
+      args: {
+        // model id:
+        model: this.modelId,
+        // model specific settings:
+        user: this.settings.user,
+        // standardized settings:
+        max_tokens: maxTokens,
+        temperature,
+        top_p: topP,
+        frequency_penalty: frequencyPenalty,
+        presence_penalty: presencePenalty,
+        response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
+          type: "json_schema",
+          json_schema: {
+            schema: responseFormat.schema,
+            name: (_a = responseFormat.name) != null ? _a : "response",
+            description: responseFormat.description
+          }
+        } : { type: "json_object" } : void 0,
+        stop: stopSequences,
+        seed,
+        ...providerOptions == null ? void 0 : providerOptions[this.providerOptionsName],
+        // messages:
+        messages: convertToOpenAICompatibleChatMessages(prompt),
+        // tools:
+        tools: openaiTools,
+        tool_choice: openaiToolChoice
+      },
+      warnings: [...warnings, ...toolWarnings]
     };
-    switch (type) {
-      case "regular": {
-        const { tools, tool_choice, toolWarnings } = prepareTools({
-          mode,
-          structuredOutputs: this.supportsStructuredOutputs
-        });
-        return {
-          args: { ...baseArgs, tools, tool_choice },
-          warnings: [...warnings, ...toolWarnings]
-        };
-      }
-      case "object-json": {
-        return {
-          args: {
-            ...baseArgs,
-            response_format: this.supportsStructuredOutputs === true && mode.schema != null ? {
-              type: "json_schema",
-              json_schema: {
-                schema: mode.schema,
-                name: (_b = mode.name) != null ? _b : "response",
-                description: mode.description
-              }
-            } : { type: "json_object" }
-          },
-          warnings
-        };
-      }
-      case "object-tool": {
-        return {
-          args: {
-            ...baseArgs,
-            tool_choice: {
-              type: "function",
-              function: { name: mode.tool.name }
-            },
-            tools: [
-              {
-                type: "function",
-                function: {
-                  name: mode.tool.name,
-                  description: mode.tool.description,
-                  parameters: mode.tool.parameters
-                }
-              }
-            ]
-          },
-          warnings
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
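For orientation (commentary, not from the diff): `getArgs` no longer switches on `mode.type`; it always builds a single request body, folds the `prepareTools` result into it, and derives `response_format` from `responseFormat` plus `supportsStructuredOutputs`. A hedged sketch of the `args` shape the new code would produce for a JSON call with structured outputs enabled; the concrete values are illustrative:

```ts
// Illustrative result shape only; mirrors the object literal added above.
const args = {
  model: "my-model",            // this.modelId
  max_tokens: 1024,
  temperature: 0.2,
  response_format: {
    type: "json_schema",
    json_schema: {
      schema: { type: "object", properties: { answer: { type: "string" } } },
      name: "response",         // fallback when responseFormat.name is not set
      description: undefined,
    },
  },
  messages: [{ role: "user", content: "Answer as JSON." }],
  tools: undefined,             // openaiTools from prepareTools
  tool_choice: undefined,       // openaiToolChoice from prepareTools
};
```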
@@ -388,15 +341,15 @@ var OpenAICompatibleChatLanguageModel = class {
       responseHeaders,
       value: responseBody,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils.createJsonResponseHandler)(
         OpenAICompatibleChatResponseSchema
       ),
       abortSignal: options.abortSignal,
@@ -431,7 +384,7 @@ var OpenAICompatibleChatLanguageModel = class {
         var _a2;
         return {
           toolCallType: "function",
-          toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0,
+          toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils.generateId)(),
           toolName: toolCall.function.name,
           args: toolCall.function.arguments
         };
@@ -507,18 +460,18 @@ var OpenAICompatibleChatLanguageModel = class {
     const { args, warnings } = this.getArgs({ ...options });
     const body = JSON.stringify({ ...args, stream: true });
     const metadataExtractor = (_a = this.config.metadataExtractor) == null ? void 0 : _a.createStreamExtractor();
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils.combineHeaders)(this.config.headers(), options.headers),
       body: {
         ...args,
         stream: true
       },
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils.createEventSourceResponseHandler)(
         this.chunkSchema
       ),
       abortSignal: options.abortSignal,
@@ -652,11 +605,11 @@ var OpenAICompatibleChatLanguageModel = class {
                   argsTextDelta: toolCall2.function.arguments
                 });
               }
-              if ((0,
+              if ((0, import_provider_utils.isParsableJson)(toolCall2.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (_e = toolCall2.id) != null ? _e : (0,
+                  toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils.generateId)(),
                   toolName: toolCall2.function.name,
                   args: toolCall2.function.arguments
                 });
@@ -679,11 +632,11 @@ var OpenAICompatibleChatLanguageModel = class {
                 toolName: toolCall.function.name,
                 argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
               });
-              if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0,
+              if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils.isParsableJson)(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (_l = toolCall.id) != null ? _l : (0,
+                  toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils.generateId)(),
                   toolName: toolCall.function.name,
                   args: toolCall.function.arguments
                 });
@@ -799,8 +752,7 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_zod2.z.union
 ]);
 
 // src/openai-compatible-completion-language-model.ts
-var
-var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
 var import_zod3 = require("zod");
 
 // src/convert-to-openai-compatible-completion-prompt.ts
@@ -835,13 +787,8 @@ function convertToOpenAICompatibleCompletionPrompt({
             case "text": {
               return part.text;
             }
-            case "image": {
-              throw new import_provider4.UnsupportedFunctionalityError({
-                functionality: "images"
-              });
-            }
           }
-        }).join("");
+        }).filter(Boolean).join("");
         text += `${user}:
 ${userMessage}
 
@@ -901,7 +848,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
     this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(
       errorStructure.errorSchema
     );
-    this.failedResponseHandler = (0,
+    this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(errorStructure);
   }
   get provider() {
     return this.config.provider;
@@ -910,7 +857,6 @@ var OpenAICompatibleCompletionLanguageModel = class {
     return this.config.provider.split(".")[0].trim();
   }
   getArgs({
-    mode,
     inputFormat,
     prompt,
     maxTokens,
@@ -922,16 +868,19 @@ var OpenAICompatibleCompletionLanguageModel = class {
     stopSequences: userStopSequences,
     responseFormat,
     seed,
-
+    providerOptions,
+    tools,
+    toolChoice
   }) {
-    var _a;
-    const type = mode.type;
     const warnings = [];
     if (topK != null) {
-      warnings.push({
-
-
-      });
+      warnings.push({ type: "unsupported-setting", setting: "topK" });
+    }
+    if (tools == null ? void 0 : tools.length) {
+      warnings.push({ type: "unsupported-setting", setting: "tools" });
+    }
+    if (toolChoice != null) {
+      warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
     }
     if (responseFormat != null && responseFormat.type !== "text") {
       warnings.push({
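Commentary: for the completion model, `tools` and `toolChoice` are now reported as `unsupported-setting` warnings instead of throwing (the old throwing branches are removed in the next hunk). A hedged sketch of what a caller would see; the call goes through the model's `getArgs` purely for illustration, and the prompt and tool values are made up:

```ts
// Illustrative: tools/toolChoice on the completion model now produce warnings.
const { warnings } = completionModel.getArgs({
  inputFormat: "prompt",
  prompt: [{ role: "user", content: [{ type: "text", text: "Hello" }] }],
  tools: [{ type: "function", name: "noop", parameters: {} }],
  toolChoice: { type: "auto" },
});
// warnings would include:
// { type: "unsupported-setting", setting: "tools" }
// { type: "unsupported-setting", setting: "toolChoice" }
```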
@@ -942,56 +891,30 @@ var OpenAICompatibleCompletionLanguageModel = class {
     }
     const { prompt: completionPrompt, stopSequences } = convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });
     const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    return {
+      args: {
+        // model id:
+        model: this.modelId,
+        // model specific settings:
+        echo: this.settings.echo,
+        logit_bias: this.settings.logitBias,
+        suffix: this.settings.suffix,
+        user: this.settings.user,
+        // standardized settings:
+        max_tokens: maxTokens,
+        temperature,
+        top_p: topP,
+        frequency_penalty: frequencyPenalty,
+        presence_penalty: presencePenalty,
+        seed,
+        ...providerOptions == null ? void 0 : providerOptions[this.providerOptionsName],
+        // prompt:
+        prompt: completionPrompt,
+        // stop sequences:
+        stop: stop.length > 0 ? stop : void 0
+      },
+      warnings
     };
-    switch (type) {
-      case "regular": {
-        if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new import_provider5.UnsupportedFunctionalityError({
-            functionality: "tools"
-          });
-        }
-        if (mode.toolChoice) {
-          throw new import_provider5.UnsupportedFunctionalityError({
-            functionality: "toolChoice"
-          });
-        }
-        return { args: baseArgs, warnings };
-      }
-      case "object-json": {
-        throw new import_provider5.UnsupportedFunctionalityError({
-          functionality: "object-json mode"
-        });
-      }
-      case "object-tool": {
-        throw new import_provider5.UnsupportedFunctionalityError({
-          functionality: "object-tool mode"
-        });
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
   }
   async doGenerate(options) {
     var _a, _b, _c, _d;
@@ -1000,15 +923,15 @@ var OpenAICompatibleCompletionLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils2.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
         openaiCompatibleCompletionResponseSchema
       ),
       abortSignal: options.abortSignal,
@@ -1036,15 +959,15 @@ var OpenAICompatibleCompletionLanguageModel = class {
       ...args,
       stream: true
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: this.failedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
         this.chunkSchema
       ),
       abortSignal: options.abortSignal,
@@ -1150,8 +1073,8 @@ var createOpenAICompatibleCompletionChunkSchema = (errorSchema) => import_zod3.z
 ]);
 
 // src/openai-compatible-embedding-model.ts
-var
-var
+var import_provider5 = require("@ai-sdk/provider");
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_zod4 = require("zod");
 var OpenAICompatibleEmbeddingModel = class {
   constructor(modelId, settings, config) {
@@ -1178,19 +1101,19 @@ var OpenAICompatibleEmbeddingModel = class {
   }) {
     var _a;
     if (values.length > this.maxEmbeddingsPerCall) {
-      throw new
+      throw new import_provider5.TooManyEmbeddingValuesForCallError({
         provider: this.provider,
         modelId: this.modelId,
         maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
         values
       });
     }
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
         path: "/embeddings",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), headers),
       body: {
         model: this.modelId,
         input: values,
@@ -1198,10 +1121,10 @@ var OpenAICompatibleEmbeddingModel = class {
         dimensions: this.settings.dimensions,
         user: this.settings.user
       },
-      failedResponseHandler: (0,
+      failedResponseHandler: (0, import_provider_utils3.createJsonErrorResponseHandler)(
         (_a = this.config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure
       ),
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
         openaiTextEmbeddingResponseSchema
       ),
       abortSignal,
@@ -1220,7 +1143,7 @@ var openaiTextEmbeddingResponseSchema = import_zod4.z.object({
 });
 
 // src/openai-compatible-image-model.ts
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_zod5 = require("zod");
 var OpenAICompatibleImageModel = class {
   constructor(modelId, settings, config) {
@@ -1259,12 +1182,12 @@ var OpenAICompatibleImageModel = class {
       warnings.push({ type: "unsupported-setting", setting: "seed" });
     }
     const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { value: response, responseHeaders } = await (0,
+    const { value: response, responseHeaders } = await (0, import_provider_utils4.postJsonToApi)({
       url: this.config.url({
         path: "/images/generations",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
       body: {
         model: this.modelId,
         prompt,
@@ -1274,10 +1197,10 @@ var OpenAICompatibleImageModel = class {
         response_format: "b64_json",
         ...this.settings.user ? { user: this.settings.user } : {}
       },
-      failedResponseHandler: (0,
+      failedResponseHandler: (0, import_provider_utils4.createJsonErrorResponseHandler)(
         (_e = this.config.errorStructure) != null ? _e : defaultOpenAICompatibleErrorStructure
       ),
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
         openaiCompatibleImageResponseSchema
       ),
       abortSignal,
@@ -1299,9 +1222,9 @@ var openaiCompatibleImageResponseSchema = import_zod5.z.object({
 });
 
 // src/openai-compatible-provider.ts
-var
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
 function createOpenAICompatible(options) {
-  const baseURL = (0,
+  const baseURL = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL);
   const providerName = options.name;
   const getHeaders = () => ({
     ...options.apiKey && { Authorization: `Bearer ${options.apiKey}` },