@apertis/ai-sdk-provider 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +88 -0
- package/dist/index.cjs +441 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +59 -0
- package/dist/index.d.ts +59 -0
- package/dist/index.js +418 -0
- package/dist/index.js.map +1 -0
- package/package.json +59 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,441 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
var index_exports = {};
|
|
22
|
+
__export(index_exports, {
|
|
23
|
+
apertis: () => apertis,
|
|
24
|
+
createApertis: () => createApertis
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(index_exports);
|
|
27
|
+
|
|
28
|
+
// src/apertis-provider.ts
|
|
29
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
30
|
+
|
|
31
|
+
// src/apertis-chat-language-model.ts
|
|
32
|
+
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
|
33
|
+
|
|
34
|
+
// src/apertis-error.ts
|
|
35
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
36
|
+
var import_zod = require("zod");
|
|
37
|
+
// Shape of the error payload the Apertis API returns (OpenAI-compatible
// `{ error: { message, type?, code?, param? } }` envelope).
var apertisErrorDetailSchema = import_zod.z.object({
  message: import_zod.z.string(),
  type: import_zod.z.string().optional(),
  code: import_zod.z.string().nullable().optional(),
  param: import_zod.z.string().nullable().optional()
});
var apertisErrorSchema = import_zod.z.object({
  error: apertisErrorDetailSchema
});
// Response handler that parses failed HTTP responses against the schema
// above and surfaces the server-provided message.
var apertisFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: apertisErrorSchema,
  errorToMessage: (error) => error.error.message
});
|
|
49
|
+
|
|
50
|
+
// src/schemas/chat-response.ts
|
|
51
|
+
var import_zod2 = require("zod");
|
|
52
|
+
// Zod schema for a non-streaming OpenAI-compatible chat completion
// response. Envelope fields (`object`, `created`, `model`, `usage`) are
// optional so minor deviations from the OpenAI wire format still parse.
var openAIChatResponseSchema = import_zod2.z.object({
  id: import_zod2.z.string(),
  object: import_zod2.z.literal("chat.completion").optional(),
  created: import_zod2.z.number().optional(),
  model: import_zod2.z.string().optional(),
  choices: import_zod2.z.array(
    import_zod2.z.object({
      index: import_zod2.z.number(),
      message: import_zod2.z.object({
        role: import_zod2.z.literal("assistant"),
        // content is nullable: models may answer with tool calls only
        content: import_zod2.z.string().nullable(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            id: import_zod2.z.string(),
            type: import_zod2.z.literal("function"),
            function: import_zod2.z.object({
              name: import_zod2.z.string(),
              // arguments arrive as a JSON-encoded string, not an object
              arguments: import_zod2.z.string()
            })
          })
        ).optional()
      }),
      finish_reason: import_zod2.z.string().nullable(),
      logprobs: import_zod2.z.any().nullable().optional()
    })
  ),
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number(),
    total_tokens: import_zod2.z.number().optional()
  }).optional()
});
// Zod schema for one SSE chunk of a streaming chat completion. Deltas are
// sparse: every field of `delta` (and of each tool-call fragment) may be
// absent on any given chunk.
var openAIChatChunkSchema = import_zod2.z.object({
  id: import_zod2.z.string(),
  object: import_zod2.z.literal("chat.completion.chunk").optional(),
  created: import_zod2.z.number().optional(),
  model: import_zod2.z.string().optional(),
  choices: import_zod2.z.array(
    import_zod2.z.object({
      index: import_zod2.z.number(),
      delta: import_zod2.z.object({
        role: import_zod2.z.literal("assistant").optional(),
        content: import_zod2.z.string().nullable().optional(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            // index correlates fragments of the same tool call across chunks
            index: import_zod2.z.number(),
            id: import_zod2.z.string().optional(),
            type: import_zod2.z.literal("function").optional(),
            function: import_zod2.z.object({
              name: import_zod2.z.string().optional(),
              arguments: import_zod2.z.string().optional()
            }).optional()
          })
        ).optional()
      }),
      finish_reason: import_zod2.z.string().nullable().optional()
    })
  ),
  // usage may be null or missing on most chunks; `stream_options`
  // with include_usage is requested elsewhere in this file
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  }).nullish()
});
|
|
115
|
+
|
|
116
|
+
// src/utils/map-finish-reason.ts
|
|
117
|
+
// Lookup table from OpenAI-style finish_reason strings to the AI SDK's
// finish-reason vocabulary. A Map is used (rather than a plain object) so
// hostile keys like "constructor" cannot hit prototype properties.
var FINISH_REASON_BY_WIRE_VALUE = /* @__PURE__ */ new Map([
  ["stop", "stop"],
  ["length", "length"],
  ["tool_calls", "tool-calls"],
  ["content_filter", "content-filter"]
]);
// Translate a finish_reason from the wire into the SDK vocabulary.
// Unrecognized, null, or undefined reasons collapse to "unknown".
function mapApertisFinishReason(finishReason) {
  return FINISH_REASON_BY_WIRE_VALUE.get(finishReason) ?? "unknown";
}
|
|
131
|
+
|
|
132
|
+
// src/utils/convert-to-openai-messages.ts
|
|
133
|
+
// Convert an AI SDK LanguageModelV1 prompt into the OpenAI-compatible
// message array expected by the chat completions endpoint.
//
// Fix: JSON.stringify(undefined) returns undefined (it does NOT throw), so
// the previous try/catch fallback never applied and tool-call `arguments` /
// tool-result `content` could end up as `undefined` instead of the intended
// "{}" JSON string. Both sites now also coalesce with ?? "{}".
function convertToOpenAIMessages(prompt) {
  const messages = [];
  for (const message of prompt) {
    switch (message.role) {
      case "system":
        messages.push({ role: "system", content: message.content });
        break;
      case "user":
        messages.push({
          role: "user",
          content: message.content.map((part) => {
            switch (part.type) {
              case "text":
                return { type: "text", text: part.text };
              case "image":
                // URL images pass through; raw bytes become a base64 data URL.
                return {
                  type: "image_url",
                  image_url: {
                    url: part.image instanceof URL ? part.image.toString() : `data:${part.mimeType ?? "image/png"};base64,${Buffer.from(part.image).toString("base64")}`
                  }
                };
              default:
                throw new Error(
                  `Unsupported user content part type: ${part.type}`
                );
            }
          })
        });
        break;
      case "assistant": {
        // Concatenate all text parts; tool calls are collected separately.
        const textContent = message.content.filter((p) => p.type === "text").map((p) => p.text).join("");
        const toolCalls = message.content.filter(
          (p) => p.type === "tool-call"
        ).map((tc) => {
          // `arguments` must always be a valid JSON string: coalesce the
          // undefined-returning case and catch circular-structure throws.
          let arguments_str = "{}";
          try {
            arguments_str = JSON.stringify(tc.args) ?? "{}";
          } catch {
            arguments_str = "{}";
          }
          return {
            id: tc.toolCallId,
            type: "function",
            function: { name: tc.toolName, arguments: arguments_str }
          };
        });
        messages.push({
          role: "assistant",
          // OpenAI expects null (not "") when there is no text content
          content: textContent || null,
          ...toolCalls.length > 0 ? { tool_calls: toolCalls } : {}
        });
        break;
      }
      case "tool":
        // Each tool result becomes its own "tool" message keyed by call id.
        for (const result of message.content) {
          let content = "{}";
          if (typeof result.result === "string") {
            content = result.result;
          } else {
            try {
              content = JSON.stringify(result.result) ?? "{}";
            } catch {
              content = "{}";
            }
          }
          messages.push({
            role: "tool",
            tool_call_id: result.toolCallId,
            content
          });
        }
        break;
    }
  }
  return messages;
}
|
|
209
|
+
|
|
210
|
+
// src/utils/convert-to-openai-tools.ts
|
|
211
|
+
// Map AI SDK function tools to the OpenAI `tools` request parameter.
// Returns undefined when there is nothing to send, so the caller can omit
// the field from the request body entirely.
function convertToOpenAITools(tools) {
  if (!tools?.length) return void 0;
  const converted = [];
  for (const tool of tools) {
    converted.push({
      type: "function",
      function: {
        name: tool.name,
        description: tool.description,
        parameters: tool.parameters
      }
    });
  }
  return converted;
}
|
|
222
|
+
// Map the AI SDK tool-choice setting onto the OpenAI `tool_choice`
// parameter. The three mode strings pass through unchanged; a specific
// tool selection becomes the `{ type: "function", ... }` object form.
// Anything unrecognized (or absent) yields undefined so the field is
// simply omitted from the request.
function convertToOpenAIToolChoice(toolChoice) {
  if (!toolChoice) return void 0;
  const mode = toolChoice.type;
  if (mode === "tool") {
    return { type: "function", function: { name: toolChoice.toolName } };
  }
  if (mode === "none" || mode === "auto" || mode === "required") {
    return mode;
  }
  return void 0;
}
|
|
237
|
+
|
|
238
|
+
// src/apertis-chat-language-model.ts
|
|
239
|
+
// LanguageModelV1 implementation that talks to an OpenAI-compatible
// `/chat/completions` endpoint. `config.headers` is a function so auth
// headers are computed per request; `config.fetch` allows injection of a
// custom fetch implementation.
var ApertisChatLanguageModel = class {
  constructor(modelId, settings, config) {
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  specificationVersion = "v1";
  defaultObjectGenerationMode = "json";
  supportsImageUrls = true;
  get provider() {
    return this.config.provider;
  }
  get supportsStructuredOutputs() {
    return true;
  }
  // Non-streaming generation: POST the request, validate the JSON response
  // against openAIChatResponseSchema, and adapt the first choice into the
  // AI SDK result shape (text, toolCalls, finishReason, usage).
  async doGenerate(options) {
    const body = this.buildRequestBody(options, false);
    const { value: response } = await (0, import_provider_utils2.postJsonToApi)({
      url: `${this.config.baseURL}/chat/completions`,
      headers: this.config.headers(),
      body,
      failedResponseHandler: apertisFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
        openAIChatResponseSchema
      ),
      fetch: this.config.fetch,
      abortSignal: options.abortSignal
    });
    // NOTE(review): only the first choice is used; assumes n=1 responses.
    const choice = response.choices[0];
    return {
      text: choice.message.content ?? void 0,
      toolCalls: choice.message.tool_calls?.map((tc) => ({
        toolCallType: "function",
        toolCallId: tc.id,
        toolName: tc.function.name,
        args: tc.function.arguments
      })),
      finishReason: mapApertisFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage?.prompt_tokens ?? 0,
        completionTokens: response.usage?.completion_tokens ?? 0
      },
      rawCall: { rawPrompt: options.prompt, rawSettings: body }
    };
  }
  // Streaming generation: POST with stream=true, parse the SSE stream
  // against openAIChatChunkSchema, and transform chunks into AI SDK stream
  // parts. Tool-call fragments are accumulated per `index` in
  // toolCallBuffers and emitted as complete tool-call parts when a
  // finish_reason arrives (or on flush, if the stream closes early).
  async doStream(options) {
    const body = this.buildRequestBody(options, true);
    const { value: response } = await (0, import_provider_utils2.postJsonToApi)({
      url: `${this.config.baseURL}/chat/completions`,
      headers: this.config.headers(),
      body,
      failedResponseHandler: apertisFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
        openAIChatChunkSchema
      ),
      fetch: this.config.fetch,
      abortSignal: options.abortSignal
    });
    // index -> partially assembled tool call ({ id, name, arguments })
    const toolCallBuffers = /* @__PURE__ */ new Map();
    const transformStream = new TransformStream({
      transform(parseResult, controller) {
        // Chunks that failed schema validation are dropped silently.
        if (!parseResult.success) {
          return;
        }
        const chunk = parseResult.value;
        const choice = chunk.choices[0];
        // NOTE(review): chunks with an empty `choices` array are skipped
        // here. With stream_options.include_usage the final usage payload
        // is presumably delivered in such a choice-less chunk, so reported
        // usage may always fall back to 0 — confirm against the API.
        if (!choice) return;
        if (choice.delta.content) {
          controller.enqueue({
            type: "text-delta",
            textDelta: choice.delta.content
          });
        }
        if (choice.delta.tool_calls) {
          for (const tc of choice.delta.tool_calls) {
            let buffer = toolCallBuffers.get(tc.index);
            if (!buffer) {
              // First fragment for this index: synthesize an id if absent.
              buffer = { id: tc.id ?? (0, import_provider_utils2.generateId)(), name: "", arguments: "" };
              toolCallBuffers.set(tc.index, buffer);
            }
            if (tc.id) buffer.id = tc.id;
            if (tc.function?.name) buffer.name += tc.function.name;
            if (tc.function?.arguments)
              buffer.arguments += tc.function.arguments;
          }
        }
        if (choice.finish_reason) {
          // Emit completed tool calls (only those that received a name).
          for (const [, buffer] of toolCallBuffers) {
            if (buffer.name) {
              controller.enqueue({
                type: "tool-call",
                toolCallType: "function",
                toolCallId: buffer.id,
                toolName: buffer.name,
                args: buffer.arguments
              });
            }
          }
          toolCallBuffers.clear();
          controller.enqueue({
            type: "finish",
            finishReason: mapApertisFinishReason(choice.finish_reason),
            usage: {
              promptTokens: chunk.usage?.prompt_tokens ?? 0,
              completionTokens: chunk.usage?.completion_tokens ?? 0
            }
          });
        }
      },
      // Emit any tool calls still buffered when the stream closes without
      // a finish_reason (early termination).
      flush(controller) {
        for (const [, buffer] of toolCallBuffers) {
          if (buffer.name) {
            controller.enqueue({
              type: "tool-call",
              toolCallType: "function",
              toolCallId: buffer.id,
              toolName: buffer.name,
              args: buffer.arguments
            });
          }
        }
      }
    });
    return {
      stream: response.pipeThrough(transformStream),
      rawCall: { rawPrompt: options.prompt, rawSettings: body }
    };
  }
  // Assemble the JSON request body. Optional fields are added only when
  // defined so `undefined` values are never serialized onto the wire.
  buildRequestBody(options, stream) {
    // Tools/toolChoice only apply in "regular" mode; "object-json" mode
    // instead requests a JSON-object response format.
    const tools = options.mode.type === "regular" ? this.filterFunctionTools(options.mode.tools) : void 0;
    const toolChoice = options.mode.type === "regular" ? options.mode.toolChoice : void 0;
    const responseFormat = options.mode.type === "object-json" ? { type: "json_object" } : void 0;
    const body = {
      model: this.modelId,
      messages: convertToOpenAIMessages(options.prompt),
      stream
    };
    // Ask the server to include token usage in the final stream chunk.
    if (stream) body.stream_options = { include_usage: true };
    if (options.temperature !== void 0)
      body.temperature = options.temperature;
    if (options.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options.topP !== void 0) body.top_p = options.topP;
    if (options.frequencyPenalty !== void 0)
      body.frequency_penalty = options.frequencyPenalty;
    if (options.presencePenalty !== void 0)
      body.presence_penalty = options.presencePenalty;
    if (options.stopSequences !== void 0) body.stop = options.stopSequences;
    if (options.seed !== void 0) body.seed = options.seed;
    const convertedTools = convertToOpenAITools(tools);
    if (convertedTools !== void 0) body.tools = convertedTools;
    const convertedToolChoice = convertToOpenAIToolChoice(toolChoice);
    if (convertedToolChoice !== void 0)
      body.tool_choice = convertedToolChoice;
    if (responseFormat !== void 0) body.response_format = responseFormat;
    // Provider-level settings supplied at model construction time.
    if (this.settings.user !== void 0) body.user = this.settings.user;
    if (this.settings.logprobs !== void 0)
      body.logprobs = this.settings.logprobs;
    if (this.settings.topLogprobs !== void 0)
      body.top_logprobs = this.settings.topLogprobs;
    return body;
  }
  // Keep only function-type tools; other tool kinds are silently dropped.
  filterFunctionTools(tools) {
    if (!tools) return void 0;
    return tools.filter(
      (tool) => tool.type === "function"
    );
  }
};
|
|
407
|
+
|
|
408
|
+
// src/apertis-provider.ts
|
|
409
|
+
// Build an Apertis provider bound to the given settings. The returned
// value is callable (provider("model-id", settings?)) and also exposes
// .chat / .languageModel aliases pointing at the same factory.
function createApertis(options = {}) {
  const baseURL = (0, import_provider_utils3.withoutTrailingSlash)(options.baseURL) ?? "https://api.apertis.ai/v1";
  // Headers are computed per request; the API key is resolved (and its
  // presence validated) only when a call is actually made.
  const getHeaders = () => {
    const apiKey = (0, import_provider_utils3.loadApiKey)({
      apiKey: options.apiKey,
      environmentVariableName: "APERTIS_API_KEY",
      description: "Apertis API key"
    });
    return {
      ...options.headers,
      Authorization: `Bearer ${apiKey}`,
      "Content-Type": "application/json"
    };
  };
  const createChatModel = (modelId, settings = {}) => {
    return new ApertisChatLanguageModel(modelId, settings, {
      provider: "apertis.chat",
      baseURL,
      headers: getHeaders,
      fetch: options.fetch
    });
  };
  // The provider is a plain function with the named factories attached.
  const provider = (modelId, settings) => createChatModel(modelId, settings);
  provider.chat = createChatModel;
  provider.languageModel = createChatModel;
  return provider;
}
// Default provider instance configured entirely from the environment.
var apertis = createApertis();
|
|
436
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
437
|
+
0 && (module.exports = {
|
|
438
|
+
apertis,
|
|
439
|
+
createApertis
|
|
440
|
+
});
|
|
441
|
+
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/apertis-provider.ts","../src/apertis-chat-language-model.ts","../src/apertis-error.ts","../src/schemas/chat-response.ts","../src/utils/map-finish-reason.ts","../src/utils/convert-to-openai-messages.ts","../src/utils/convert-to-openai-tools.ts"],"sourcesContent":["export {\n createApertis,\n apertis,\n type ApertisProvider,\n} from \"./apertis-provider\";\nexport type {\n ApertisProviderSettings,\n ApertisChatSettings,\n ApertisModelId,\n} from \"./apertis-chat-settings\";\n","import type { LanguageModelV1 } from \"@ai-sdk/provider\";\nimport { loadApiKey, withoutTrailingSlash } from \"@ai-sdk/provider-utils\";\nimport { ApertisChatLanguageModel } from \"./apertis-chat-language-model\";\nimport type {\n ApertisChatSettings,\n ApertisModelId,\n ApertisProviderSettings,\n} from \"./apertis-chat-settings\";\n\nexport interface ApertisProvider {\n /**\n * Creates a chat model for text generation.\n */\n (modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;\n\n /**\n * Creates a chat model for text generation.\n */\n chat(\n modelId: ApertisModelId,\n settings?: ApertisChatSettings,\n ): LanguageModelV1;\n\n /**\n * Creates a chat model for text generation (alias for languageModel).\n */\n languageModel(\n modelId: ApertisModelId,\n settings?: ApertisChatSettings,\n ): LanguageModelV1;\n}\n\nexport function createApertis(\n options: ApertisProviderSettings = {},\n): ApertisProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
\"https://api.apertis.ai/v1\";\n\n const getHeaders = () => ({\n ...options.headers,\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: \"APERTIS_API_KEY\",\n description: \"Apertis API key\",\n })}`,\n \"Content-Type\": \"application/json\",\n });\n\n const createChatModel = (\n modelId: ApertisModelId,\n settings: ApertisChatSettings = {},\n ): LanguageModelV1 =>\n new ApertisChatLanguageModel(modelId, settings, {\n provider: \"apertis.chat\",\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider: ApertisProvider = Object.assign(\n (modelId: ApertisModelId, settings?: ApertisChatSettings) =>\n createChatModel(modelId, settings),\n {\n chat: createChatModel,\n languageModel: createChatModel,\n },\n );\n\n return provider;\n}\n\n/**\n * Default Apertis provider instance.\n */\nexport const apertis = createApertis();\n","import type {\n LanguageModelV1,\n LanguageModelV1CallOptions,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1FunctionTool,\n LanguageModelV1StreamPart,\n} from \"@ai-sdk/provider\";\nimport {\n type ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n postJsonToApi,\n} from \"@ai-sdk/provider-utils\";\nimport type { ApertisChatSettings } from \"./apertis-chat-settings\";\nimport { apertisFailedResponseHandler } from \"./apertis-error\";\nimport {\n type OpenAIChatChunk,\n openAIChatChunkSchema,\n openAIChatResponseSchema,\n} from \"./schemas/chat-response\";\nimport {\n convertToOpenAIMessages,\n convertToOpenAIToolChoice,\n convertToOpenAITools,\n mapApertisFinishReason,\n} from \"./utils\";\n\nexport interface ApertisChatConfig {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string>;\n fetch?: typeof fetch;\n}\n\nexport class ApertisChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = \"v1\";\n readonly defaultObjectGenerationMode = \"json\";\n readonly 
supportsImageUrls = true;\n\n constructor(\n readonly modelId: string,\n private readonly settings: ApertisChatSettings,\n private readonly config: ApertisChatConfig,\n ) {}\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportsStructuredOutputs(): boolean {\n return true;\n }\n\n async doGenerate(options: LanguageModelV1CallOptions): Promise<{\n text?: string;\n toolCalls?: Array<{\n toolCallType: \"function\";\n toolCallId: string;\n toolName: string;\n args: string;\n }>;\n finishReason: LanguageModelV1FinishReason;\n usage: { promptTokens: number; completionTokens: number };\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const body = this.buildRequestBody(options, false);\n\n const { value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body,\n failedResponseHandler: apertisFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openAIChatResponseSchema,\n ),\n fetch: this.config.fetch,\n abortSignal: options.abortSignal,\n });\n\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map((tc) => ({\n toolCallType: \"function\" as const,\n toolCallId: tc.id,\n toolName: tc.function.name,\n args: tc.function.arguments,\n })),\n finishReason: mapApertisFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? 0,\n completionTokens: response.usage?.completion_tokens ?? 
0,\n },\n rawCall: { rawPrompt: options.prompt, rawSettings: body },\n };\n }\n\n async doStream(options: LanguageModelV1CallOptions): Promise<{\n stream: ReadableStream<LanguageModelV1StreamPart>;\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const body = this.buildRequestBody(options, true);\n\n const { value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body,\n failedResponseHandler: apertisFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openAIChatChunkSchema,\n ),\n fetch: this.config.fetch,\n abortSignal: options.abortSignal,\n });\n\n const toolCallBuffers: Map<\n number,\n { id: string; name: string; arguments: string }\n > = new Map();\n\n const transformStream = new TransformStream<\n ParseResult<OpenAIChatChunk>,\n LanguageModelV1StreamPart\n >({\n transform(parseResult, controller) {\n // Skip failed parse results\n if (!parseResult.success) {\n return;\n }\n\n const chunk = parseResult.value;\n const choice = chunk.choices[0];\n\n if (!choice) return;\n\n // Handle text delta\n if (choice.delta.content) {\n controller.enqueue({\n type: \"text-delta\",\n textDelta: choice.delta.content,\n });\n }\n\n // Handle tool calls\n if (choice.delta.tool_calls) {\n for (const tc of choice.delta.tool_calls) {\n let buffer = toolCallBuffers.get(tc.index);\n\n if (!buffer) {\n buffer = { id: tc.id ?? 
generateId(), name: \"\", arguments: \"\" };\n toolCallBuffers.set(tc.index, buffer);\n }\n\n if (tc.id) buffer.id = tc.id;\n if (tc.function?.name) buffer.name += tc.function.name;\n if (tc.function?.arguments)\n buffer.arguments += tc.function.arguments;\n }\n }\n\n // Handle finish\n if (choice.finish_reason) {\n // Emit completed tool calls (only those with valid names)\n for (const [, buffer] of toolCallBuffers) {\n if (buffer.name) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: buffer.id,\n toolName: buffer.name,\n args: buffer.arguments,\n });\n }\n }\n // Clear buffers after emitting\n toolCallBuffers.clear();\n\n controller.enqueue({\n type: \"finish\",\n finishReason: mapApertisFinishReason(choice.finish_reason),\n usage: {\n promptTokens: chunk.usage?.prompt_tokens ?? 0,\n completionTokens: chunk.usage?.completion_tokens ?? 0,\n },\n });\n }\n },\n flush(controller) {\n // Emit any remaining buffered tool calls when stream closes early\n for (const [, buffer] of toolCallBuffers) {\n if (buffer.name) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: buffer.id,\n toolName: buffer.name,\n args: buffer.arguments,\n });\n }\n }\n },\n });\n\n return {\n stream: response.pipeThrough(transformStream),\n rawCall: { rawPrompt: options.prompt, rawSettings: body },\n };\n }\n\n private buildRequestBody(\n options: LanguageModelV1CallOptions,\n stream: boolean,\n ) {\n // Extract tools and toolChoice from mode if available\n const tools =\n options.mode.type === \"regular\"\n ? this.filterFunctionTools(options.mode.tools)\n : undefined;\n const toolChoice =\n options.mode.type === \"regular\" ? options.mode.toolChoice : undefined;\n\n // Determine response format based on mode\n const responseFormat =\n options.mode.type === \"object-json\"\n ? 
{ type: \"json_object\" as const }\n : undefined;\n\n const body: Record<string, unknown> = {\n model: this.modelId,\n messages: convertToOpenAIMessages(options.prompt),\n stream,\n };\n\n // Only add defined optional fields to avoid sending undefined to API\n if (stream) body.stream_options = { include_usage: true };\n if (options.temperature !== undefined)\n body.temperature = options.temperature;\n if (options.maxTokens !== undefined) body.max_tokens = options.maxTokens;\n if (options.topP !== undefined) body.top_p = options.topP;\n if (options.frequencyPenalty !== undefined)\n body.frequency_penalty = options.frequencyPenalty;\n if (options.presencePenalty !== undefined)\n body.presence_penalty = options.presencePenalty;\n if (options.stopSequences !== undefined) body.stop = options.stopSequences;\n if (options.seed !== undefined) body.seed = options.seed;\n\n const convertedTools = convertToOpenAITools(tools);\n if (convertedTools !== undefined) body.tools = convertedTools;\n\n const convertedToolChoice = convertToOpenAIToolChoice(toolChoice);\n if (convertedToolChoice !== undefined)\n body.tool_choice = convertedToolChoice;\n\n if (responseFormat !== undefined) body.response_format = responseFormat;\n if (this.settings.user !== undefined) body.user = this.settings.user;\n if (this.settings.logprobs !== undefined)\n body.logprobs = this.settings.logprobs;\n if (this.settings.topLogprobs !== undefined)\n body.top_logprobs = this.settings.topLogprobs;\n\n return body;\n }\n\n private filterFunctionTools(\n tools: Array<LanguageModelV1FunctionTool | { type: string }> | undefined,\n ): LanguageModelV1FunctionTool[] | undefined {\n if (!tools) return undefined;\n return tools.filter(\n (tool): tool is LanguageModelV1FunctionTool => tool.type === \"function\",\n );\n }\n}\n","import { createJsonErrorResponseHandler } from \"@ai-sdk/provider-utils\";\nimport { z } from \"zod\";\n\nconst apertisErrorSchema = z.object({\n error: z.object({\n message: z.string(),\n 
type: z.string().optional(),\n code: z.string().nullable().optional(),\n param: z.string().nullable().optional(),\n }),\n});\n\nexport type ApertisErrorData = z.infer<typeof apertisErrorSchema>;\n\nexport const apertisFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: apertisErrorSchema,\n errorToMessage: (error) => error.error.message,\n});\n","import { z } from \"zod\";\n\nexport const openAIChatResponseSchema = z.object({\n id: z.string(),\n object: z.literal(\"chat.completion\").optional(),\n created: z.number().optional(),\n model: z.string().optional(),\n choices: z.array(\n z.object({\n index: z.number(),\n message: z.object({\n role: z.literal(\"assistant\"),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n type: z.literal(\"function\"),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional(),\n }),\n finish_reason: z.string().nullable(),\n logprobs: z.any().nullable().optional(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number().optional(),\n })\n .optional(),\n});\n\nexport type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;\n\nexport const openAIChatChunkSchema = z.object({\n id: z.string(),\n object: z.literal(\"chat.completion.chunk\").optional(),\n created: z.number().optional(),\n model: z.string().optional(),\n choices: z.array(\n z.object({\n index: z.number(),\n delta: z.object({\n role: z.literal(\"assistant\").optional(),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().optional(),\n type: z.literal(\"function\").optional(),\n function: z\n .object({\n name: z.string().optional(),\n arguments: z.string().optional(),\n })\n .optional(),\n }),\n )\n .optional(),\n }),\n finish_reason: z.string().nullable().optional(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n 
completion_tokens: z.number(),\n })\n .nullish(),\n});\n\nexport type OpenAIChatChunk = z.infer<typeof openAIChatChunkSchema>;\n","import type { LanguageModelV1FinishReason } from \"@ai-sdk/provider\";\n\nexport function mapApertisFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case \"stop\":\n return \"stop\";\n case \"length\":\n return \"length\";\n case \"tool_calls\":\n return \"tool-calls\";\n case \"content_filter\":\n return \"content-filter\";\n default:\n return \"unknown\";\n }\n}\n","import type { LanguageModelV1Prompt } from \"@ai-sdk/provider\";\n\nexport type OpenAIMessage =\n | { role: \"system\"; content: string }\n | { role: \"user\"; content: string | OpenAIContentPart[] }\n | { role: \"assistant\"; content: string | null; tool_calls?: OpenAIToolCall[] }\n | { role: \"tool\"; tool_call_id: string; content: string };\n\nexport type OpenAIContentPart =\n | { type: \"text\"; text: string }\n | {\n type: \"image_url\";\n image_url: { url: string; detail?: \"auto\" | \"low\" | \"high\" };\n };\n\nexport type OpenAIToolCall = {\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n};\n\nexport function convertToOpenAIMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAIMessage[] {\n const messages: OpenAIMessage[] = [];\n\n for (const message of prompt) {\n switch (message.role) {\n case \"system\":\n messages.push({ role: \"system\", content: message.content });\n break;\n\n case \"user\":\n messages.push({\n role: \"user\",\n content: message.content.map((part): OpenAIContentPart => {\n switch (part.type) {\n case \"text\":\n return { type: \"text\", text: part.text };\n case \"image\":\n return {\n type: \"image_url\",\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${part.mimeType ?? 
\"image/png\"};base64,${Buffer.from(part.image).toString(\"base64\")}`,\n },\n };\n default:\n throw new Error(\n `Unsupported user content part type: ${(part as { type: string }).type}`,\n );\n }\n }),\n });\n break;\n\n case \"assistant\": {\n const textContent = message.content\n .filter((p): p is { type: \"text\"; text: string } => p.type === \"text\")\n .map((p) => p.text)\n .join(\"\");\n\n const toolCalls = message.content\n .filter(\n (\n p,\n ): p is {\n type: \"tool-call\";\n toolCallId: string;\n toolName: string;\n args: unknown;\n } => p.type === \"tool-call\",\n )\n .map((tc) => {\n let arguments_str = \"{}\";\n try {\n arguments_str = JSON.stringify(tc.args);\n } catch {\n arguments_str = \"{}\";\n }\n return {\n id: tc.toolCallId,\n type: \"function\" as const,\n function: { name: tc.toolName, arguments: arguments_str },\n };\n });\n\n messages.push({\n role: \"assistant\",\n content: textContent || null,\n ...(toolCalls.length > 0 ? { tool_calls: toolCalls } : {}),\n });\n break;\n }\n\n case \"tool\":\n for (const result of message.content) {\n let content = \"{}\";\n if (typeof result.result === \"string\") {\n content = result.result;\n } else {\n try {\n content = JSON.stringify(result.result);\n } catch {\n content = \"{}\";\n }\n }\n messages.push({\n role: \"tool\",\n tool_call_id: result.toolCallId,\n content,\n });\n }\n break;\n }\n }\n\n return messages;\n}\n","import type {\n LanguageModelV1FunctionTool,\n LanguageModelV1ToolChoice,\n} from \"@ai-sdk/provider\";\n\nexport type OpenAITool = {\n type: \"function\";\n function: {\n name: string;\n description?: string;\n parameters: Record<string, unknown>;\n };\n};\n\nexport type OpenAIToolChoice =\n | \"none\"\n | \"auto\"\n | \"required\"\n | { type: \"function\"; function: { name: string } };\n\nexport function convertToOpenAITools(\n tools: LanguageModelV1FunctionTool[] | undefined,\n): OpenAITool[] | undefined {\n if (!tools || tools.length === 0) return undefined;\n\n return 
tools.map((tool) => ({\n type: \"function\" as const,\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters as Record<string, unknown>,\n },\n }));\n}\n\nexport function convertToOpenAIToolChoice(\n toolChoice: LanguageModelV1ToolChoice | undefined,\n): OpenAIToolChoice | undefined {\n if (!toolChoice) return undefined;\n\n switch (toolChoice.type) {\n case \"none\":\n return \"none\";\n case \"auto\":\n return \"auto\";\n case \"required\":\n return \"required\";\n case \"tool\":\n return { type: \"function\", function: { name: toolChoice.toolName } };\n default:\n return undefined;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,IAAAA,yBAAiD;;;ACOjD,IAAAC,yBAMO;;;ACdP,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA,IAClB,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,MAAM,aAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IACrC,OAAO,aAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EACxC,CAAC;AACH,CAAC;AAIM,IAAM,mCAA+B,sDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,CAAC,UAAU,MAAM,MAAM;AACzC,CAAC;;;ACjBD,IAAAC,cAAkB;AAEX,IAAM,2BAA2B,cAAE,OAAO;AAAA,EAC/C,IAAI,cAAE,OAAO;AAAA,EACb,QAAQ,cAAE,QAAQ,iBAAiB,EAAE,SAAS;AAAA,EAC9C,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,OAAO,cAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,MAChB,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,SAAS;AAAA,MACnC,UAAU,cAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,IACxC,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,IAC5B,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,EACpC,CAAC,EACA,SAAS;AACd,CAAC;AAIM,IAAM,wBAAwB,cAAE,OAAO;AAAA,EAC5C,IAAI,cAAE,OAAO;AAAA,EACb,Q
AAQ,cAAE,QAAQ,uBAAuB,EAAE,SAAS;AAAA,EACpD,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,OAAO,cAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,MAChB,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,QAAQ,WAAW,EAAE,SAAS;AAAA,QACtC,SAAS,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,OAAO,cAAE,OAAO;AAAA,YAChB,IAAI,cAAE,OAAO,EAAE,SAAS;AAAA,YACxB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,YACrC,UAAU,cACP,OAAO;AAAA,cACN,MAAM,cAAE,OAAO,EAAE,SAAS;AAAA,cAC1B,WAAW,cAAE,OAAO,EAAE,SAAS;AAAA,YACjC,CAAC,EACA,SAAS;AAAA,UACd,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AC3EM,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ACIO,SAAS,wBACd,QACiB;AACjB,QAAM,WAA4B,CAAC;AAEnC,aAAW,WAAW,QAAQ;AAC5B,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK;AACH,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,QAAQ,CAAC;AAC1D;AAAA,MAEF,KAAK;AACH,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,QAAQ,IAAI,CAAC,SAA4B;AACxD,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK;AACH,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC,KAAK;AACH,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,QAAQ,KAAK,YAAY,WAAW,WAAW,OAAO,KAAK,KAAK,KAAK,EAAE,SAAS,QAAQ,CAAC;AAAA,kBACjG;AAAA,gBACF;AAAA,cACF;AACE,sBAAM,IAAI;AAAA,kBACR,uCAAwC,KAA0B,IAAI;AAAA,gBACxE;AAAA,YACJ;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MAEF,KAAK,aAAa;AAChB,cAAM,cAAc,QAAQ,QACzB,OAAO,CAAC,MAA2C,EAAE,SAAS,MAAM,EACpE,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,EAAE;AAEV,cAAM,YAAY,QAAQ,QACvB;AAAA,UACC,CACE,MAMG,EAAE,SAAS;AAAA,QAClB,EACC,IAAI,CAAC,OAAO;AACX,cAAI,gBAAgB;AACpB,cAAI;AACF,4BAAgB,KAAK,UAAU,GAAG,IAAI;AAAA,UACxC,QAAQ;AACN,4BAAgB;AAAA,UAClB;AACA,iBAAO;AAAA,YACL,IAAI,GAAG;AAAA,YACP,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,GAAG,UAAU,WAAW,cAA
c;AAAA,UAC1D;AAAA,QACF,CAAC;AAEH,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,eAAe;AAAA,UACxB,GAAI,UAAU,SAAS,IAAI,EAAE,YAAY,UAAU,IAAI,CAAC;AAAA,QAC1D,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK;AACH,mBAAW,UAAU,QAAQ,SAAS;AACpC,cAAI,UAAU;AACd,cAAI,OAAO,OAAO,WAAW,UAAU;AACrC,sBAAU,OAAO;AAAA,UACnB,OAAO;AACL,gBAAI;AACF,wBAAU,KAAK,UAAU,OAAO,MAAM;AAAA,YACxC,QAAQ;AACN,wBAAU;AAAA,YACZ;AAAA,UACF;AACA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB;AAAA,UACF,CAAC;AAAA,QACH;AACA;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AACT;;;ACpGO,SAAS,qBACd,OAC0B;AAC1B,MAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AAEzC,SAAO,MAAM,IAAI,CAAC,UAAU;AAAA,IAC1B,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,EAAE;AACJ;AAEO,SAAS,0BACd,YAC8B;AAC9B,MAAI,CAAC,WAAY,QAAO;AAExB,UAAQ,WAAW,MAAM;AAAA,IACvB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IACrE;AACE,aAAO;AAAA,EACX;AACF;;;ALhBO,IAAM,2BAAN,MAA0D;AAAA,EAK/D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAAA,EAChB;AAAA,EARM,uBAAuB;AAAA,EACvB,8BAA8B;AAAA,EAC9B,oBAAoB;AAAA,EAQ7B,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,4BAAqC;AACvC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,WAAW,SAYd;AACD,UAAM,OAAO,KAAK,iBAAiB,SAAS,KAAK;AAEjD,UAAM,EAAE,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC9C,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,MACnB,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO,QAAQ,WAAW;AAAA,MAChC,WAAW,OAAO,QAAQ,YAAY,IAAI,CAAC,QAAQ;AAAA,QACjD,cAAc;AAAA,QACd,YAAY,GAAG;AAAA,QACf,UAAU,GAAG,SAAS;AAAA,QACtB,MAAM,GAAG,SAAS;AAAA,MACpB,EAAE;AAAA,MACF,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,OAAO,iBAAiB;AAAA,QAC/C,kBAAkB,SAAS,OAAO,qBAAqB;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,QAAQ,QAAQ,aAAa,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,SAIZ;AACD,UAAM,OAAO,KAAK,iBAAiB,SAAS,IAAI;AAEhD,UAAM,EAAE,OAAO
,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC9C,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,MACnB,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,kBAGF,oBAAI,IAAI;AAEZ,UAAM,kBAAkB,IAAI,gBAG1B;AAAA,MACA,UAAU,aAAa,YAAY;AAEjC,YAAI,CAAC,YAAY,SAAS;AACxB;AAAA,QACF;AAEA,cAAM,QAAQ,YAAY;AAC1B,cAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,YAAI,CAAC,OAAQ;AAGb,YAAI,OAAO,MAAM,SAAS;AACxB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,WAAW,OAAO,MAAM;AAAA,UAC1B,CAAC;AAAA,QACH;AAGA,YAAI,OAAO,MAAM,YAAY;AAC3B,qBAAW,MAAM,OAAO,MAAM,YAAY;AACxC,gBAAI,SAAS,gBAAgB,IAAI,GAAG,KAAK;AAEzC,gBAAI,CAAC,QAAQ;AACX,uBAAS,EAAE,IAAI,GAAG,UAAM,mCAAW,GAAG,MAAM,IAAI,WAAW,GAAG;AAC9D,8BAAgB,IAAI,GAAG,OAAO,MAAM;AAAA,YACtC;AAEA,gBAAI,GAAG,GAAI,QAAO,KAAK,GAAG;AAC1B,gBAAI,GAAG,UAAU,KAAM,QAAO,QAAQ,GAAG,SAAS;AAClD,gBAAI,GAAG,UAAU;AACf,qBAAO,aAAa,GAAG,SAAS;AAAA,UACpC;AAAA,QACF;AAGA,YAAI,OAAO,eAAe;AAExB,qBAAW,CAAC,EAAE,MAAM,KAAK,iBAAiB;AACxC,gBAAI,OAAO,MAAM;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,cAAc;AAAA,gBACd,YAAY,OAAO;AAAA,gBACnB,UAAU,OAAO;AAAA,gBACjB,MAAM,OAAO;AAAA,cACf,CAAC;AAAA,YACH;AAAA,UACF;AAEA,0BAAgB,MAAM;AAEtB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,uBAAuB,OAAO,aAAa;AAAA,YACzD,OAAO;AAAA,cACL,cAAc,MAAM,OAAO,iBAAiB;AAAA,cAC5C,kBAAkB,MAAM,OAAO,qBAAqB;AAAA,YACtD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA,MAAM,YAAY;AAEhB,mBAAW,CAAC,EAAE,MAAM,KAAK,iBAAiB;AACxC,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,YAAY,OAAO;AAAA,cACnB,UAAU,OAAO;AAAA,cACjB,MAAM,OAAO;AAAA,YACf,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,YAAY,eAAe;AAAA,MAC5C,SAAS,EAAE,WAAW,QAAQ,QAAQ,aAAa,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA,EAEQ,iBACN,SACA,QACA;AAEA,UAAM,QACJ,QAAQ,KAAK,SAAS,YAClB,KAAK,oBAAoB,QAAQ,KAAK,KAAK,IAC3C;AACN,UAAM,aACJ,QAAQ,KAAK,SAAS,YAAY,QAAQ,KAAK,aAAa;AAG9D,UAAM,iBACJ,QAAQ,KAAK,SAAS,gBAClB,EAAE,MAAM,cAAuB,IAC/B;AAEN,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ,UAAU,wBAAwB,QAAQ,MAAM;AAAA,MAChD;AAAA,IACF;AAGA,QAAI
,OAAQ,MAAK,iBAAiB,EAAE,eAAe,KAAK;AACxD,QAAI,QAAQ,gBAAgB;AAC1B,WAAK,cAAc,QAAQ;AAC7B,QAAI,QAAQ,cAAc,OAAW,MAAK,aAAa,QAAQ;AAC/D,QAAI,QAAQ,SAAS,OAAW,MAAK,QAAQ,QAAQ;AACrD,QAAI,QAAQ,qBAAqB;AAC/B,WAAK,oBAAoB,QAAQ;AACnC,QAAI,QAAQ,oBAAoB;AAC9B,WAAK,mBAAmB,QAAQ;AAClC,QAAI,QAAQ,kBAAkB,OAAW,MAAK,OAAO,QAAQ;AAC7D,QAAI,QAAQ,SAAS,OAAW,MAAK,OAAO,QAAQ;AAEpD,UAAM,iBAAiB,qBAAqB,KAAK;AACjD,QAAI,mBAAmB,OAAW,MAAK,QAAQ;AAE/C,UAAM,sBAAsB,0BAA0B,UAAU;AAChE,QAAI,wBAAwB;AAC1B,WAAK,cAAc;AAErB,QAAI,mBAAmB,OAAW,MAAK,kBAAkB;AACzD,QAAI,KAAK,SAAS,SAAS,OAAW,MAAK,OAAO,KAAK,SAAS;AAChE,QAAI,KAAK,SAAS,aAAa;AAC7B,WAAK,WAAW,KAAK,SAAS;AAChC,QAAI,KAAK,SAAS,gBAAgB;AAChC,WAAK,eAAe,KAAK,SAAS;AAEpC,WAAO;AAAA,EACT;AAAA,EAEQ,oBACN,OAC2C;AAC3C,QAAI,CAAC,MAAO,QAAO;AACnB,WAAO,MAAM;AAAA,MACX,CAAC,SAA8C,KAAK,SAAS;AAAA,IAC/D;AAAA,EACF;AACF;;;ADpPO,SAAS,cACd,UAAmC,CAAC,GACnB;AACjB,QAAM,cACJ,6CAAqB,QAAQ,OAAO,KAAK;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,GAAG,QAAQ;AAAA,IACX,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,gBAAgB;AAAA,EAClB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAA4B,OAAO;AAAA,IACvC,CAAC,SAAyB,aACxB,gBAAgB,SAAS,QAAQ;AAAA,IACnC;AAAA,MACE,MAAM;AAAA,MACN,eAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider_utils","import_provider_utils","import_zod"]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { LanguageModelV1 } from '@ai-sdk/provider';
|
|
2
|
+
|
|
3
|
+
type ApertisModelId = "gpt-5.2" | "gpt-5.2-codex" | "gpt-5.1" | "claude-opus-4-5-20251101" | "claude-sonnet-4.5" | "claude-haiku-4.5" | "gemini-3-pro-preview" | "gemini-3-flash-preview" | "gemini-2.5-flash-preview" | (string & {});
|
|
4
|
+
interface ApertisProviderSettings {
|
|
5
|
+
/**
|
|
6
|
+
* Apertis API key. Default: APERTIS_API_KEY env var
|
|
7
|
+
*/
|
|
8
|
+
apiKey?: string;
|
|
9
|
+
/**
|
|
10
|
+
* Base URL for Apertis API.
|
|
11
|
+
* @default "https://api.apertis.ai/v1"
|
|
12
|
+
*/
|
|
13
|
+
baseURL?: string;
|
|
14
|
+
/**
|
|
15
|
+
* Custom headers to include in requests.
|
|
16
|
+
*/
|
|
17
|
+
headers?: Record<string, string>;
|
|
18
|
+
/**
|
|
19
|
+
* Custom fetch implementation for testing or middleware.
|
|
20
|
+
*/
|
|
21
|
+
fetch?: typeof fetch;
|
|
22
|
+
}
|
|
23
|
+
interface ApertisChatSettings {
|
|
24
|
+
/**
|
|
25
|
+
* A unique identifier for the user (for abuse monitoring).
|
|
26
|
+
*/
|
|
27
|
+
user?: string;
|
|
28
|
+
/**
|
|
29
|
+
* Whether to return log probabilities of output tokens.
|
|
30
|
+
*/
|
|
31
|
+
logprobs?: boolean;
|
|
32
|
+
/**
|
|
33
|
+
* Number of most likely tokens to return at each position (0-20).
|
|
34
|
+
* Requires logprobs to be true.
|
|
35
|
+
*/
|
|
36
|
+
topLogprobs?: number;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
interface ApertisProvider {
|
|
40
|
+
/**
|
|
41
|
+
* Creates a chat model for text generation.
|
|
42
|
+
*/
|
|
43
|
+
(modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;
|
|
44
|
+
/**
|
|
45
|
+
* Creates a chat model for text generation.
|
|
46
|
+
*/
|
|
47
|
+
chat(modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;
|
|
48
|
+
/**
|
|
49
|
+
* Creates a chat model for text generation (alias for languageModel).
|
|
50
|
+
*/
|
|
51
|
+
languageModel(modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;
|
|
52
|
+
}
|
|
53
|
+
declare function createApertis(options?: ApertisProviderSettings): ApertisProvider;
|
|
54
|
+
/**
|
|
55
|
+
* Default Apertis provider instance.
|
|
56
|
+
*/
|
|
57
|
+
declare const apertis: ApertisProvider;
|
|
58
|
+
|
|
59
|
+
export { type ApertisChatSettings, type ApertisModelId, type ApertisProvider, type ApertisProviderSettings, apertis, createApertis };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { LanguageModelV1 } from '@ai-sdk/provider';
|
|
2
|
+
|
|
3
|
+
type ApertisModelId = "gpt-5.2" | "gpt-5.2-codex" | "gpt-5.1" | "claude-opus-4-5-20251101" | "claude-sonnet-4.5" | "claude-haiku-4.5" | "gemini-3-pro-preview" | "gemini-3-flash-preview" | "gemini-2.5-flash-preview" | (string & {});
|
|
4
|
+
interface ApertisProviderSettings {
|
|
5
|
+
/**
|
|
6
|
+
* Apertis API key. Default: APERTIS_API_KEY env var
|
|
7
|
+
*/
|
|
8
|
+
apiKey?: string;
|
|
9
|
+
/**
|
|
10
|
+
* Base URL for Apertis API.
|
|
11
|
+
* @default "https://api.apertis.ai/v1"
|
|
12
|
+
*/
|
|
13
|
+
baseURL?: string;
|
|
14
|
+
/**
|
|
15
|
+
* Custom headers to include in requests.
|
|
16
|
+
*/
|
|
17
|
+
headers?: Record<string, string>;
|
|
18
|
+
/**
|
|
19
|
+
* Custom fetch implementation for testing or middleware.
|
|
20
|
+
*/
|
|
21
|
+
fetch?: typeof fetch;
|
|
22
|
+
}
|
|
23
|
+
interface ApertisChatSettings {
|
|
24
|
+
/**
|
|
25
|
+
* A unique identifier for the user (for abuse monitoring).
|
|
26
|
+
*/
|
|
27
|
+
user?: string;
|
|
28
|
+
/**
|
|
29
|
+
* Whether to return log probabilities of output tokens.
|
|
30
|
+
*/
|
|
31
|
+
logprobs?: boolean;
|
|
32
|
+
/**
|
|
33
|
+
* Number of most likely tokens to return at each position (0-20).
|
|
34
|
+
* Requires logprobs to be true.
|
|
35
|
+
*/
|
|
36
|
+
topLogprobs?: number;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
interface ApertisProvider {
|
|
40
|
+
/**
|
|
41
|
+
* Creates a chat model for text generation.
|
|
42
|
+
*/
|
|
43
|
+
(modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;
|
|
44
|
+
/**
|
|
45
|
+
* Creates a chat model for text generation.
|
|
46
|
+
*/
|
|
47
|
+
chat(modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;
|
|
48
|
+
/**
|
|
49
|
+
* Creates a chat model for text generation (alias for languageModel).
|
|
50
|
+
*/
|
|
51
|
+
languageModel(modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;
|
|
52
|
+
}
|
|
53
|
+
declare function createApertis(options?: ApertisProviderSettings): ApertisProvider;
|
|
54
|
+
/**
|
|
55
|
+
* Default Apertis provider instance.
|
|
56
|
+
*/
|
|
57
|
+
declare const apertis: ApertisProvider;
|
|
58
|
+
|
|
59
|
+
export { type ApertisChatSettings, type ApertisModelId, type ApertisProvider, type ApertisProviderSettings, apertis, createApertis };
|