@apertis/ai-sdk-provider 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +88 -0
- package/dist/index.cjs +441 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +59 -0
- package/dist/index.d.ts +59 -0
- package/dist/index.js +418 -0
- package/dist/index.js.map +1 -0
- package/package.json +59 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,418 @@
|
|
|
1
|
+
// src/apertis-provider.ts
|
|
2
|
+
import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
|
|
3
|
+
|
|
4
|
+
// src/apertis-chat-language-model.ts
|
|
5
|
+
import {
|
|
6
|
+
createEventSourceResponseHandler,
|
|
7
|
+
createJsonResponseHandler,
|
|
8
|
+
generateId,
|
|
9
|
+
postJsonToApi
|
|
10
|
+
} from "@ai-sdk/provider-utils";
|
|
11
|
+
|
|
12
|
+
// src/apertis-error.ts
|
|
13
|
+
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
|
|
14
|
+
import { z } from "zod";
|
|
15
|
+
// Inner payload of an Apertis error response (OpenAI-compatible shape).
const apertisErrorPayloadSchema = z.object({
  message: z.string(),
  type: z.string().optional(),
  code: z.string().nullable().optional(),
  param: z.string().nullable().optional()
});

// Full error envelope returned by the API: `{ "error": { ... } }`.
var apertisErrorSchema = z.object({
  error: apertisErrorPayloadSchema
});

// Turns a failed HTTP response into an error whose message is the
// server-provided `error.message`.
var apertisFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: apertisErrorSchema,
  errorToMessage: ({ error }) => error.message
});
|
|
27
|
+
|
|
28
|
+
// src/schemas/chat-response.ts
|
|
29
|
+
import { z as z2 } from "zod";
|
|
30
|
+
// Completed (non-streaming) tool call entry.
const completedToolCallSchema = z2.object({
  id: z2.string(),
  type: z2.literal("function"),
  function: z2.object({
    name: z2.string(),
    arguments: z2.string()
  })
});

// Single choice of a non-streaming chat completion.
const chatChoiceSchema = z2.object({
  index: z2.number(),
  message: z2.object({
    role: z2.literal("assistant"),
    content: z2.string().nullable(),
    tool_calls: z2.array(completedToolCallSchema).optional()
  }),
  finish_reason: z2.string().nullable(),
  logprobs: z2.any().nullable().optional()
});

// OpenAI-compatible non-streaming chat completion response.
var openAIChatResponseSchema = z2.object({
  id: z2.string(),
  object: z2.literal("chat.completion").optional(),
  created: z2.number().optional(),
  model: z2.string().optional(),
  choices: z2.array(chatChoiceSchema),
  usage: z2.object({
    prompt_tokens: z2.number(),
    completion_tokens: z2.number(),
    total_tokens: z2.number().optional()
  }).optional()
});
|
|
62
|
+
// Streaming tool-call delta: every field except `index` is optional, since
// fragments arrive incrementally and are keyed by `index` for accumulation.
const toolCallDeltaSchema = z2.object({
  index: z2.number(),
  id: z2.string().optional(),
  type: z2.literal("function").optional(),
  function: z2.object({
    name: z2.string().optional(),
    arguments: z2.string().optional()
  }).optional()
});

// Single choice of a streaming chunk.
const chunkChoiceSchema = z2.object({
  index: z2.number(),
  delta: z2.object({
    role: z2.literal("assistant").optional(),
    content: z2.string().nullable().optional(),
    tool_calls: z2.array(toolCallDeltaSchema).optional()
  }),
  finish_reason: z2.string().nullable().optional()
});

// OpenAI-compatible streaming chat completion chunk (SSE event payload).
var openAIChatChunkSchema = z2.object({
  id: z2.string(),
  object: z2.literal("chat.completion.chunk").optional(),
  created: z2.number().optional(),
  model: z2.string().optional(),
  choices: z2.array(chunkChoiceSchema),
  usage: z2.object({
    prompt_tokens: z2.number(),
    completion_tokens: z2.number()
  }).nullish()
});
|
|
93
|
+
|
|
94
|
+
// src/utils/map-finish-reason.ts
|
|
95
|
+
// Translation table from OpenAI-style finish reasons to AI-SDK finish reasons.
// A Map (rather than an object literal) guarantees no prototype keys such as
// "toString" leak through as false matches.
var finishReasonTable = new Map([
  ["stop", "stop"],
  ["length", "length"],
  ["tool_calls", "tool-calls"],
  ["content_filter", "content-filter"]
]);

/**
 * Maps an OpenAI-compatible `finish_reason` string to the AI-SDK
 * finish-reason vocabulary. Unrecognized, null, or undefined inputs
 * map to "unknown".
 */
function mapApertisFinishReason(finishReason) {
  return finishReasonTable.get(finishReason) ?? "unknown";
}
|
|
109
|
+
|
|
110
|
+
// src/utils/convert-to-openai-messages.ts
|
|
111
|
+
/**
 * Converts an AI-SDK prompt into the OpenAI chat-completions message format.
 *
 * Fix: the original wrapped `JSON.stringify` in try/catch with a `"{}"`
 * fallback, but `JSON.stringify(undefined)` returns `undefined` without
 * throwing, so tool-call `arguments` and tool-result `content` could end up
 * `undefined` instead of `"{}"`. `safeJsonStringify` now also coalesces an
 * `undefined` result to `"{}"`.
 *
 * @param prompt - AI-SDK prompt: array of {role, content} messages.
 * @returns Array of OpenAI-format messages.
 * @throws Error for unsupported user content part types.
 */
function convertToOpenAIMessages(prompt) {
  const messages = [];
  for (const message of prompt) {
    switch (message.role) {
      case "system":
        messages.push({ role: "system", content: message.content });
        break;
      case "user":
        messages.push({
          role: "user",
          content: message.content.map(convertUserContentPart)
        });
        break;
      case "assistant": {
        // Concatenate all text parts; collect tool-call parts separately.
        const textContent = message.content.filter((p) => p.type === "text").map((p) => p.text).join("");
        const toolCalls = message.content.filter(
          (p) => p.type === "tool-call"
        ).map((tc) => ({
          id: tc.toolCallId,
          type: "function",
          function: { name: tc.toolName, arguments: safeJsonStringify(tc.args) }
        }));
        messages.push({
          role: "assistant",
          // OpenAI expects null (not "") when the assistant turn is tool-only.
          content: textContent || null,
          // Only attach tool_calls when present; an empty array is invalid.
          ...toolCalls.length > 0 ? { tool_calls: toolCalls } : {}
        });
        break;
      }
      case "tool":
        // One OpenAI tool message per tool result, keyed by tool_call_id.
        for (const result of message.content) {
          messages.push({
            role: "tool",
            tool_call_id: result.toolCallId,
            content: typeof result.result === "string" ? result.result : safeJsonStringify(result.result)
          });
        }
        break;
    }
  }
  return messages;
}

// Converts one user content part (text or image) to an OpenAI content part.
// Images become image_url entries: URLs pass through, raw bytes become a
// base64 data URI (mimeType defaults to image/png).
function convertUserContentPart(part) {
  switch (part.type) {
    case "text":
      return { type: "text", text: part.text };
    case "image":
      return {
        type: "image_url",
        image_url: {
          url: part.image instanceof URL ? part.image.toString() : `data:${part.mimeType ?? "image/png"};base64,${Buffer.from(part.image).toString("base64")}`
        }
      };
    default:
      throw new Error(
        `Unsupported user content part type: ${part.type}`
      );
  }
}

// Serializes a value to JSON, falling back to "{}" both when stringify
// throws (cycles, BigInt) and when it yields undefined (value undefined).
function safeJsonStringify(value) {
  try {
    return JSON.stringify(value) ?? "{}";
  } catch {
    return "{}";
  }
}
|
|
187
|
+
|
|
188
|
+
// src/utils/convert-to-openai-tools.ts
|
|
189
|
+
/**
 * Converts AI-SDK function tools to the OpenAI `tools` request format.
 * Returns undefined when the list is missing or empty, so callers can
 * omit the field entirely from the request body.
 */
function convertToOpenAITools(tools) {
  if (!tools?.length) return void 0;
  const converted = [];
  for (const { name, description, parameters } of tools) {
    converted.push({
      type: "function",
      function: { name, description, parameters }
    });
  }
  return converted;
}
|
|
200
|
+
/**
 * Converts an AI-SDK tool choice to the OpenAI `tool_choice` request format.
 * "none" / "auto" / "required" pass through as strings; "tool" pins a
 * specific function by name. Anything else yields undefined so the field
 * is omitted from the request.
 */
function convertToOpenAIToolChoice(toolChoice) {
  if (!toolChoice) return void 0;
  if (toolChoice.type === "tool") {
    return { type: "function", function: { name: toolChoice.toolName } };
  }
  const passthrough = ["none", "auto", "required"];
  return passthrough.includes(toolChoice.type) ? toolChoice.type : void 0;
}
|
|
215
|
+
|
|
216
|
+
// src/apertis-chat-language-model.ts
|
|
217
|
+
// LanguageModelV1 implementation for the Apertis OpenAI-compatible chat API.
// Translates AI-SDK call options into /chat/completions requests and maps
// responses (both one-shot and SSE streaming) back to AI-SDK structures.
var ApertisChatLanguageModel = class {
  // modelId: model identifier sent as `model` in the request body.
  // settings: per-model options read in buildRequestBody (user, logprobs, topLogprobs).
  // config: provider wiring — provider name, baseURL, headers() factory, optional fetch.
  constructor(modelId, settings, config) {
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  specificationVersion = "v1";
  defaultObjectGenerationMode = "json";
  supportsImageUrls = true;
  get provider() {
    return this.config.provider;
  }
  get supportsStructuredOutputs() {
    return true;
  }
  // Non-streaming generation: POST the built body, validate the JSON response
  // against openAIChatResponseSchema, and map the first choice into the
  // AI-SDK result shape (text, toolCalls, finishReason, usage, rawCall).
  async doGenerate(options) {
    const body = this.buildRequestBody(options, false);
    const { value: response } = await postJsonToApi({
      url: `${this.config.baseURL}/chat/completions`,
      headers: this.config.headers(),
      body,
      failedResponseHandler: apertisFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAIChatResponseSchema
      ),
      fetch: this.config.fetch,
      abortSignal: options.abortSignal
    });
    // Only the first choice is surfaced; assumes n=1 — TODO confirm the API
    // never returns multiple choices here.
    const choice = response.choices[0];
    return {
      // null content (tool-only answers) becomes undefined for the SDK.
      text: choice.message.content ?? void 0,
      toolCalls: choice.message.tool_calls?.map((tc) => ({
        toolCallType: "function",
        toolCallId: tc.id,
        toolName: tc.function.name,
        args: tc.function.arguments
      })),
      finishReason: mapApertisFinishReason(choice.finish_reason),
      usage: {
        // usage is optional in the schema; default token counts to 0.
        promptTokens: response.usage?.prompt_tokens ?? 0,
        completionTokens: response.usage?.completion_tokens ?? 0
      },
      rawCall: { rawPrompt: options.prompt, rawSettings: body }
    };
  }
  // Streaming generation: POST with stream=true, parse SSE events against
  // openAIChatChunkSchema, and transform them into AI-SDK stream parts.
  // Partial tool-call fragments are accumulated per delta `index` and only
  // emitted as complete tool-call parts when a finish_reason arrives (or,
  // as a fallback, when the stream closes).
  async doStream(options) {
    const body = this.buildRequestBody(options, true);
    const { value: response } = await postJsonToApi({
      url: `${this.config.baseURL}/chat/completions`,
      headers: this.config.headers(),
      body,
      failedResponseHandler: apertisFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openAIChatChunkSchema
      ),
      fetch: this.config.fetch,
      abortSignal: options.abortSignal
    });
    // index -> { id, name, arguments } accumulator for partial tool calls.
    const toolCallBuffers = /* @__PURE__ */ new Map();
    const transformStream = new TransformStream({
      transform(parseResult, controller) {
        // Chunks that fail schema validation are silently dropped.
        if (!parseResult.success) {
          return;
        }
        const chunk = parseResult.value;
        const choice = chunk.choices[0];
        if (!choice) return;
        // Forward text deltas as-is.
        if (choice.delta.content) {
          controller.enqueue({
            type: "text-delta",
            textDelta: choice.delta.content
          });
        }
        // Accumulate tool-call fragments keyed by index; id/name/arguments
        // may each arrive across several chunks.
        if (choice.delta.tool_calls) {
          for (const tc of choice.delta.tool_calls) {
            let buffer = toolCallBuffers.get(tc.index);
            if (!buffer) {
              // Generate a fallback id in case the stream never supplies one.
              buffer = { id: tc.id ?? generateId(), name: "", arguments: "" };
              toolCallBuffers.set(tc.index, buffer);
            }
            if (tc.id) buffer.id = tc.id;
            if (tc.function?.name) buffer.name += tc.function.name;
            if (tc.function?.arguments)
              buffer.arguments += tc.function.arguments;
          }
        }
        // On finish: flush completed tool calls (only those that received a
        // name), then emit the final finish part with usage totals.
        if (choice.finish_reason) {
          for (const [, buffer] of toolCallBuffers) {
            if (buffer.name) {
              controller.enqueue({
                type: "tool-call",
                toolCallType: "function",
                toolCallId: buffer.id,
                toolName: buffer.name,
                args: buffer.arguments
              });
            }
          }
          // Clear so flush() does not re-emit the same calls.
          toolCallBuffers.clear();
          controller.enqueue({
            type: "finish",
            finishReason: mapApertisFinishReason(choice.finish_reason),
            usage: {
              promptTokens: chunk.usage?.prompt_tokens ?? 0,
              completionTokens: chunk.usage?.completion_tokens ?? 0
            }
          });
        }
      },
      // Safety net: if the stream ends without a finish_reason, emit any
      // buffered tool calls so they are not lost.
      flush(controller) {
        for (const [, buffer] of toolCallBuffers) {
          if (buffer.name) {
            controller.enqueue({
              type: "tool-call",
              toolCallType: "function",
              toolCallId: buffer.id,
              toolName: buffer.name,
              args: buffer.arguments
            });
          }
        }
      }
    });
    return {
      stream: response.pipeThrough(transformStream),
      rawCall: { rawPrompt: options.prompt, rawSettings: body }
    };
  }
  // Assembles the /chat/completions request body from AI-SDK call options.
  // Optional fields are only attached when defined so the wire payload never
  // carries explicit undefined values.
  buildRequestBody(options, stream) {
    // Tools/toolChoice only apply in "regular" mode; "object-json" mode
    // instead requests a JSON-object response format.
    const tools = options.mode.type === "regular" ? this.filterFunctionTools(options.mode.tools) : void 0;
    const toolChoice = options.mode.type === "regular" ? options.mode.toolChoice : void 0;
    const responseFormat = options.mode.type === "object-json" ? { type: "json_object" } : void 0;
    const body = {
      model: this.modelId,
      messages: convertToOpenAIMessages(options.prompt),
      stream
    };
    // Ask the server to include usage in the final streamed chunk.
    if (stream) body.stream_options = { include_usage: true };
    if (options.temperature !== void 0)
      body.temperature = options.temperature;
    if (options.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options.topP !== void 0) body.top_p = options.topP;
    if (options.frequencyPenalty !== void 0)
      body.frequency_penalty = options.frequencyPenalty;
    if (options.presencePenalty !== void 0)
      body.presence_penalty = options.presencePenalty;
    if (options.stopSequences !== void 0) body.stop = options.stopSequences;
    if (options.seed !== void 0) body.seed = options.seed;
    const convertedTools = convertToOpenAITools(tools);
    if (convertedTools !== void 0) body.tools = convertedTools;
    const convertedToolChoice = convertToOpenAIToolChoice(toolChoice);
    if (convertedToolChoice !== void 0)
      body.tool_choice = convertedToolChoice;
    if (responseFormat !== void 0) body.response_format = responseFormat;
    if (this.settings.user !== void 0) body.user = this.settings.user;
    if (this.settings.logprobs !== void 0)
      body.logprobs = this.settings.logprobs;
    if (this.settings.topLogprobs !== void 0)
      body.top_logprobs = this.settings.topLogprobs;
    return body;
  }
  // Keeps only function-type tools; other tool kinds (e.g. provider-defined)
  // are dropped from the request.
  filterFunctionTools(tools) {
    if (!tools) return void 0;
    return tools.filter(
      (tool) => tool.type === "function"
    );
  }
};
|
|
385
|
+
|
|
386
|
+
// src/apertis-provider.ts
|
|
387
|
+
/**
 * Creates an Apertis provider. The returned value is callable —
 * `provider(modelId, settings)` — and also exposes `chat` and
 * `languageModel` aliases that build the same chat model.
 *
 * Options: apiKey (falls back to the APERTIS_API_KEY env var at request
 * time), baseURL (default https://api.apertis.ai/v1, trailing slash
 * stripped), headers, fetch.
 */
function createApertis(options = {}) {
  const baseURL = withoutTrailingSlash(options.baseURL) ?? "https://api.apertis.ai/v1";
  // Resolved lazily inside the headers factory so the env var is read per
  // request, not at provider-creation time.
  const resolveApiKey = () => loadApiKey({
    apiKey: options.apiKey,
    environmentVariableName: "APERTIS_API_KEY",
    description: "Apertis API key"
  });
  // Caller headers are spread first, so Authorization/Content-Type win.
  const getHeaders = () => ({
    ...options.headers,
    Authorization: `Bearer ${resolveApiKey()}`,
    "Content-Type": "application/json"
  });
  const createChatModel = (modelId, settings = {}) => new ApertisChatLanguageModel(modelId, settings, {
    provider: "apertis.chat",
    baseURL,
    headers: getHeaders,
    fetch: options.fetch
  });
  // Callable provider with method aliases attached directly.
  const provider = (modelId, settings) => createChatModel(modelId, settings);
  provider.chat = createChatModel;
  provider.languageModel = createChatModel;
  return provider;
}
// Default provider instance using environment-based configuration.
var apertis = createApertis();
export {
  apertis,
  createApertis
};
|
|
418
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/apertis-provider.ts","../src/apertis-chat-language-model.ts","../src/apertis-error.ts","../src/schemas/chat-response.ts","../src/utils/map-finish-reason.ts","../src/utils/convert-to-openai-messages.ts","../src/utils/convert-to-openai-tools.ts"],"sourcesContent":["import type { LanguageModelV1 } from \"@ai-sdk/provider\";\nimport { loadApiKey, withoutTrailingSlash } from \"@ai-sdk/provider-utils\";\nimport { ApertisChatLanguageModel } from \"./apertis-chat-language-model\";\nimport type {\n ApertisChatSettings,\n ApertisModelId,\n ApertisProviderSettings,\n} from \"./apertis-chat-settings\";\n\nexport interface ApertisProvider {\n /**\n * Creates a chat model for text generation.\n */\n (modelId: ApertisModelId, settings?: ApertisChatSettings): LanguageModelV1;\n\n /**\n * Creates a chat model for text generation.\n */\n chat(\n modelId: ApertisModelId,\n settings?: ApertisChatSettings,\n ): LanguageModelV1;\n\n /**\n * Creates a chat model for text generation (alias for languageModel).\n */\n languageModel(\n modelId: ApertisModelId,\n settings?: ApertisChatSettings,\n ): LanguageModelV1;\n}\n\nexport function createApertis(\n options: ApertisProviderSettings = {},\n): ApertisProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
\"https://api.apertis.ai/v1\";\n\n const getHeaders = () => ({\n ...options.headers,\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: \"APERTIS_API_KEY\",\n description: \"Apertis API key\",\n })}`,\n \"Content-Type\": \"application/json\",\n });\n\n const createChatModel = (\n modelId: ApertisModelId,\n settings: ApertisChatSettings = {},\n ): LanguageModelV1 =>\n new ApertisChatLanguageModel(modelId, settings, {\n provider: \"apertis.chat\",\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider: ApertisProvider = Object.assign(\n (modelId: ApertisModelId, settings?: ApertisChatSettings) =>\n createChatModel(modelId, settings),\n {\n chat: createChatModel,\n languageModel: createChatModel,\n },\n );\n\n return provider;\n}\n\n/**\n * Default Apertis provider instance.\n */\nexport const apertis = createApertis();\n","import type {\n LanguageModelV1,\n LanguageModelV1CallOptions,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1FunctionTool,\n LanguageModelV1StreamPart,\n} from \"@ai-sdk/provider\";\nimport {\n type ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n postJsonToApi,\n} from \"@ai-sdk/provider-utils\";\nimport type { ApertisChatSettings } from \"./apertis-chat-settings\";\nimport { apertisFailedResponseHandler } from \"./apertis-error\";\nimport {\n type OpenAIChatChunk,\n openAIChatChunkSchema,\n openAIChatResponseSchema,\n} from \"./schemas/chat-response\";\nimport {\n convertToOpenAIMessages,\n convertToOpenAIToolChoice,\n convertToOpenAITools,\n mapApertisFinishReason,\n} from \"./utils\";\n\nexport interface ApertisChatConfig {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string>;\n fetch?: typeof fetch;\n}\n\nexport class ApertisChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = \"v1\";\n readonly defaultObjectGenerationMode = \"json\";\n readonly 
supportsImageUrls = true;\n\n constructor(\n readonly modelId: string,\n private readonly settings: ApertisChatSettings,\n private readonly config: ApertisChatConfig,\n ) {}\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportsStructuredOutputs(): boolean {\n return true;\n }\n\n async doGenerate(options: LanguageModelV1CallOptions): Promise<{\n text?: string;\n toolCalls?: Array<{\n toolCallType: \"function\";\n toolCallId: string;\n toolName: string;\n args: string;\n }>;\n finishReason: LanguageModelV1FinishReason;\n usage: { promptTokens: number; completionTokens: number };\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const body = this.buildRequestBody(options, false);\n\n const { value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body,\n failedResponseHandler: apertisFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openAIChatResponseSchema,\n ),\n fetch: this.config.fetch,\n abortSignal: options.abortSignal,\n });\n\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map((tc) => ({\n toolCallType: \"function\" as const,\n toolCallId: tc.id,\n toolName: tc.function.name,\n args: tc.function.arguments,\n })),\n finishReason: mapApertisFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? 0,\n completionTokens: response.usage?.completion_tokens ?? 
0,\n },\n rawCall: { rawPrompt: options.prompt, rawSettings: body },\n };\n }\n\n async doStream(options: LanguageModelV1CallOptions): Promise<{\n stream: ReadableStream<LanguageModelV1StreamPart>;\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const body = this.buildRequestBody(options, true);\n\n const { value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body,\n failedResponseHandler: apertisFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openAIChatChunkSchema,\n ),\n fetch: this.config.fetch,\n abortSignal: options.abortSignal,\n });\n\n const toolCallBuffers: Map<\n number,\n { id: string; name: string; arguments: string }\n > = new Map();\n\n const transformStream = new TransformStream<\n ParseResult<OpenAIChatChunk>,\n LanguageModelV1StreamPart\n >({\n transform(parseResult, controller) {\n // Skip failed parse results\n if (!parseResult.success) {\n return;\n }\n\n const chunk = parseResult.value;\n const choice = chunk.choices[0];\n\n if (!choice) return;\n\n // Handle text delta\n if (choice.delta.content) {\n controller.enqueue({\n type: \"text-delta\",\n textDelta: choice.delta.content,\n });\n }\n\n // Handle tool calls\n if (choice.delta.tool_calls) {\n for (const tc of choice.delta.tool_calls) {\n let buffer = toolCallBuffers.get(tc.index);\n\n if (!buffer) {\n buffer = { id: tc.id ?? 
generateId(), name: \"\", arguments: \"\" };\n toolCallBuffers.set(tc.index, buffer);\n }\n\n if (tc.id) buffer.id = tc.id;\n if (tc.function?.name) buffer.name += tc.function.name;\n if (tc.function?.arguments)\n buffer.arguments += tc.function.arguments;\n }\n }\n\n // Handle finish\n if (choice.finish_reason) {\n // Emit completed tool calls (only those with valid names)\n for (const [, buffer] of toolCallBuffers) {\n if (buffer.name) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: buffer.id,\n toolName: buffer.name,\n args: buffer.arguments,\n });\n }\n }\n // Clear buffers after emitting\n toolCallBuffers.clear();\n\n controller.enqueue({\n type: \"finish\",\n finishReason: mapApertisFinishReason(choice.finish_reason),\n usage: {\n promptTokens: chunk.usage?.prompt_tokens ?? 0,\n completionTokens: chunk.usage?.completion_tokens ?? 0,\n },\n });\n }\n },\n flush(controller) {\n // Emit any remaining buffered tool calls when stream closes early\n for (const [, buffer] of toolCallBuffers) {\n if (buffer.name) {\n controller.enqueue({\n type: \"tool-call\",\n toolCallType: \"function\",\n toolCallId: buffer.id,\n toolName: buffer.name,\n args: buffer.arguments,\n });\n }\n }\n },\n });\n\n return {\n stream: response.pipeThrough(transformStream),\n rawCall: { rawPrompt: options.prompt, rawSettings: body },\n };\n }\n\n private buildRequestBody(\n options: LanguageModelV1CallOptions,\n stream: boolean,\n ) {\n // Extract tools and toolChoice from mode if available\n const tools =\n options.mode.type === \"regular\"\n ? this.filterFunctionTools(options.mode.tools)\n : undefined;\n const toolChoice =\n options.mode.type === \"regular\" ? options.mode.toolChoice : undefined;\n\n // Determine response format based on mode\n const responseFormat =\n options.mode.type === \"object-json\"\n ? 
{ type: \"json_object\" as const }\n : undefined;\n\n const body: Record<string, unknown> = {\n model: this.modelId,\n messages: convertToOpenAIMessages(options.prompt),\n stream,\n };\n\n // Only add defined optional fields to avoid sending undefined to API\n if (stream) body.stream_options = { include_usage: true };\n if (options.temperature !== undefined)\n body.temperature = options.temperature;\n if (options.maxTokens !== undefined) body.max_tokens = options.maxTokens;\n if (options.topP !== undefined) body.top_p = options.topP;\n if (options.frequencyPenalty !== undefined)\n body.frequency_penalty = options.frequencyPenalty;\n if (options.presencePenalty !== undefined)\n body.presence_penalty = options.presencePenalty;\n if (options.stopSequences !== undefined) body.stop = options.stopSequences;\n if (options.seed !== undefined) body.seed = options.seed;\n\n const convertedTools = convertToOpenAITools(tools);\n if (convertedTools !== undefined) body.tools = convertedTools;\n\n const convertedToolChoice = convertToOpenAIToolChoice(toolChoice);\n if (convertedToolChoice !== undefined)\n body.tool_choice = convertedToolChoice;\n\n if (responseFormat !== undefined) body.response_format = responseFormat;\n if (this.settings.user !== undefined) body.user = this.settings.user;\n if (this.settings.logprobs !== undefined)\n body.logprobs = this.settings.logprobs;\n if (this.settings.topLogprobs !== undefined)\n body.top_logprobs = this.settings.topLogprobs;\n\n return body;\n }\n\n private filterFunctionTools(\n tools: Array<LanguageModelV1FunctionTool | { type: string }> | undefined,\n ): LanguageModelV1FunctionTool[] | undefined {\n if (!tools) return undefined;\n return tools.filter(\n (tool): tool is LanguageModelV1FunctionTool => tool.type === \"function\",\n );\n }\n}\n","import { createJsonErrorResponseHandler } from \"@ai-sdk/provider-utils\";\nimport { z } from \"zod\";\n\nconst apertisErrorSchema = z.object({\n error: z.object({\n message: z.string(),\n 
type: z.string().optional(),\n code: z.string().nullable().optional(),\n param: z.string().nullable().optional(),\n }),\n});\n\nexport type ApertisErrorData = z.infer<typeof apertisErrorSchema>;\n\nexport const apertisFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: apertisErrorSchema,\n errorToMessage: (error) => error.error.message,\n});\n","import { z } from \"zod\";\n\nexport const openAIChatResponseSchema = z.object({\n id: z.string(),\n object: z.literal(\"chat.completion\").optional(),\n created: z.number().optional(),\n model: z.string().optional(),\n choices: z.array(\n z.object({\n index: z.number(),\n message: z.object({\n role: z.literal(\"assistant\"),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n type: z.literal(\"function\"),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional(),\n }),\n finish_reason: z.string().nullable(),\n logprobs: z.any().nullable().optional(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number().optional(),\n })\n .optional(),\n});\n\nexport type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;\n\nexport const openAIChatChunkSchema = z.object({\n id: z.string(),\n object: z.literal(\"chat.completion.chunk\").optional(),\n created: z.number().optional(),\n model: z.string().optional(),\n choices: z.array(\n z.object({\n index: z.number(),\n delta: z.object({\n role: z.literal(\"assistant\").optional(),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().optional(),\n type: z.literal(\"function\").optional(),\n function: z\n .object({\n name: z.string().optional(),\n arguments: z.string().optional(),\n })\n .optional(),\n }),\n )\n .optional(),\n }),\n finish_reason: z.string().nullable().optional(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n 
completion_tokens: z.number(),\n })\n .nullish(),\n});\n\nexport type OpenAIChatChunk = z.infer<typeof openAIChatChunkSchema>;\n","import type { LanguageModelV1FinishReason } from \"@ai-sdk/provider\";\n\nexport function mapApertisFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case \"stop\":\n return \"stop\";\n case \"length\":\n return \"length\";\n case \"tool_calls\":\n return \"tool-calls\";\n case \"content_filter\":\n return \"content-filter\";\n default:\n return \"unknown\";\n }\n}\n","import type { LanguageModelV1Prompt } from \"@ai-sdk/provider\";\n\nexport type OpenAIMessage =\n | { role: \"system\"; content: string }\n | { role: \"user\"; content: string | OpenAIContentPart[] }\n | { role: \"assistant\"; content: string | null; tool_calls?: OpenAIToolCall[] }\n | { role: \"tool\"; tool_call_id: string; content: string };\n\nexport type OpenAIContentPart =\n | { type: \"text\"; text: string }\n | {\n type: \"image_url\";\n image_url: { url: string; detail?: \"auto\" | \"low\" | \"high\" };\n };\n\nexport type OpenAIToolCall = {\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n};\n\nexport function convertToOpenAIMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAIMessage[] {\n const messages: OpenAIMessage[] = [];\n\n for (const message of prompt) {\n switch (message.role) {\n case \"system\":\n messages.push({ role: \"system\", content: message.content });\n break;\n\n case \"user\":\n messages.push({\n role: \"user\",\n content: message.content.map((part): OpenAIContentPart => {\n switch (part.type) {\n case \"text\":\n return { type: \"text\", text: part.text };\n case \"image\":\n return {\n type: \"image_url\",\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${part.mimeType ?? 
\"image/png\"};base64,${Buffer.from(part.image).toString(\"base64\")}`,\n },\n };\n default:\n throw new Error(\n `Unsupported user content part type: ${(part as { type: string }).type}`,\n );\n }\n }),\n });\n break;\n\n case \"assistant\": {\n const textContent = message.content\n .filter((p): p is { type: \"text\"; text: string } => p.type === \"text\")\n .map((p) => p.text)\n .join(\"\");\n\n const toolCalls = message.content\n .filter(\n (\n p,\n ): p is {\n type: \"tool-call\";\n toolCallId: string;\n toolName: string;\n args: unknown;\n } => p.type === \"tool-call\",\n )\n .map((tc) => {\n let arguments_str = \"{}\";\n try {\n arguments_str = JSON.stringify(tc.args);\n } catch {\n arguments_str = \"{}\";\n }\n return {\n id: tc.toolCallId,\n type: \"function\" as const,\n function: { name: tc.toolName, arguments: arguments_str },\n };\n });\n\n messages.push({\n role: \"assistant\",\n content: textContent || null,\n ...(toolCalls.length > 0 ? { tool_calls: toolCalls } : {}),\n });\n break;\n }\n\n case \"tool\":\n for (const result of message.content) {\n let content = \"{}\";\n if (typeof result.result === \"string\") {\n content = result.result;\n } else {\n try {\n content = JSON.stringify(result.result);\n } catch {\n content = \"{}\";\n }\n }\n messages.push({\n role: \"tool\",\n tool_call_id: result.toolCallId,\n content,\n });\n }\n break;\n }\n }\n\n return messages;\n}\n","import type {\n LanguageModelV1FunctionTool,\n LanguageModelV1ToolChoice,\n} from \"@ai-sdk/provider\";\n\nexport type OpenAITool = {\n type: \"function\";\n function: {\n name: string;\n description?: string;\n parameters: Record<string, unknown>;\n };\n};\n\nexport type OpenAIToolChoice =\n | \"none\"\n | \"auto\"\n | \"required\"\n | { type: \"function\"; function: { name: string } };\n\nexport function convertToOpenAITools(\n tools: LanguageModelV1FunctionTool[] | undefined,\n): OpenAITool[] | undefined {\n if (!tools || tools.length === 0) return undefined;\n\n return 
tools.map((tool) => ({\n type: \"function\" as const,\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters as Record<string, unknown>,\n },\n }));\n}\n\nexport function convertToOpenAIToolChoice(\n toolChoice: LanguageModelV1ToolChoice | undefined,\n): OpenAIToolChoice | undefined {\n if (!toolChoice) return undefined;\n\n switch (toolChoice.type) {\n case \"none\":\n return \"none\";\n case \"auto\":\n return \"auto\";\n case \"required\":\n return \"required\";\n case \"tool\":\n return { type: \"function\", function: { name: toolChoice.toolName } };\n default:\n return undefined;\n }\n}\n"],"mappings":";AACA,SAAS,YAAY,4BAA4B;;;ACOjD;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACdP,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,qBAAqB,EAAE,OAAO;AAAA,EAClC,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA,IAClB,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,MAAM,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IACrC,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EACxC,CAAC;AACH,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,CAAC,UAAU,MAAM,MAAM;AACzC,CAAC;;;ACjBD,SAAS,KAAAA,UAAS;AAEX,IAAM,2BAA2BA,GAAE,OAAO;AAAA,EAC/C,IAAIA,GAAE,OAAO;AAAA,EACb,QAAQA,GAAE,QAAQ,iBAAiB,EAAE,SAAS;AAAA,EAC9C,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,MAChB,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,SAAS;AAAA,MACnC,UAAUA,GAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,IACxC,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,IAC5B,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA,EACpC,CAAC,EACA,SAAS;AACd,CAAC;AAIM,IAAM,wBAAwBA,GAAE,OAAO;AAAA,EAC5
C,IAAIA,GAAE,OAAO;AAAA,EACb,QAAQA,GAAE,QAAQ,uBAAuB,EAAE,SAAS;AAAA,EACpD,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,MAChB,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,QAAQ,WAAW,EAAE,SAAS;AAAA,QACtC,SAASA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,OAAOA,GAAE,OAAO;AAAA,YAChB,IAAIA,GAAE,OAAO,EAAE,SAAS;AAAA,YACxB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,YACrC,UAAUA,GACP,OAAO;AAAA,cACN,MAAMA,GAAE,OAAO,EAAE,SAAS;AAAA,cAC1B,WAAWA,GAAE,OAAO,EAAE,SAAS;AAAA,YACjC,CAAC,EACA,SAAS;AAAA,UACd,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AC3EM,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ACIO,SAAS,wBACd,QACiB;AACjB,QAAM,WAA4B,CAAC;AAEnC,aAAW,WAAW,QAAQ;AAC5B,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK;AACH,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,QAAQ,QAAQ,CAAC;AAC1D;AAAA,MAEF,KAAK;AACH,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,QAAQ,IAAI,CAAC,SAA4B;AACxD,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK;AACH,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC,KAAK;AACH,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,QAAQ,KAAK,YAAY,WAAW,WAAW,OAAO,KAAK,KAAK,KAAK,EAAE,SAAS,QAAQ,CAAC;AAAA,kBACjG;AAAA,gBACF;AAAA,cACF;AACE,sBAAM,IAAI;AAAA,kBACR,uCAAwC,KAA0B,IAAI;AAAA,gBACxE;AAAA,YACJ;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MAEF,KAAK,aAAa;AAChB,cAAM,cAAc,QAAQ,QACzB,OAAO,CAAC,MAA2C,EAAE,SAAS,MAAM,EACpE,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,KAAK,EAAE;AAEV,cAAM,YAAY,QAAQ,QACvB;AAAA,UACC,CACE,MAMG,EAAE,SAAS;AAAA,QAClB,EACC,IAAI,CAAC,OAAO;AACX,cAAI,gBAAgB;AACpB,cAAI;AACF,4BAAgB,KAAK,UAAU,GAAG,IAAI;AAAA,UACxC,QAAQ;AACN,4BAAgB;AAAA,UAClB;AACA,iBAAO;AAAA,YACL,IAAI,GAAG;AAAA,YAC
P,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,GAAG,UAAU,WAAW,cAAc;AAAA,UAC1D;AAAA,QACF,CAAC;AAEH,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,eAAe;AAAA,UACxB,GAAI,UAAU,SAAS,IAAI,EAAE,YAAY,UAAU,IAAI,CAAC;AAAA,QAC1D,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK;AACH,mBAAW,UAAU,QAAQ,SAAS;AACpC,cAAI,UAAU;AACd,cAAI,OAAO,OAAO,WAAW,UAAU;AACrC,sBAAU,OAAO;AAAA,UACnB,OAAO;AACL,gBAAI;AACF,wBAAU,KAAK,UAAU,OAAO,MAAM;AAAA,YACxC,QAAQ;AACN,wBAAU;AAAA,YACZ;AAAA,UACF;AACA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB;AAAA,UACF,CAAC;AAAA,QACH;AACA;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AACT;;;ACpGO,SAAS,qBACd,OAC0B;AAC1B,MAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AAEzC,SAAO,MAAM,IAAI,CAAC,UAAU;AAAA,IAC1B,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,EAAE;AACJ;AAEO,SAAS,0BACd,YAC8B;AAC9B,MAAI,CAAC,WAAY,QAAO;AAExB,UAAQ,WAAW,MAAM;AAAA,IACvB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IACrE;AACE,aAAO;AAAA,EACX;AACF;;;ALhBO,IAAM,2BAAN,MAA0D;AAAA,EAK/D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAAA,EAChB;AAAA,EARM,uBAAuB;AAAA,EACvB,8BAA8B;AAAA,EAC9B,oBAAoB;AAAA,EAQ7B,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,4BAAqC;AACvC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,WAAW,SAYd;AACD,UAAM,OAAO,KAAK,iBAAiB,SAAS,KAAK;AAEjD,UAAM,EAAE,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC9C,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,MACnB,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO,QAAQ,WAAW;AAAA,MAChC,WAAW,OAAO,QAAQ,YAAY,IAAI,CAAC,QAAQ;AAAA,QACjD,cAAc;AAAA,QACd,YAAY,GAAG;AAAA,QACf,UAAU,GAAG,SAAS;AAAA,QACtB,MAAM,GAAG,SAAS;AAAA,MACpB,EAAE;AAAA,MACF,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,OAAO,iBAAiB;AAAA,QAC/C,kBAAkB,SAAS,OAAO,qBAAqB;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,QAAQ,QAAQ,aAAa,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,SAIZ;AACD,UAA
M,OAAO,KAAK,iBAAiB,SAAS,IAAI;AAEhD,UAAM,EAAE,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC9C,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,MACnB,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,kBAGF,oBAAI,IAAI;AAEZ,UAAM,kBAAkB,IAAI,gBAG1B;AAAA,MACA,UAAU,aAAa,YAAY;AAEjC,YAAI,CAAC,YAAY,SAAS;AACxB;AAAA,QACF;AAEA,cAAM,QAAQ,YAAY;AAC1B,cAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,YAAI,CAAC,OAAQ;AAGb,YAAI,OAAO,MAAM,SAAS;AACxB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,WAAW,OAAO,MAAM;AAAA,UAC1B,CAAC;AAAA,QACH;AAGA,YAAI,OAAO,MAAM,YAAY;AAC3B,qBAAW,MAAM,OAAO,MAAM,YAAY;AACxC,gBAAI,SAAS,gBAAgB,IAAI,GAAG,KAAK;AAEzC,gBAAI,CAAC,QAAQ;AACX,uBAAS,EAAE,IAAI,GAAG,MAAM,WAAW,GAAG,MAAM,IAAI,WAAW,GAAG;AAC9D,8BAAgB,IAAI,GAAG,OAAO,MAAM;AAAA,YACtC;AAEA,gBAAI,GAAG,GAAI,QAAO,KAAK,GAAG;AAC1B,gBAAI,GAAG,UAAU,KAAM,QAAO,QAAQ,GAAG,SAAS;AAClD,gBAAI,GAAG,UAAU;AACf,qBAAO,aAAa,GAAG,SAAS;AAAA,UACpC;AAAA,QACF;AAGA,YAAI,OAAO,eAAe;AAExB,qBAAW,CAAC,EAAE,MAAM,KAAK,iBAAiB;AACxC,gBAAI,OAAO,MAAM;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,cAAc;AAAA,gBACd,YAAY,OAAO;AAAA,gBACnB,UAAU,OAAO;AAAA,gBACjB,MAAM,OAAO;AAAA,cACf,CAAC;AAAA,YACH;AAAA,UACF;AAEA,0BAAgB,MAAM;AAEtB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,uBAAuB,OAAO,aAAa;AAAA,YACzD,OAAO;AAAA,cACL,cAAc,MAAM,OAAO,iBAAiB;AAAA,cAC5C,kBAAkB,MAAM,OAAO,qBAAqB;AAAA,YACtD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA,MAAM,YAAY;AAEhB,mBAAW,CAAC,EAAE,MAAM,KAAK,iBAAiB;AACxC,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,YAAY,OAAO;AAAA,cACnB,UAAU,OAAO;AAAA,cACjB,MAAM,OAAO;AAAA,YACf,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,YAAY,eAAe;AAAA,MAC5C,SAAS,EAAE,WAAW,QAAQ,QAAQ,aAAa,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA,EAEQ,iBACN,SACA,QACA;AAEA,UAAM,QACJ,QAAQ,KAAK,SAAS,YAClB,KAAK,oBAAoB,QAAQ,KAAK,KAAK,IAC3C;AACN,UAAM,aACJ,QAAQ,KAAK,SAAS,YAAY,QAAQ,KAAK,aAAa;AAG9D,UAAM,iBACJ,QAAQ,KAAK,SAAS,gBAClB,EAAE,MAAM,cAAuB,IAC/B;AAEN,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ,UAAU,
wBAAwB,QAAQ,MAAM;AAAA,MAChD;AAAA,IACF;AAGA,QAAI,OAAQ,MAAK,iBAAiB,EAAE,eAAe,KAAK;AACxD,QAAI,QAAQ,gBAAgB;AAC1B,WAAK,cAAc,QAAQ;AAC7B,QAAI,QAAQ,cAAc,OAAW,MAAK,aAAa,QAAQ;AAC/D,QAAI,QAAQ,SAAS,OAAW,MAAK,QAAQ,QAAQ;AACrD,QAAI,QAAQ,qBAAqB;AAC/B,WAAK,oBAAoB,QAAQ;AACnC,QAAI,QAAQ,oBAAoB;AAC9B,WAAK,mBAAmB,QAAQ;AAClC,QAAI,QAAQ,kBAAkB,OAAW,MAAK,OAAO,QAAQ;AAC7D,QAAI,QAAQ,SAAS,OAAW,MAAK,OAAO,QAAQ;AAEpD,UAAM,iBAAiB,qBAAqB,KAAK;AACjD,QAAI,mBAAmB,OAAW,MAAK,QAAQ;AAE/C,UAAM,sBAAsB,0BAA0B,UAAU;AAChE,QAAI,wBAAwB;AAC1B,WAAK,cAAc;AAErB,QAAI,mBAAmB,OAAW,MAAK,kBAAkB;AACzD,QAAI,KAAK,SAAS,SAAS,OAAW,MAAK,OAAO,KAAK,SAAS;AAChE,QAAI,KAAK,SAAS,aAAa;AAC7B,WAAK,WAAW,KAAK,SAAS;AAChC,QAAI,KAAK,SAAS,gBAAgB;AAChC,WAAK,eAAe,KAAK,SAAS;AAEpC,WAAO;AAAA,EACT;AAAA,EAEQ,oBACN,OAC2C;AAC3C,QAAI,CAAC,MAAO,QAAO;AACnB,WAAO,MAAM;AAAA,MACX,CAAC,SAA8C,KAAK,SAAS;AAAA,IAC/D;AAAA,EACF;AACF;;;ADpPO,SAAS,cACd,UAAmC,CAAC,GACnB;AACjB,QAAM,UACJ,qBAAqB,QAAQ,OAAO,KAAK;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,GAAG,QAAQ;AAAA,IACX,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,gBAAgB;AAAA,EAClB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAA4B,OAAO;AAAA,IACvC,CAAC,SAAyB,aACxB,gBAAgB,SAAS,QAAQ;AAAA,IACnC;AAAA,MACE,MAAM;AAAA,MACN,eAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@apertis/ai-sdk-provider",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Apertis AI provider for Vercel AI SDK",
|
|
5
|
+
"license": "Apache-2.0",
|
|
6
|
+
"repository": {
|
|
7
|
+
"type": "git",
|
|
8
|
+
"url": "https://github.com/theQuert/apertis-sdk"
|
|
9
|
+
},
|
|
10
|
+
"homepage": "https://github.com/theQuert/apertis-sdk#readme",
|
|
11
|
+
"type": "module",
|
|
12
|
+
"engines": {
|
|
13
|
+
"node": ">=18.0.0"
|
|
14
|
+
},
|
|
15
|
+
"main": "./dist/index.js",
|
|
16
|
+
"module": "./dist/index.mjs",
|
|
17
|
+
"types": "./dist/index.d.ts",
|
|
18
|
+
"exports": {
|
|
19
|
+
".": {
|
|
20
|
+
"types": "./dist/index.d.ts",
|
|
21
|
+
"import": "./dist/index.mjs",
|
|
22
|
+
"require": "./dist/index.js"
|
|
23
|
+
}
|
|
24
|
+
},
|
|
25
|
+
"files": ["dist"],
|
|
26
|
+
"scripts": {
|
|
27
|
+
"build": "tsup",
|
|
28
|
+
"dev": "tsup --watch",
|
|
29
|
+
"lint": "biome check .",
|
|
30
|
+
"lint:fix": "biome check --write .",
|
|
31
|
+
"test": "vitest",
|
|
32
|
+
"test:run": "vitest run",
|
|
33
|
+
"prepublishOnly": "pnpm build"
|
|
34
|
+
},
|
|
35
|
+
"dependencies": {
|
|
36
|
+
"@ai-sdk/provider": "^1.0.0",
|
|
37
|
+
"@ai-sdk/provider-utils": "^2.0.0",
|
|
38
|
+
"zod": "^3.23.0"
|
|
39
|
+
},
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@biomejs/biome": "^1.9.0",
|
|
42
|
+
"@types/node": "^22.0.0",
|
|
43
|
+
"tsup": "^8.0.0",
|
|
44
|
+
"typescript": "^5.6.0",
|
|
45
|
+
"vitest": "^2.0.0"
|
|
46
|
+
},
|
|
47
|
+
"peerDependencies": {
|
|
48
|
+
"zod": "^3.0.0"
|
|
49
|
+
},
|
|
50
|
+
"keywords": [
|
|
51
|
+
"ai",
|
|
52
|
+
"apertis",
|
|
53
|
+
"vercel",
|
|
54
|
+
"ai-sdk",
|
|
55
|
+
"llm",
|
|
56
|
+
"openai",
|
|
57
|
+
"anthropic"
|
|
58
|
+
]
|
|
59
|
+
}
|