ai 3.0.21 → 3.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +42 -1
- package/dist/index.d.ts +42 -1
- package/dist/index.js +104 -177
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +65 -138
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -33
- package/react/dist/index.d.mts +6 -2
- package/react/dist/index.d.ts +6 -2
- package/react/dist/index.js +107 -24
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +107 -24
- package/react/dist/index.mjs.map +1 -1
- package/rsc/dist/rsc-server.mjs +3 -3
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/solid/dist/index.d.mts +6 -2
- package/solid/dist/index.d.ts +6 -2
- package/solid/dist/index.js +105 -23
- package/solid/dist/index.js.map +1 -1
- package/solid/dist/index.mjs +105 -23
- package/solid/dist/index.mjs.map +1 -1
- package/svelte/dist/index.d.mts +6 -2
- package/svelte/dist/index.d.ts +6 -2
- package/svelte/dist/index.js +107 -24
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +107 -24
- package/svelte/dist/index.mjs.map +1 -1
- package/vue/dist/index.d.mts +6 -2
- package/vue/dist/index.d.ts +6 -2
- package/vue/dist/index.js +105 -23
- package/vue/dist/index.js.map +1 -1
- package/vue/dist/index.mjs +105 -23
- package/vue/dist/index.mjs.map +1 -1
- package/anthropic/dist/index.d.mts +0 -51
- package/anthropic/dist/index.d.ts +0 -51
- package/anthropic/dist/index.js +0 -792
- package/anthropic/dist/index.js.map +0 -1
- package/anthropic/dist/index.mjs +0 -760
- package/anthropic/dist/index.mjs.map +0 -1
- package/google/dist/index.d.mts +0 -47
- package/google/dist/index.d.ts +0 -47
- package/google/dist/index.js +0 -796
- package/google/dist/index.js.map +0 -1
- package/google/dist/index.mjs +0 -764
- package/google/dist/index.mjs.map +0 -1
- package/mistral/dist/index.d.mts +0 -52
- package/mistral/dist/index.d.ts +0 -52
- package/mistral/dist/index.js +0 -763
- package/mistral/dist/index.js.map +0 -1
- package/mistral/dist/index.mjs +0 -731
- package/mistral/dist/index.mjs.map +0 -1
- package/openai/dist/index.d.mts +0 -116
- package/openai/dist/index.d.ts +0 -116
- package/openai/dist/index.js +0 -1143
- package/openai/dist/index.js.map +0 -1
- package/openai/dist/index.mjs +0 -1115
- package/openai/dist/index.mjs.map +0 -1
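
The entries above fall into two groups: the core bundles and the framework bindings (react, solid, svelte, vue, rsc) are modified, while the bundled provider entry points (anthropic, google, mistral, openai) are removed outright, presumably together with their subpath exports in package.json. A rough sketch of what that means for imports follows; the old subpath is inferred from the removed directory layout, and the standalone @ai-sdk/anthropic replacement is an assumption, not something this diff confirms:

    // Before (ai <= 3.0.21): provider facade shipped as a subpath of the "ai" package
    import { anthropic } from "ai/anthropic";

    // After (ai 3.0.23): presumably the standalone provider package instead
    import { anthropic } from "@ai-sdk/anthropic";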
package/anthropic/dist/index.mjs
DELETED
@@ -1,760 +0,0 @@
-// spec/util/load-api-key.ts
-import { LoadAPIKeyError } from "@ai-sdk/provider";
-function loadApiKey({
-  apiKey,
-  environmentVariableName,
-  apiKeyParameterName = "apiKey",
-  description
-}) {
-  if (typeof apiKey === "string") {
-    return apiKey;
-  }
-  if (apiKey != null) {
-    throw new LoadAPIKeyError({
-      message: `${description} API key must be a string.`
-    });
-  }
-  if (typeof process === "undefined") {
-    throw new LoadAPIKeyError({
-      message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
-    });
-  }
-  apiKey = process.env[environmentVariableName];
-  if (apiKey == null) {
-    throw new LoadAPIKeyError({
-      message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
-    });
-  }
-  if (typeof apiKey !== "string") {
-    throw new LoadAPIKeyError({
-      message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
-    });
-  }
-  return apiKey;
-}
-
-// spec/util/parse-json.ts
-import { JSONParseError, TypeValidationError as TypeValidationError2 } from "@ai-sdk/provider";
-import SecureJSON from "secure-json-parse";
-
-// spec/util/validate-types.ts
-import { TypeValidationError } from "@ai-sdk/provider";
-function validateTypes({
-  value,
-  schema
-}) {
-  try {
-    return schema.parse(value);
-  } catch (error) {
-    throw new TypeValidationError({ value, cause: error });
-  }
-}
-function safeValidateTypes({
-  value,
-  schema
-}) {
-  try {
-    const validationResult = schema.safeParse(value);
-    if (validationResult.success) {
-      return {
-        success: true,
-        value: validationResult.data
-      };
-    }
-    return {
-      success: false,
-      error: new TypeValidationError({
-        value,
-        cause: validationResult.error
-      })
-    };
-  } catch (error) {
-    return {
-      success: false,
-      error: TypeValidationError.isTypeValidationError(error) ? error : new TypeValidationError({ value, cause: error })
-    };
-  }
-}
-
-// spec/util/parse-json.ts
-function parseJSON({
-  text,
-  schema
-}) {
-  try {
-    const value = SecureJSON.parse(text);
-    if (schema == null) {
-      return value;
-    }
-    return validateTypes({ value, schema });
-  } catch (error) {
-    if (JSONParseError.isJSONParseError(error) || TypeValidationError2.isTypeValidationError(error)) {
-      throw error;
-    }
-    throw new JSONParseError({ text, cause: error });
-  }
-}
-function safeParseJSON({
-  text,
-  schema
-}) {
-  try {
-    const value = SecureJSON.parse(text);
-    if (schema == null) {
-      return {
-        success: true,
-        value
-      };
-    }
-    return safeValidateTypes({ value, schema });
-  } catch (error) {
-    return {
-      success: false,
-      error: JSONParseError.isJSONParseError(error) ? error : new JSONParseError({ text, cause: error })
-    };
-  }
-}
-
-// spec/util/post-to-api.ts
-import { APICallError } from "@ai-sdk/provider";
-var postJsonToApi = async ({
-  url,
-  headers,
-  body,
-  failedResponseHandler,
-  successfulResponseHandler,
-  abortSignal
-}) => postToApi({
-  url,
-  headers: {
-    ...headers,
-    "Content-Type": "application/json"
-  },
-  body: {
-    content: JSON.stringify(body),
-    values: body
-  },
-  failedResponseHandler,
-  successfulResponseHandler,
-  abortSignal
-});
-var postToApi = async ({
-  url,
-  headers = {},
-  body,
-  successfulResponseHandler,
-  failedResponseHandler,
-  abortSignal
-}) => {
-  try {
-    const definedHeaders = Object.fromEntries(
-      Object.entries(headers).filter(([_key, value]) => value != null)
-    );
-    const response = await fetch(url, {
-      method: "POST",
-      headers: definedHeaders,
-      body: body.content,
-      signal: abortSignal
-    });
-    if (!response.ok) {
-      try {
-        throw await failedResponseHandler({
-          response,
-          url,
-          requestBodyValues: body.values
-        });
-      } catch (error) {
-        if (error instanceof Error) {
-          if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
-            throw error;
-          }
-        }
-        throw new APICallError({
-          message: "Failed to process error response",
-          cause: error,
-          statusCode: response.status,
-          url,
-          requestBodyValues: body.values
-        });
-      }
-    }
-    try {
-      return await successfulResponseHandler({
-        response,
-        url,
-        requestBodyValues: body.values
-      });
-    } catch (error) {
-      if (error instanceof Error) {
-        if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
-          throw error;
-        }
-      }
-      throw new APICallError({
-        message: "Failed to process successful response",
-        cause: error,
-        statusCode: response.status,
-        url,
-        requestBodyValues: body.values
-      });
-    }
-  } catch (error) {
-    if (error instanceof Error) {
-      if (error.name === "AbortError") {
-        throw error;
-      }
-    }
-    if (error instanceof TypeError && error.message === "fetch failed") {
-      const cause = error.cause;
-      if (cause != null) {
-        throw new APICallError({
-          message: `Cannot connect to API: ${cause.message}`,
-          cause,
-          url,
-          requestBodyValues: body.values,
-          isRetryable: true
-          // retry when network error
-        });
-      }
-    }
-    throw error;
-  }
-};
-
-// spec/util/response-handler.ts
-import { APICallError as APICallError2, NoResponseBodyError } from "@ai-sdk/provider";
-import {
-  EventSourceParserStream
-} from "eventsource-parser/stream";
-var createJsonErrorResponseHandler = ({
-  errorSchema,
-  errorToMessage,
-  isRetryable
-}) => async ({ response, url, requestBodyValues }) => {
-  const responseBody = await response.text();
-  if (responseBody.trim() === "") {
-    return new APICallError2({
-      message: response.statusText,
-      url,
-      requestBodyValues,
-      statusCode: response.status,
-      responseBody,
-      isRetryable: isRetryable == null ? void 0 : isRetryable(response)
-    });
-  }
-  try {
-    const parsedError = parseJSON({
-      text: responseBody,
-      schema: errorSchema
-    });
-    return new APICallError2({
-      message: errorToMessage(parsedError),
-      url,
-      requestBodyValues,
-      statusCode: response.status,
-      responseBody,
-      data: parsedError,
-      isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
-    });
-  } catch (parseError) {
-    return new APICallError2({
-      message: response.statusText,
-      url,
-      requestBodyValues,
-      statusCode: response.status,
-      responseBody,
-      isRetryable: isRetryable == null ? void 0 : isRetryable(response)
-    });
-  }
-};
-var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
-  if (response.body == null) {
-    throw new NoResponseBodyError();
-  }
-  return response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(
-    new TransformStream({
-      transform({ data }, controller) {
-        if (data === "[DONE]") {
-          return;
-        }
-        controller.enqueue(
-          safeParseJSON({
-            text: data,
-            schema: chunkSchema
-          })
-        );
-      }
-    })
-  );
-};
-var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
-  const responseBody = await response.text();
-  const parsedResult = safeParseJSON({
-    text: responseBody,
-    schema: responseSchema
-  });
-  if (!parsedResult.success) {
-    throw new APICallError2({
-      message: "Invalid JSON response",
-      cause: parsedResult.error,
-      statusCode: response.status,
-      responseBody,
-      url,
-      requestBodyValues
-    });
-  }
-  return parsedResult.value;
-};
-
-// spec/util/uint8-utils.ts
-function convertUint8ArrayToBase64(array) {
-  let latin1string = "";
-  for (let i = 0; i < array.length; i++) {
-    latin1string += String.fromCodePoint(array[i]);
-  }
-  return globalThis.btoa(latin1string);
-}
-
-// anthropic/anthropic-messages-language-model.ts
-import {
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
-} from "@ai-sdk/provider";
-import { z as z2 } from "zod";
-
-// anthropic/anthropic-error.ts
-import { z } from "zod";
-var anthropicErrorDataSchema = z.object({
-  type: z.literal("error"),
-  error: z.object({
-    type: z.string(),
-    message: z.string()
-  })
-});
-var anthropicFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: anthropicErrorDataSchema,
-  errorToMessage: (data) => data.error.message
-});
-
-// anthropic/convert-to-anthropic-messages-prompt.ts
-import {
-  UnsupportedFunctionalityError
-} from "@ai-sdk/provider";
-function convertToAnthropicMessagesPrompt(prompt) {
-  let system;
-  const messages = [];
-  for (const { role, content } of prompt) {
-    switch (role) {
-      case "system": {
-        system = content;
-        break;
-      }
-      case "user": {
-        messages.push({
-          role: "user",
-          content: content.map((part) => {
-            var _a;
-            switch (part.type) {
-              case "text": {
-                return { type: "text", text: part.text };
-              }
-              case "image": {
-                if (part.image instanceof URL) {
-                  throw new UnsupportedFunctionalityError({
-                    functionality: "URL image parts"
-                  });
-                } else {
-                  return {
-                    type: "image",
-                    source: {
-                      type: "base64",
-                      media_type: (_a = part.mimeType) != null ? _a : "image/jpeg",
-                      data: convertUint8ArrayToBase64(part.image)
-                    }
-                  };
-                }
-              }
-            }
-          })
-        });
-        break;
-      }
-      case "assistant": {
-        messages.push({
-          role: "assistant",
-          content: content.map((part) => {
-            switch (part.type) {
-              case "text": {
-                return { type: "text", text: part.text };
-              }
-              case "tool-call": {
-                return {
-                  type: "tool_use",
-                  id: part.toolCallId,
-                  name: part.toolName,
-                  input: part.args
-                };
-              }
-            }
-          })
-        });
-        break;
-      }
-      case "tool": {
-        messages.push({
-          role: "user",
-          content: content.map((part) => ({
-            type: "tool_result",
-            tool_use_id: part.toolCallId,
-            content: JSON.stringify(part.result),
-            is_error: part.isError
-          }))
-        });
-        break;
-      }
-      default: {
-        const _exhaustiveCheck = role;
-        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
-      }
-    }
-  }
-  return {
-    system,
-    messages
-  };
-}
-
-// anthropic/map-anthropic-stop-reason.ts
-function mapAnthropicStopReason(finishReason) {
-  switch (finishReason) {
-    case "end_turn":
-    case "stop_sequence":
-      return "stop";
-    case "tool_use":
-      return "tool-calls";
-    case "max_tokens":
-      return "length";
-    default:
-      return "other";
-  }
-}
-
-// anthropic/anthropic-messages-language-model.ts
-var AnthropicMessagesLanguageModel = class {
-  constructor(modelId, settings, config) {
-    this.specificationVersion = "v1";
-    this.defaultObjectGenerationMode = "tool";
-    this.modelId = modelId;
-    this.settings = settings;
-    this.config = config;
-  }
-  get provider() {
-    return this.config.provider;
-  }
-  getArgs({
-    mode,
-    prompt,
-    maxTokens,
-    temperature,
-    topP,
-    frequencyPenalty,
-    presencePenalty,
-    seed
-  }) {
-    var _a;
-    const type = mode.type;
-    const warnings = [];
-    if (frequencyPenalty != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "frequencyPenalty"
-      });
-    }
-    if (presencePenalty != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "presencePenalty"
-      });
-    }
-    if (seed != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "seed"
-      });
-    }
-    const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);
-    const baseArgs = {
-      // model id:
-      model: this.modelId,
-      // model specific settings:
-      top_k: this.settings.topK,
-      // standardized settings:
-      max_tokens: maxTokens != null ? maxTokens : 4096,
-      // 4096: max model output tokens
-      temperature,
-      // uses 0..1 scale
-      top_p: topP,
-      // prompt:
-      system: messagesPrompt.system,
-      messages: messagesPrompt.messages
-    };
-    switch (type) {
-      case "regular": {
-        const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-        return {
-          args: {
-            ...baseArgs,
-            tools: tools == null ? void 0 : tools.map((tool) => ({
-              name: tool.name,
-              description: tool.description,
-              input_schema: tool.parameters
-            }))
-          },
-          warnings
-        };
-      }
-      case "object-json": {
-        throw new UnsupportedFunctionalityError2({
-          functionality: "json-mode object generation"
-        });
-      }
-      case "object-tool": {
-        const { name, description, parameters } = mode.tool;
-        baseArgs.messages[baseArgs.messages.length - 1].content.push({
-          type: "text",
-          text: `
-
-Use the '${name}' tool.`
-        });
-        return {
-          args: {
-            ...baseArgs,
-            tools: [{ name, description, input_schema: parameters }]
-          },
-          warnings
-        };
-      }
-      case "object-grammar": {
-        throw new UnsupportedFunctionalityError2({
-          functionality: "grammar-mode object generation"
-        });
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
-  }
-  async doGenerate(options) {
-    const { args, warnings } = this.getArgs(options);
-    const response = await postJsonToApi({
-      url: `${this.config.baseUrl}/messages`,
-      headers: this.config.headers(),
-      body: args,
-      failedResponseHandler: anthropicFailedResponseHandler,
-      successfulResponseHandler: createJsonResponseHandler(
-        anthropicMessagesResponseSchema
-      ),
-      abortSignal: options.abortSignal
-    });
-    const { messages: rawPrompt, ...rawSettings } = args;
-    let text = "";
-    for (const content of response.content) {
-      if (content.type === "text") {
-        text += content.text;
-      }
-    }
-    let toolCalls = void 0;
-    if (response.content.some((content) => content.type === "tool_use")) {
-      toolCalls = [];
-      for (const content of response.content) {
-        if (content.type === "tool_use") {
-          toolCalls.push({
-            toolCallType: "function",
-            toolCallId: content.id,
-            toolName: content.name,
-            args: JSON.stringify(content.input)
-          });
-        }
-      }
-    }
-    return {
-      text,
-      toolCalls,
-      finishReason: mapAnthropicStopReason(response.stop_reason),
-      usage: {
-        promptTokens: response.usage.input_tokens,
-        completionTokens: response.usage.output_tokens
-      },
-      rawCall: { rawPrompt, rawSettings },
-      warnings
-    };
-  }
-  async doStream(options) {
-    const { args, warnings } = this.getArgs(options);
-    const response = await postJsonToApi({
-      url: `${this.config.baseUrl}/messages`,
-      headers: this.config.headers(),
-      body: {
-        ...args,
-        stream: true
-      },
-      failedResponseHandler: anthropicFailedResponseHandler,
-      successfulResponseHandler: createEventSourceResponseHandler(
-        anthropicMessagesChunkSchema
-      ),
-      abortSignal: options.abortSignal
-    });
-    const { messages: rawPrompt, ...rawSettings } = args;
-    let finishReason = "other";
-    const usage = {
-      promptTokens: Number.NaN,
-      completionTokens: Number.NaN
-    };
-    return {
-      stream: response.pipeThrough(
-        new TransformStream({
-          transform(chunk, controller) {
-            if (!chunk.success) {
-              controller.enqueue({ type: "error", error: chunk.error });
-              return;
-            }
-            const value = chunk.value;
-            switch (value.type) {
-              case "ping":
-              case "content_block_start":
-              case "content_block_stop": {
-                return;
-              }
-              case "content_block_delta": {
-                controller.enqueue({
-                  type: "text-delta",
-                  textDelta: value.delta.text
-                });
-                return;
-              }
-              case "message_start": {
-                usage.promptTokens = value.message.usage.input_tokens;
-                usage.completionTokens = value.message.usage.output_tokens;
-                return;
-              }
-              case "message_delta": {
-                usage.completionTokens = value.usage.output_tokens;
-                finishReason = mapAnthropicStopReason(value.delta.stop_reason);
-                return;
-              }
-              case "message_stop": {
-                controller.enqueue({ type: "finish", finishReason, usage });
-                return;
-              }
-              default: {
-                const _exhaustiveCheck = value;
-                throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);
-              }
-            }
-          }
-        })
-      ),
-      rawCall: { rawPrompt, rawSettings },
-      warnings
-    };
-  }
-};
-var anthropicMessagesResponseSchema = z2.object({
-  type: z2.literal("message"),
-  content: z2.array(
-    z2.discriminatedUnion("type", [
-      z2.object({
-        type: z2.literal("text"),
-        text: z2.string()
-      }),
-      z2.object({
-        type: z2.literal("tool_use"),
-        id: z2.string(),
-        name: z2.string(),
-        input: z2.unknown()
-      })
-    ])
-  ),
-  stop_reason: z2.string().optional().nullable(),
-  usage: z2.object({
-    input_tokens: z2.number(),
-    output_tokens: z2.number()
-  })
-});
-var anthropicMessagesChunkSchema = z2.discriminatedUnion("type", [
-  z2.object({
-    type: z2.literal("message_start"),
-    message: z2.object({
-      usage: z2.object({
-        input_tokens: z2.number(),
-        output_tokens: z2.number()
-      })
-    })
-  }),
-  z2.object({
-    type: z2.literal("content_block_start"),
-    index: z2.number(),
-    content_block: z2.object({
-      type: z2.literal("text"),
-      text: z2.string()
-    })
-  }),
-  z2.object({
-    type: z2.literal("content_block_delta"),
-    index: z2.number(),
-    delta: z2.object({
-      type: z2.literal("text_delta"),
-      text: z2.string()
-    })
-  }),
-  z2.object({
-    type: z2.literal("content_block_stop"),
-    index: z2.number()
-  }),
-  z2.object({
-    type: z2.literal("message_delta"),
-    delta: z2.object({ stop_reason: z2.string().optional().nullable() }),
-    usage: z2.object({ output_tokens: z2.number() })
-  }),
-  z2.object({
-    type: z2.literal("message_stop")
-  }),
-  z2.object({
-    type: z2.literal("ping")
-  })
-]);
-
-// anthropic/anthropic-facade.ts
-var Anthropic = class {
-  constructor(options = {}) {
-    this.baseUrl = options.baseUrl;
-    this.apiKey = options.apiKey;
-  }
-  get baseConfig() {
-    var _a;
-    return {
-      baseUrl: (_a = this.baseUrl) != null ? _a : "https://api.anthropic.com/v1",
-      headers: () => ({
-        "anthropic-version": "2023-06-01",
-        "anthropic-beta": "tools-2024-04-04",
-        "x-api-key": loadApiKey({
-          apiKey: this.apiKey,
-          environmentVariableName: "ANTHROPIC_API_KEY",
-          description: "Anthropic"
-        })
-      })
-    };
-  }
-  messages(modelId, settings = {}) {
-    return new AnthropicMessagesLanguageModel(modelId, settings, {
-      provider: "anthropic.messages",
-      ...this.baseConfig
-    });
-  }
-};
-var anthropic = new Anthropic();
-export {
-  Anthropic,
-  anthropic
-};
-//# sourceMappingURL=index.mjs.map
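
For reference, a minimal sketch of how the facade deleted above was consumed, based only on the exports visible in this bundle (Anthropic, anthropic, and the messages(modelId, settings) factory). The import path is inferred from the package layout, and the model ids and env-var name below are hypothetical placeholders:

    // Default instance: resolves the key from ANTHROPIC_API_KEY via loadApiKey().
    import { anthropic, Anthropic } from "ai/anthropic";

    const model = anthropic.messages("claude-3-haiku-20240307");

    // Explicit configuration, mirroring the constructor options in the deleted code:
    const provider = new Anthropic({
      apiKey: process.env.MY_ANTHROPIC_KEY,
      baseUrl: "https://api.anthropic.com/v1"
    });
    // settings.topK is the only model-specific setting the bundle reads (sent as top_k).
    const tunedModel = provider.messages("claude-3-opus-20240229", { topK: 40 });

Both calls return an AnthropicMessagesLanguageModel: a specificationVersion "v1" language model whose doGenerate and doStream methods POST to `${baseUrl}/messages` with the anthropic-version and tools-beta headers shown in the deleted facade.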