ai 3.3.17 → 3.3.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +59 -83
- package/dist/index.d.ts +59 -83
- package/dist/index.js +154 -191
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +157 -193
- package/dist/index.mjs.map +1 -1
- package/package.json +8 -8
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
-for (var
-__defProp(target,
+for (var name10 in all)
+__defProp(target, name10, { get: all[name10], enumerable: true });
 };

 // streams/index.ts
@@ -142,7 +142,7 @@ function getBaseTelemetryAttributes({
 telemetry,
 headers
 }) {
-var
+var _a10;
 return {
 "ai.model.provider": model.provider,
 "ai.model.id": model.modelId,
@@ -152,7 +152,7 @@ function getBaseTelemetryAttributes({
 return attributes;
 }, {}),
 // add metadata as attributes:
-...Object.entries((
+...Object.entries((_a10 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a10 : {}).reduce(
 (attributes, [key, value]) => {
 attributes[`ai.telemetry.metadata.${key}`] = value;
 return attributes;
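Note: the two hunks above thread caller-supplied telemetry settings into OpenTelemetry span attributes: telemetry.isEnabled gates the tracer, and each telemetry.metadata entry is recorded as an ai.telemetry.metadata.* attribute. A minimal caller-side sketch in TypeScript, assuming an already-configured model instance; the prompt and metadata values are illustrative:

  import { generateText } from "ai";

  const { text } = await generateText({
    model, // any configured language model instance (assumed)
    prompt: "Why is the sky blue?", // illustrative
    experimental_telemetry: {
      isEnabled: true, // feeds getTracer({ isEnabled }) in the hunks above
      metadata: { userId: "user-123" }, // recorded as ai.telemetry.metadata.userId
    },
  });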
@@ -177,7 +177,7 @@ var noopTracer = {
 startSpan() {
 return noopSpan;
 },
-startActiveSpan(
+startActiveSpan(name10, arg1, arg2, arg3) {
 if (typeof arg1 === "function") {
 return arg1(noopSpan);
 }
@@ -245,13 +245,13 @@ function getTracer({ isEnabled }) {
 // core/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
 function recordSpan({
-name:
+name: name10,
 tracer,
 attributes,
 fn,
 endWhenDone = true
 }) {
-return tracer.startActiveSpan(
+return tracer.startActiveSpan(name10, { attributes }, async (span) => {
 try {
 const result = await fn(span);
 if (endWhenDone) {
@@ -317,14 +317,14 @@ async function embed({
 headers,
 experimental_telemetry: telemetry
 }) {
-var
+var _a10;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { maxRetries }
 });
-const tracer = getTracer({ isEnabled: (
+const tracer = getTracer({ isEnabled: (_a10 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a10 : false });
 return recordSpan({
 name: "ai.embed",
 attributes: selectTelemetryAttributes({
@@ -357,14 +357,14 @@ async function embed({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
-var
+var _a11;
 const modelResponse = await model.doEmbed({
 values: [value],
 abortSignal,
 headers
 });
 const embedding2 = modelResponse.embeddings[0];
-const usage2 = (
+const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,
@@ -430,14 +430,14 @@ async function embedMany({
 headers,
 experimental_telemetry: telemetry
 }) {
-var
+var _a10;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { maxRetries }
 });
-const tracer = getTracer({ isEnabled: (
+const tracer = getTracer({ isEnabled: (_a10 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a10 : false });
 return recordSpan({
 name: "ai.embedMany",
 attributes: selectTelemetryAttributes({
@@ -475,14 +475,14 @@ async function embedMany({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
-var
+var _a11;
 const modelResponse = await model.doEmbed({
 values,
 abortSignal,
 headers
 });
 const embeddings3 = modelResponse.embeddings;
-const usage2 = (
+const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,
@@ -534,14 +534,14 @@ async function embedMany({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
-var
+var _a11;
 const modelResponse = await model.doEmbed({
 values: chunk,
 abortSignal,
 headers
 });
 const embeddings2 = modelResponse.embeddings;
-const usage2 = (
+const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,
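Note: the embed/embedMany hunks above also show the usage fallback (modelResponse.usage, else { tokens: NaN }), so the reported token count is NaN when a provider returns no usage. A usage sketch, assuming an embedding model instance; the input strings are illustrative:

  import { embed, embedMany } from "ai";

  const { embedding, usage } = await embed({
    model: embeddingModel, // assumed embedding model instance
    value: "sunny day at the beach", // illustrative input
  });

  const { embeddings } = await embedMany({
    model: embeddingModel,
    values: ["first document", "second document"], // chunked per the third hunk above
  });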
@@ -643,7 +643,7 @@ async function download({
 url,
 fetchImplementation = fetch
 }) {
-var
+var _a10;
 const urlText = url.toString();
 try {
 const response = await fetchImplementation(urlText);
@@ -656,7 +656,7 @@ async function download({
 }
 return {
 data: new Uint8Array(await response.arrayBuffer()),
-mimeType: (
+mimeType: (_a10 = response.headers.get("content-type")) != null ? _a10 : void 0
 };
 } catch (error) {
 if (DownloadError.isInstance(error)) {
@@ -737,8 +737,8 @@ var dataContentSchema = z.union([
 z.custom(
 // Buffer might not be available in some environments such as CloudFlare:
 (value) => {
-var
-return (_b = (
+var _a10, _b;
+return (_b = (_a10 = globalThis.Buffer) == null ? void 0 : _a10.isBuffer(value)) != null ? _b : false;
 },
 { message: "Must be a Buffer" }
 )
@@ -866,7 +866,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
 role: "user",
 content: message.content.map(
 (part) => {
-var
+var _a10, _b, _c;
 switch (part.type) {
 case "text": {
 return {
@@ -889,7 +889,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
 return {
 type: "image",
 image: downloadedImage.data,
-mimeType: (
+mimeType: (_a10 = part.mimeType) != null ? _a10 : downloadedImage.mimeType,
 providerMetadata: part.experimental_providerMetadata
 };
 }
@@ -1321,8 +1321,8 @@ function prepareResponseHeaders(init, {
 contentType,
 dataStreamVersion
 }) {
-var
-const headers = new Headers((
+var _a10;
+const headers = new Headers((_a10 = init == null ? void 0 : init.headers) != null ? _a10 : {});
 if (!headers.has("Content-Type")) {
 headers.set("Content-Type", contentType);
 }
@@ -1636,7 +1636,7 @@ async function generateObject({
 experimental_telemetry: telemetry,
 ...settings
 }) {
-var
+var _a10;
 validateObjectGenerationInput({
 output,
 mode,
@@ -1654,7 +1654,7 @@ async function generateObject({
 headers,
 settings: { ...settings, maxRetries }
 });
-const tracer = getTracer({ isEnabled: (
+const tracer = getTracer({ isEnabled: (_a10 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a10 : false });
 return recordSpan({
 name: "ai.generateObject",
 attributes: selectTelemetryAttributes({
@@ -1815,7 +1815,7 @@ async function generateObject({
 }),
 tracer,
 fn: async (span2) => {
-var
+var _a11, _b;
 const result2 = await model.doGenerate({
 mode: {
 type: "object-tool",
@@ -1832,7 +1832,7 @@ async function generateObject({
 abortSignal,
 headers
 });
-const objectText = (_b = (
+const objectText = (_b = (_a11 = result2.toolCalls) == null ? void 0 : _a11[0]) == null ? void 0 : _b.args;
 if (objectText === void 0) {
 throw new NoObjectGeneratedError();
 }
@@ -1920,9 +1920,9 @@ var DefaultGenerateObjectResult = class {
 this.experimental_providerMetadata = options.providerMetadata;
 }
 toJsonResponse(init) {
-var
+var _a10;
 return new Response(JSON.stringify(this.object), {
-status: (
+status: (_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200,
 headers: prepareResponseHeaders(init, {
 contentType: "application/json; charset=utf-8"
 })
@@ -1975,17 +1975,17 @@ var DelayedPromise = class {
 return this.promise;
 }
 resolve(value) {
-var
+var _a10;
 this.status = { type: "resolved", value };
 if (this.promise) {
-(
+(_a10 = this._resolve) == null ? void 0 : _a10.call(this, value);
 }
 }
 reject(error) {
-var
+var _a10;
 this.status = { type: "rejected", error };
 if (this.promise) {
-(
+(_a10 = this._reject) == null ? void 0 : _a10.call(this, error);
 }
 }
 };
@@ -2008,7 +2008,7 @@ async function streamObject({
 onFinish,
 ...settings
 }) {
-var
+var _a10;
 validateObjectGenerationInput({
 output,
 mode,
@@ -2026,7 +2026,7 @@ async function streamObject({
 headers,
 settings: { ...settings, maxRetries }
 });
-const tracer = getTracer({ isEnabled: (
+const tracer = getTracer({ isEnabled: (_a10 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a10 : false });
 const retry = retryWithExponentialBackoff({ maxRetries });
 return recordSpan({
 name: "ai.streamObject",
@@ -2415,8 +2415,8 @@ var DefaultStreamObjectResult = class {
 });
 }
 pipeTextStreamToResponse(response, init) {
-var
-response.writeHead((
+var _a10;
+response.writeHead((_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -2438,9 +2438,9 @@ var DefaultStreamObjectResult = class {
 read();
 }
 toTextStreamResponse(init) {
-var
+var _a10;
 return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-status: (
+status: (_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8"
 })
@@ -2469,9 +2469,9 @@ function prepareToolsAndToolChoice({
 };
 }
 return {
-tools: Object.entries(tools).map(([
+tools: Object.entries(tools).map(([name10, tool2]) => ({
 type: "function",
-name:
+name: name10,
 description: tool2.description,
 parameters: asSchema2(tool2.parameters).jsonSchema
 })),
@@ -2620,14 +2620,14 @@ async function generateText({
 experimental_telemetry: telemetry,
 ...settings
 }) {
-var
+var _a10;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { ...settings, maxRetries }
 });
-const tracer = getTracer({ isEnabled: (
+const tracer = getTracer({ isEnabled: (_a10 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a10 : false });
 return recordSpan({
 name: "ai.generateText",
 attributes: selectTelemetryAttributes({
@@ -2647,7 +2647,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span) => {
-var
+var _a11, _b, _c, _d;
 const retry = retryWithExponentialBackoff({ maxRetries });
 const validatedPrompt = validatePrompt({
 system,
@@ -2733,7 +2733,7 @@ async function generateText({
 }
 })
 );
-currentToolCalls = ((
+currentToolCalls = ((_a11 = currentModelResponse.toolCalls) != null ? _a11 : []).map(
 (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
 );
 currentToolResults = tools == null ? [] : await executeTools({
@@ -3201,14 +3201,14 @@ async function streamText({
 onFinish,
 ...settings
 }) {
-var
+var _a10;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { ...settings, maxRetries }
 });
-const tracer = getTracer({ isEnabled: (
+const tracer = getTracer({ isEnabled: (_a10 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a10 : false });
 return recordSpan({
 name: "ai.streamText",
 attributes: selectTelemetryAttributes({
@@ -3585,8 +3585,8 @@ var DefaultStreamTextResult = class {
 return this.pipeDataStreamToResponse(response, init);
 }
 pipeDataStreamToResponse(response, init) {
-var
-response.writeHead((
+var _a10;
+response.writeHead((_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -3608,8 +3608,8 @@ var DefaultStreamTextResult = class {
 read();
 }
 pipeTextStreamToResponse(response, init) {
-var
-response.writeHead((
+var _a10;
+response.writeHead((_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -3634,7 +3634,7 @@ var DefaultStreamTextResult = class {
 return this.toDataStreamResponse(options);
 }
 toDataStreamResponse(options) {
-var
+var _a10;
 const init = options == null ? void 0 : "init" in options ? options.init : {
 headers: "headers" in options ? options.headers : void 0,
 status: "status" in options ? options.status : void 0,
@@ -3644,7 +3644,7 @@ var DefaultStreamTextResult = class {
 const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
 const stream = data ? mergeStreams(data.stream, this.toDataStream({ getErrorMessage: getErrorMessage4 })) : this.toDataStream({ getErrorMessage: getErrorMessage4 });
 return new Response(stream, {
-status: (
+status: (_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200,
 statusText: init == null ? void 0 : init.statusText,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8",
@@ -3653,9 +3653,9 @@ var DefaultStreamTextResult = class {
 });
 }
 toTextStreamResponse(init) {
-var
+var _a10;
 return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-status: (
+status: (_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8"
 })
@@ -3666,7 +3666,7 @@ var experimental_streamText = streamText;

 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
-var
+var _a10, _b, _c;
 const parts = [];
 for (const attachment of attachments) {
 let url;
@@ -3678,7 +3678,7 @@ function attachmentsToParts(attachments) {
 switch (url.protocol) {
 case "http:":
 case "https:": {
-if ((
+if ((_a10 = attachment.contentType) == null ? void 0 : _a10.startsWith("image/")) {
 parts.push({ type: "image", image: url });
 }
 break;
@@ -3786,110 +3786,62 @@ function convertToCoreMessages(messages) {
 return coreMessages;
 }

-// core/registry/
-import {
-
+// core/registry/custom-provider.ts
+import { NoSuchModelError } from "@ai-sdk/provider";
+function experimental_customProvider({
+languageModels,
+textEmbeddingModels,
+fallbackProvider
+}) {
+return {
+languageModel(modelId) {
+if (languageModels != null && modelId in languageModels) {
+return languageModels[modelId];
+}
+if (fallbackProvider) {
+return fallbackProvider.languageModel(modelId);
+}
+throw new NoSuchModelError({ modelId, modelType: "languageModel" });
+},
+textEmbeddingModel(modelId) {
+if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
+return textEmbeddingModels[modelId];
+}
+if (fallbackProvider) {
+return fallbackProvider.textEmbeddingModel(modelId);
+}
+throw new NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+}
+};
+}
+
+// core/registry/no-such-provider-error.ts
+import { AISDKError as AISDKError9, NoSuchModelError as NoSuchModelError2 } from "@ai-sdk/provider";
+var name9 = "AI_NoSuchProviderError";
 var marker9 = `vercel.ai.error.${name9}`;
 var symbol9 = Symbol.for(marker9);
 var _a9;
-var
-constructor({
-id,
-message = `Invalid model id: ${id}`
-}) {
-super({ name: name9, message });
-this[_a9] = true;
-this.id = id;
-}
-static isInstance(error) {
-return AISDKError9.hasMarker(error, marker9);
-}
-/**
-* @deprecated use `isInstance` instead
-*/
-static isInvalidModelIdError(error) {
-return error instanceof Error && error.name === name9 && typeof error.id === "string";
-}
-/**
-* @deprecated Do not use this method. It will be removed in the next major version.
-*/
-toJSON() {
-return {
-name: this.name,
-message: this.message,
-stack: this.stack,
-id: this.id
-};
-}
-};
-_a9 = symbol9;
-
-// core/registry/no-such-model-error.ts
-import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
-var name10 = "AI_NoSuchModelError";
-var marker10 = `vercel.ai.error.${name10}`;
-var symbol10 = Symbol.for(marker10);
-var _a10;
-var NoSuchModelError = class extends AISDKError10 {
+var NoSuchProviderError = class extends NoSuchModelError2 {
 constructor({
 modelId,
 modelType,
-message = `No such ${modelType}: ${modelId}`
-}) {
-super({ name: name10, message });
-this[_a10] = true;
-this.modelId = modelId;
-this.modelType = modelType;
-}
-static isInstance(error) {
-return AISDKError10.hasMarker(error, marker10);
-}
-/**
-* @deprecated use `isInstance` instead
-*/
-static isNoSuchModelError(error) {
-return error instanceof Error && error.name === name10 && typeof error.modelId === "string" && typeof error.modelType === "string";
-}
-/**
-* @deprecated Do not use this method. It will be removed in the next major version.
-*/
-toJSON() {
-return {
-name: this.name,
-message: this.message,
-stack: this.stack,
-modelId: this.modelId,
-modelType: this.modelType
-};
-}
-};
-_a10 = symbol10;
-
-// core/registry/no-such-provider-error.ts
-import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
-var name11 = "AI_NoSuchProviderError";
-var marker11 = `vercel.ai.error.${name11}`;
-var symbol11 = Symbol.for(marker11);
-var _a11;
-var NoSuchProviderError = class extends AISDKError11 {
-constructor({
 providerId,
 availableProviders,
 message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
 }) {
-super({
-this[
+super({ errorName: name9, modelId, modelType, message });
+this[_a9] = true;
 this.providerId = providerId;
 this.availableProviders = availableProviders;
 }
 static isInstance(error) {
-return
+return AISDKError9.hasMarker(error, marker9);
 }
 /**
 * @deprecated use `isInstance` instead
 */
 static isNoSuchProviderError(error) {
-return error instanceof Error && error.name ===
+return error instanceof Error && error.name === name9 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
 }
 /**
 * @deprecated Do not use this method. It will be removed in the next major version.
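Note: the hunk above is the core of this release. The bundled InvalidModelIdError and NoSuchModelError classes are dropped in favor of imports from @ai-sdk/provider, NoSuchProviderError now extends NoSuchModelError, and a new experimental_customProvider factory is added: it resolves model ids from the supplied maps, defers unknown ids to fallbackProvider, and otherwise throws NoSuchModelError. A usage sketch; the "fast" id and the model/provider instances are illustrative:

  import { experimental_customProvider } from "ai";

  const myProvider = experimental_customProvider({
    languageModels: {
      fast: someConfiguredModel, // illustrative pre-configured model instance
    },
    fallbackProvider: baseProvider, // optional; unknown ids fall through to it
  });

  const model = myProvider.languageModel("fast"); // throws NoSuchModelError if unresolved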
@@ -3899,14 +3851,17 @@ var NoSuchProviderError = class extends AISDKError11 {
 name: this.name,
 message: this.message,
 stack: this.stack,
+modelId: this.modelId,
+modelType: this.modelType,
 providerId: this.providerId,
 availableProviders: this.availableProviders
 };
 }
 };
-
+_a9 = symbol9;

 // core/registry/provider-registry.ts
+import { NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
 function experimental_createProviderRegistry(providers) {
 const registry = new DefaultProviderRegistry();
 for (const [id, provider] of Object.entries(providers)) {
@@ -3929,35 +3884,41 @@ var DefaultProviderRegistry = class {
 const provider = this.providers[id];
 if (provider == null) {
 throw new NoSuchProviderError({
+modelId: id,
+modelType: "languageModel",
 providerId: id,
 availableProviders: Object.keys(this.providers)
 });
 }
 return provider;
 }
-splitId(id) {
+splitId(id, modelType) {
 const index = id.indexOf(":");
 if (index === -1) {
-throw new
+throw new NoSuchModelError3({
+modelId: id,
+modelType,
+message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId:modelId")`
+});
 }
 return [id.slice(0, index), id.slice(index + 1)];
 }
 languageModel(id) {
-var
-const [providerId, modelId] = this.splitId(id);
-const model = (_b = (
+var _a10, _b;
+const [providerId, modelId] = this.splitId(id, "languageModel");
+const model = (_b = (_a10 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a10, modelId);
 if (model == null) {
-throw new
+throw new NoSuchModelError3({ modelId: id, modelType: "languageModel" });
 }
 return model;
 }
 textEmbeddingModel(id) {
-var
-const [providerId, modelId] = this.splitId(id);
+var _a10, _b, _c;
+const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
 const provider = this.getProvider(providerId);
-const model = (_c = (
+const model = (_c = (_a10 = provider.textEmbeddingModel) == null ? void 0 : _a10.call(provider, modelId)) != null ? _c : "textEmbedding" in provider ? (_b = provider.textEmbedding) == null ? void 0 : _b.call(provider, modelId) : void 0;
 if (model == null) {
-throw new
+throw new NoSuchModelError3({
 modelId: id,
 modelType: "textEmbeddingModel"
 });
@@ -3998,13 +3959,15 @@ function magnitude(vector) {

 // errors/index.ts
 import {
-AISDKError as
+AISDKError as AISDKError10,
 APICallError as APICallError2,
 EmptyResponseBodyError,
 InvalidPromptError as InvalidPromptError2,
 InvalidResponseDataError,
 JSONParseError,
 LoadAPIKeyError,
+NoContentGeneratedError,
+NoSuchModelError as NoSuchModelError4,
 TypeValidationError as TypeValidationError2,
 UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
@@ -4127,8 +4090,8 @@ function readableFromAsyncIterable(iterable) {
 controller.enqueue(value);
 },
 async cancel(reason) {
-var
-await ((
+var _a10;
+await ((_a10 = it.return) == null ? void 0 : _a10.call(it, reason));
 }
 });
 }
@@ -4267,7 +4230,7 @@ import {
 function AssistantResponse({ threadId, messageId }, process2) {
 const stream = new ReadableStream({
 async start(controller) {
-var
+var _a10;
 const textEncoder = new TextEncoder();
 const sendMessage = (message) => {
 controller.enqueue(
@@ -4285,7 +4248,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 );
 };
 const forwardStream = async (stream2) => {
-var
+var _a11, _b;
 let result = void 0;
 for await (const value of stream2) {
 switch (value.event) {
@@ -4302,7 +4265,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 break;
 }
 case "thread.message.delta": {
-const content = (
+const content = (_a11 = value.data.delta.content) == null ? void 0 : _a11[0];
 if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
 controller.enqueue(
 textEncoder.encode(
@@ -4338,7 +4301,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 forwardStream
 });
 } catch (error) {
-sendError((
+sendError((_a10 = error.message) != null ? _a10 : `${error}`);
 } finally {
 controller.close();
 }
@@ -4359,9 +4322,9 @@ var experimental_AssistantResponse = AssistantResponse;

 // streams/aws-bedrock-stream.ts
 async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
-var
+var _a10, _b;
 const decoder = new TextDecoder();
-for await (const chunk of (
+for await (const chunk of (_a10 = response.body) != null ? _a10 : []) {
 const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
 if (bytes != null) {
 const chunkText = decoder.decode(bytes);
@@ -4375,8 +4338,8 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
 }
 function AWSBedrockAnthropicMessagesStream(response, callbacks) {
 return AWSBedrockStream(response, callbacks, (chunk) => {
-var
-return (
+var _a10;
+return (_a10 = chunk.delta) == null ? void 0 : _a10.text;
 });
 }
 function AWSBedrockAnthropicStream(response, callbacks) {
@@ -4423,8 +4386,8 @@ async function readAndProcessLines(reader, controller) {
 controller.close();
 }
 function createParser2(res) {
-var
-const reader = (
+var _a10;
+const reader = (_a10 = res.body) == null ? void 0 : _a10.getReader();
 return new ReadableStream({
 async start(controller) {
 if (!reader) {
@@ -4454,9 +4417,9 @@ function CohereStream(reader, callbacks) {

 // streams/google-generative-ai-stream.ts
 async function* streamable3(response) {
-var
+var _a10, _b, _c;
 for await (const chunk of response.stream) {
-const parts = (_c = (_b = (
+const parts = (_c = (_b = (_a10 = chunk.candidates) == null ? void 0 : _a10[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
 if (parts === void 0) {
 continue;
 }
@@ -4475,13 +4438,13 @@ function createParser3(res) {
 const trimStartOfStream = trimStartOfStreamHelper();
 return new ReadableStream({
 async pull(controller) {
-var
+var _a10, _b;
 const { value, done } = await res.next();
 if (done) {
 controller.close();
 return;
 }
-const text = trimStartOfStream((_b = (
+const text = trimStartOfStream((_b = (_a10 = value.token) == null ? void 0 : _a10.text) != null ? _b : "");
 if (!text)
 return;
 if (value.generated_text != null && value.generated_text.length > 0) {
@@ -4506,11 +4469,11 @@ function InkeepStream(res, callbacks) {
 let chat_session_id = "";
 let records_cited;
 const inkeepEventParser = (data, options) => {
-var
+var _a10, _b;
 const { event } = options;
 if (event === "records_cited") {
 records_cited = JSON.parse(data);
-(
+(_a10 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a10.call(callbacks, records_cited);
 }
 if (event === "message_chunk") {
 const inkeepMessageChunk = JSON.parse(data);
@@ -4523,12 +4486,12 @@ function InkeepStream(res, callbacks) {
 passThroughCallbacks = {
 ...passThroughCallbacks,
 onFinal: (completion) => {
-var
+var _a10;
 const inkeepOnFinalMetadata = {
 chat_session_id,
 records_cited
 };
-(
+(_a10 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a10.call(callbacks, completion, inkeepOnFinalMetadata);
 }
 };
 return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
@@ -4550,7 +4513,7 @@ function toDataStream(stream, callbacks) {
 return stream.pipeThrough(
 new TransformStream({
 transform: async (value, controller) => {
-var
+var _a10;
 if (typeof value === "string") {
 controller.enqueue(value);
 return;
@@ -4558,7 +4521,7 @@ function toDataStream(stream, callbacks) {
 if ("event" in value) {
 if (value.event === "on_chat_model_stream") {
 forwardAIMessageChunk(
-(
+(_a10 = value.data) == null ? void 0 : _a10.chunk,
 controller
 );
 }
@@ -4570,13 +4533,13 @@ function toDataStream(stream, callbacks) {
 ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 function toDataStreamResponse(stream, options) {
-var
+var _a10;
 const dataStream = toDataStream(stream, options == null ? void 0 : options.callbacks);
 const data = options == null ? void 0 : options.data;
 const init = options == null ? void 0 : options.init;
 const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
 return new Response(responseStream, {
-status: (
+status: (_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200,
 statusText: init == null ? void 0 : init.statusText,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8",
@@ -4658,9 +4621,9 @@ function LangChainStream(callbacks) {

 // streams/mistral-stream.ts
 async function* streamable4(stream) {
-var
+var _a10, _b;
 for await (const chunk of stream) {
-const content = (_b = (
+const content = (_b = (_a10 = chunk.choices[0]) == null ? void 0 : _a10.delta) == null ? void 0 : _b.content;
 if (content === void 0 || content === "") {
 continue;
 }
@@ -4693,10 +4656,10 @@ async function* streamable5(stream) {
 model: chunk.model,
 // not exposed by Azure API
 choices: chunk.choices.map((choice) => {
-var
+var _a10, _b, _c, _d, _e, _f, _g;
 return {
 delta: {
-content: (
+content: (_a10 = choice.delta) == null ? void 0 : _a10.content,
 function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
 role: (_c = choice.delta) == null ? void 0 : _c.role,
 tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
@@ -4721,9 +4684,9 @@ function chunkToText() {
 const trimStartOfStream = trimStartOfStreamHelper();
 let isFunctionStreamingIn;
 return (json) => {
-var
+var _a10, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
 if (isChatCompletionChunk(json)) {
-const delta = (
+const delta = (_a10 = json.choices[0]) == null ? void 0 : _a10.delta;
 if ((_b = delta.function_call) == null ? void 0 : _b.name) {
 isFunctionStreamingIn = true;
 return {
@@ -4996,8 +4959,8 @@ function createFunctionCallTransformer(callbacks) {

 // streams/replicate-stream.ts
 async function ReplicateStream(res, cb, options) {
-var
-const url = (
+var _a10;
+const url = (_a10 = res.urls) == null ? void 0 : _a10.stream;
 if (!url) {
 if (res.error)
 throw new Error(res.error);
@@ -5018,8 +4981,8 @@ async function ReplicateStream(res, cb, options) {

 // streams/stream-to-response.ts
 function streamToResponse(res, response, init, data) {
-var
-response.writeHead((
+var _a10;
+response.writeHead((_a10 = init == null ? void 0 : init.status) != null ? _a10 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -5062,7 +5025,7 @@ var StreamingTextResponse = class extends Response {
 var generateId2 = generateIdImpl;
 var nanoid = generateIdImpl;
 export {
-
+AISDKError10 as AISDKError,
 AIStream,
 APICallError2 as APICallError,
 AWSBedrockAnthropicMessagesStream,
@@ -5081,7 +5044,6 @@ export {
 InvalidArgumentError,
 InvalidDataContentError,
 InvalidMessageRoleError,
-InvalidModelIdError,
 InvalidPromptError2 as InvalidPromptError,
 InvalidResponseDataError,
 InvalidToolArgumentsError,
@@ -5090,8 +5052,9 @@ export {
 LangChainStream,
 LoadAPIKeyError,
 MistralStream,
+NoContentGeneratedError,
 NoObjectGeneratedError,
-NoSuchModelError,
+NoSuchModelError4 as NoSuchModelError,
 NoSuchProviderError,
 NoSuchToolError,
 OpenAIStream,
@@ -5112,6 +5075,7 @@ export {
 experimental_StreamData,
 experimental_createModelRegistry,
 experimental_createProviderRegistry,
+experimental_customProvider,
 experimental_generateObject,
 experimental_generateText,
 experimental_streamObject,