ai 3.3.18 → 3.3.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +49 -59
- package/dist/index.d.ts +49 -59
- package/dist/index.js +172 -138
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +153 -120
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
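
The substantive change in this release is in `convertToCoreMessages`: a new exported `MessageConversionError` is thrown when a tool invocation has no `result` and when a message role cannot be converted, and the `function`, `data`, and `tool` roles are now explicitly ignored. Every other hunk is mechanical renumbering of esbuild's generated identifiers (`_aN`, `nameN`, `AISDKErrorN`) caused by the newly inserted `core/prompt/message-conversion-error.ts` module. A minimal sketch of how the new error can surface; the usage is hypothetical (not taken from this diff), and the input message shape is assumed from the v3 UI message format:

// Hypothetical sketch: both names are exported by "ai" in 3.3.19
// (see the export list at the end of this diff).
import { convertToCoreMessages, MessageConversionError } from "ai";

try {
  convertToCoreMessages([
    {
      role: "assistant",
      content: "",
      // A tool invocation that never received a result now throws a
      // MessageConversionError instead of being converted with an
      // undefined result (assumed input shape):
      toolInvocations: [
        { state: "call", toolCallId: "call_1", toolName: "weather", args: { city: "Berlin" } },
      ],
    },
  ]);
} catch (error) {
  if (MessageConversionError.isInstance(error)) {
    console.error(error.message);         // "ToolInvocation must have a result: ..."
    console.error(error.originalMessage); // the offending input message
  } else {
    throw error;
  }
}
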
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
-  for (var
-    __defProp(target,
+  for (var name11 in all)
+    __defProp(target, name11, { get: all[name11], enumerable: true });
 };
 
 // streams/index.ts
@@ -142,7 +142,7 @@ function getBaseTelemetryAttributes({
   telemetry,
   headers
 }) {
-  var
+  var _a11;
   return {
     "ai.model.provider": model.provider,
     "ai.model.id": model.modelId,
@@ -152,7 +152,7 @@ function getBaseTelemetryAttributes({
       return attributes;
     }, {}),
     // add metadata as attributes:
-    ...Object.entries((
+    ...Object.entries((_a11 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a11 : {}).reduce(
       (attributes, [key, value]) => {
         attributes[`ai.telemetry.metadata.${key}`] = value;
         return attributes;
@@ -177,7 +177,7 @@ var noopTracer = {
   startSpan() {
     return noopSpan;
   },
-  startActiveSpan(
+  startActiveSpan(name11, arg1, arg2, arg3) {
     if (typeof arg1 === "function") {
       return arg1(noopSpan);
     }
@@ -245,13 +245,13 @@ function getTracer({ isEnabled }) {
 // core/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
 function recordSpan({
-  name:
+  name: name11,
   tracer,
   attributes,
   fn,
   endWhenDone = true
 }) {
-  return tracer.startActiveSpan(
+  return tracer.startActiveSpan(name11, { attributes }, async (span) => {
     try {
       const result = await fn(span);
       if (endWhenDone) {
@@ -317,14 +317,14 @@ async function embed({
   headers,
   experimental_telemetry: telemetry
 }) {
-  var
+  var _a11;
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
     headers,
     settings: { maxRetries }
   });
-  const tracer = getTracer({ isEnabled: (
+  const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false });
   return recordSpan({
     name: "ai.embed",
     attributes: selectTelemetryAttributes({
@@ -357,14 +357,14 @@ async function embed({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var
+      var _a12;
       const modelResponse = await model.doEmbed({
         values: [value],
         abortSignal,
         headers
       });
       const embedding2 = modelResponse.embeddings[0];
-      const usage2 = (
+      const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
       doEmbedSpan.setAttributes(
         selectTelemetryAttributes({
           telemetry,
@@ -430,14 +430,14 @@ async function embedMany({
   headers,
   experimental_telemetry: telemetry
 }) {
-  var
+  var _a11;
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
     headers,
     settings: { maxRetries }
   });
-  const tracer = getTracer({ isEnabled: (
+  const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false });
   return recordSpan({
     name: "ai.embedMany",
     attributes: selectTelemetryAttributes({
@@ -475,14 +475,14 @@ async function embedMany({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var
+      var _a12;
       const modelResponse = await model.doEmbed({
         values,
         abortSignal,
         headers
       });
       const embeddings3 = modelResponse.embeddings;
-      const usage2 = (
+      const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
       doEmbedSpan.setAttributes(
         selectTelemetryAttributes({
           telemetry,
@@ -534,14 +534,14 @@ async function embedMany({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var
+      var _a12;
       const modelResponse = await model.doEmbed({
         values: chunk,
         abortSignal,
         headers
       });
       const embeddings2 = modelResponse.embeddings;
-      const usage2 = (
+      const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
       doEmbedSpan.setAttributes(
         selectTelemetryAttributes({
           telemetry,
@@ -643,7 +643,7 @@ async function download({
   url,
   fetchImplementation = fetch
 }) {
-  var
+  var _a11;
   const urlText = url.toString();
   try {
     const response = await fetchImplementation(urlText);
@@ -656,7 +656,7 @@ async function download({
     }
     return {
       data: new Uint8Array(await response.arrayBuffer()),
-      mimeType: (
+      mimeType: (_a11 = response.headers.get("content-type")) != null ? _a11 : void 0
     };
   } catch (error) {
     if (DownloadError.isInstance(error)) {
@@ -737,8 +737,8 @@ var dataContentSchema = z.union([
   z.custom(
     // Buffer might not be available in some environments such as CloudFlare:
     (value) => {
-      var
-      return (_b = (
+      var _a11, _b;
+      return (_b = (_a11 = globalThis.Buffer) == null ? void 0 : _a11.isBuffer(value)) != null ? _b : false;
     },
     { message: "Must be a Buffer" }
   )
@@ -866,7 +866,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
     role: "user",
     content: message.content.map(
       (part) => {
-        var
+        var _a11, _b, _c;
         switch (part.type) {
           case "text": {
             return {
@@ -889,7 +889,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
             return {
               type: "image",
              image: downloadedImage.data,
-              mimeType: (
+              mimeType: (_a11 = part.mimeType) != null ? _a11 : downloadedImage.mimeType,
               providerMetadata: part.experimental_providerMetadata
             };
           }
@@ -1321,8 +1321,8 @@ function prepareResponseHeaders(init, {
   contentType,
   dataStreamVersion
 }) {
-  var
-  const headers = new Headers((
+  var _a11;
+  const headers = new Headers((_a11 = init == null ? void 0 : init.headers) != null ? _a11 : {});
   if (!headers.has("Content-Type")) {
     headers.set("Content-Type", contentType);
   }
@@ -1636,7 +1636,7 @@ async function generateObject({
   experimental_telemetry: telemetry,
   ...settings
 }) {
-  var
+  var _a11;
   validateObjectGenerationInput({
     output,
     mode,
@@ -1654,7 +1654,7 @@ async function generateObject({
     headers,
     settings: { ...settings, maxRetries }
   });
-  const tracer = getTracer({ isEnabled: (
+  const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false });
   return recordSpan({
     name: "ai.generateObject",
     attributes: selectTelemetryAttributes({
@@ -1815,7 +1815,7 @@ async function generateObject({
         }),
         tracer,
         fn: async (span2) => {
-          var
+          var _a12, _b;
           const result2 = await model.doGenerate({
             mode: {
               type: "object-tool",
@@ -1832,7 +1832,7 @@ async function generateObject({
             abortSignal,
             headers
           });
-          const objectText = (_b = (
+          const objectText = (_b = (_a12 = result2.toolCalls) == null ? void 0 : _a12[0]) == null ? void 0 : _b.args;
           if (objectText === void 0) {
             throw new NoObjectGeneratedError();
           }
@@ -1920,9 +1920,9 @@ var DefaultGenerateObjectResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
   }
   toJsonResponse(init) {
-    var
+    var _a11;
     return new Response(JSON.stringify(this.object), {
-      status: (
+      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
       headers: prepareResponseHeaders(init, {
         contentType: "application/json; charset=utf-8"
       })
@@ -1975,17 +1975,17 @@ var DelayedPromise = class {
     return this.promise;
   }
   resolve(value) {
-    var
+    var _a11;
     this.status = { type: "resolved", value };
     if (this.promise) {
-      (
+      (_a11 = this._resolve) == null ? void 0 : _a11.call(this, value);
     }
   }
   reject(error) {
-    var
+    var _a11;
     this.status = { type: "rejected", error };
     if (this.promise) {
-      (
+      (_a11 = this._reject) == null ? void 0 : _a11.call(this, error);
     }
   }
 };
@@ -2008,7 +2008,7 @@ async function streamObject({
   onFinish,
   ...settings
 }) {
-  var
+  var _a11;
   validateObjectGenerationInput({
     output,
     mode,
@@ -2026,7 +2026,7 @@ async function streamObject({
     headers,
     settings: { ...settings, maxRetries }
   });
-  const tracer = getTracer({ isEnabled: (
+  const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false });
   const retry = retryWithExponentialBackoff({ maxRetries });
   return recordSpan({
     name: "ai.streamObject",
@@ -2415,8 +2415,8 @@ var DefaultStreamObjectResult = class {
     });
   }
   pipeTextStreamToResponse(response, init) {
-    var
-    response.writeHead((
+    var _a11;
+    response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
       "Content-Type": "text/plain; charset=utf-8",
       ...init == null ? void 0 : init.headers
     });
@@ -2438,9 +2438,9 @@ var DefaultStreamObjectResult = class {
     read();
   }
   toTextStreamResponse(init) {
-    var
+    var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (
+      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
       headers: prepareResponseHeaders(init, {
         contentType: "text/plain; charset=utf-8"
       })
@@ -2469,9 +2469,9 @@ function prepareToolsAndToolChoice({
     };
   }
   return {
-    tools: Object.entries(tools).map(([
+    tools: Object.entries(tools).map(([name11, tool2]) => ({
       type: "function",
-      name:
+      name: name11,
       description: tool2.description,
       parameters: asSchema2(tool2.parameters).jsonSchema
     })),
@@ -2620,14 +2620,14 @@ async function generateText({
   experimental_telemetry: telemetry,
   ...settings
 }) {
-  var
+  var _a11;
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
     headers,
     settings: { ...settings, maxRetries }
   });
-  const tracer = getTracer({ isEnabled: (
+  const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false });
   return recordSpan({
     name: "ai.generateText",
     attributes: selectTelemetryAttributes({
@@ -2647,7 +2647,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var
+      var _a12, _b, _c, _d;
      const retry = retryWithExponentialBackoff({ maxRetries });
      const validatedPrompt = validatePrompt({
        system,
@@ -2733,7 +2733,7 @@ async function generateText({
           }
         })
       );
-      currentToolCalls = ((
+      currentToolCalls = ((_a12 = currentModelResponse.toolCalls) != null ? _a12 : []).map(
        (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
      );
      currentToolResults = tools == null ? [] : await executeTools({
@@ -3201,14 +3201,14 @@ async function streamText({
   onFinish,
   ...settings
 }) {
-  var
+  var _a11;
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
     headers,
     settings: { ...settings, maxRetries }
   });
-  const tracer = getTracer({ isEnabled: (
+  const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false });
   return recordSpan({
     name: "ai.streamText",
     attributes: selectTelemetryAttributes({
@@ -3585,8 +3585,8 @@ var DefaultStreamTextResult = class {
     return this.pipeDataStreamToResponse(response, init);
   }
   pipeDataStreamToResponse(response, init) {
-    var
-    response.writeHead((
+    var _a11;
+    response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
       "Content-Type": "text/plain; charset=utf-8",
       ...init == null ? void 0 : init.headers
     });
@@ -3608,8 +3608,8 @@ var DefaultStreamTextResult = class {
     read();
   }
   pipeTextStreamToResponse(response, init) {
-    var
-    response.writeHead((
+    var _a11;
+    response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
       "Content-Type": "text/plain; charset=utf-8",
       ...init == null ? void 0 : init.headers
     });
@@ -3634,7 +3634,7 @@ var DefaultStreamTextResult = class {
     return this.toDataStreamResponse(options);
   }
   toDataStreamResponse(options) {
-    var
+    var _a11;
     const init = options == null ? void 0 : "init" in options ? options.init : {
       headers: "headers" in options ? options.headers : void 0,
       status: "status" in options ? options.status : void 0,
@@ -3644,7 +3644,7 @@ var DefaultStreamTextResult = class {
     const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
     const stream = data ? mergeStreams(data.stream, this.toDataStream({ getErrorMessage: getErrorMessage4 })) : this.toDataStream({ getErrorMessage: getErrorMessage4 });
     return new Response(stream, {
-      status: (
+      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
       statusText: init == null ? void 0 : init.statusText,
       headers: prepareResponseHeaders(init, {
         contentType: "text/plain; charset=utf-8",
@@ -3653,9 +3653,9 @@ var DefaultStreamTextResult = class {
     });
   }
   toTextStreamResponse(init) {
-    var
+    var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (
+      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
       headers: prepareResponseHeaders(init, {
         contentType: "text/plain; charset=utf-8"
       })
@@ -3666,7 +3666,7 @@ var experimental_streamText = streamText;
 
 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
-  var
+  var _a11, _b, _c;
   const parts = [];
   for (const attachment of attachments) {
     let url;
@@ -3678,7 +3678,7 @@ function attachmentsToParts(attachments) {
     switch (url.protocol) {
       case "http:":
       case "https:": {
-        if ((
+        if ((_a11 = attachment.contentType) == null ? void 0 : _a11.startsWith("image/")) {
          parts.push({ type: "image", image: url });
        }
        break;
@@ -3719,15 +3719,32 @@ function attachmentsToParts(attachments) {
   return parts;
 }
 
+// core/prompt/message-conversion-error.ts
+import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
+var name9 = "AI_MessageConversionError";
+var marker9 = `vercel.ai.error.${name9}`;
+var symbol9 = Symbol.for(marker9);
+var _a9;
+var MessageConversionError = class extends AISDKError9 {
+  constructor({
+    originalMessage,
+    message
+  }) {
+    super({ name: name9, message });
+    this[_a9] = true;
+    this.originalMessage = originalMessage;
+  }
+  static isInstance(error) {
+    return AISDKError9.hasMarker(error, marker9);
+  }
+};
+_a9 = symbol9;
+
 // core/prompt/convert-to-core-messages.ts
 function convertToCoreMessages(messages) {
   const coreMessages = [];
-  for (const {
-    role,
-    content,
-    toolInvocations,
-    experimental_attachments
-  } of messages) {
+  for (const message of messages) {
+    const { role, content, toolInvocations, experimental_attachments } = message;
     switch (role) {
       case "system": {
         coreMessages.push({
@@ -3765,21 +3782,36 @@ function convertToCoreMessages(messages) {
         });
         coreMessages.push({
           role: "tool",
-          content: toolInvocations.map(
-            (
+          content: toolInvocations.map((ToolInvocation) => {
+            if (!("result" in ToolInvocation)) {
+              throw new MessageConversionError({
+                originalMessage: message,
+                message: "ToolInvocation must have a result: " + JSON.stringify(ToolInvocation)
+              });
+            }
+            const { toolCallId, toolName, args, result } = ToolInvocation;
+            return {
              type: "tool-result",
              toolCallId,
              toolName,
              args,
              result
-            }
-          )
+            };
+          })
         });
         break;
       }
+      case "function":
+      case "data":
+      case "tool": {
+        break;
+      }
       default: {
         const _exhaustiveCheck = role;
-        throw new
+        throw new MessageConversionError({
+          originalMessage: message,
+          message: `Unsupported role: ${_exhaustiveCheck}`
+        });
       }
     }
   }
@@ -3816,11 +3848,11 @@ function experimental_customProvider({
 }
 
 // core/registry/no-such-provider-error.ts
-import { AISDKError as
-var
-var
-var
-var
+import { AISDKError as AISDKError10, NoSuchModelError as NoSuchModelError2 } from "@ai-sdk/provider";
+var name10 = "AI_NoSuchProviderError";
+var marker10 = `vercel.ai.error.${name10}`;
+var symbol10 = Symbol.for(marker10);
+var _a10;
 var NoSuchProviderError = class extends NoSuchModelError2 {
   constructor({
     modelId,
@@ -3829,19 +3861,19 @@ var NoSuchProviderError = class extends NoSuchModelError2 {
     availableProviders,
     message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
   }) {
-    super({ errorName:
-    this[
+    super({ errorName: name10, modelId, modelType, message });
+    this[_a10] = true;
     this.providerId = providerId;
     this.availableProviders = availableProviders;
   }
   static isInstance(error) {
-    return
+    return AISDKError10.hasMarker(error, marker10);
   }
   /**
   * @deprecated use `isInstance` instead
   */
   static isNoSuchProviderError(error) {
-    return error instanceof Error && error.name ===
+    return error instanceof Error && error.name === name10 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
   }
   /**
   * @deprecated Do not use this method. It will be removed in the next major version.
@@ -3858,7 +3890,7 @@ var NoSuchProviderError = class extends NoSuchModelError2 {
     };
   }
 };
-
+_a10 = symbol10;
 
 // core/registry/provider-registry.ts
 import { NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
@@ -3904,19 +3936,19 @@ var DefaultProviderRegistry = class {
     return [id.slice(0, index), id.slice(index + 1)];
   }
   languageModel(id) {
-    var
+    var _a11, _b;
     const [providerId, modelId] = this.splitId(id, "languageModel");
-    const model = (_b = (
+    const model = (_b = (_a11 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a11, modelId);
     if (model == null) {
       throw new NoSuchModelError3({ modelId: id, modelType: "languageModel" });
     }
     return model;
   }
   textEmbeddingModel(id) {
-    var
+    var _a11, _b, _c;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (_c = (
+    const model = (_c = (_a11 = provider.textEmbeddingModel) == null ? void 0 : _a11.call(provider, modelId)) != null ? _c : "textEmbedding" in provider ? (_b = provider.textEmbedding) == null ? void 0 : _b.call(provider, modelId) : void 0;
     if (model == null) {
       throw new NoSuchModelError3({
         modelId: id,
@@ -3959,7 +3991,7 @@ function magnitude(vector) {
 
 // errors/index.ts
 import {
-  AISDKError as
+  AISDKError as AISDKError11,
   APICallError as APICallError2,
   EmptyResponseBodyError,
   InvalidPromptError as InvalidPromptError2,
@@ -4090,8 +4122,8 @@ function readableFromAsyncIterable(iterable) {
       controller.enqueue(value);
     },
     async cancel(reason) {
-      var
-      await ((
+      var _a11;
+      await ((_a11 = it.return) == null ? void 0 : _a11.call(it, reason));
     }
   });
 }
@@ -4230,7 +4262,7 @@ import {
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
-      var
+      var _a11;
       const textEncoder = new TextEncoder();
       const sendMessage = (message) => {
         controller.enqueue(
@@ -4248,7 +4280,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
        );
      };
      const forwardStream = async (stream2) => {
-        var
+        var _a12, _b;
        let result = void 0;
        for await (const value of stream2) {
          switch (value.event) {
@@ -4265,7 +4297,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
              break;
            }
            case "thread.message.delta": {
-              const content = (
+              const content = (_a12 = value.data.delta.content) == null ? void 0 : _a12[0];
              if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                controller.enqueue(
                  textEncoder.encode(
@@ -4301,7 +4333,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
          forwardStream
        });
      } catch (error) {
-        sendError((
+        sendError((_a11 = error.message) != null ? _a11 : `${error}`);
      } finally {
        controller.close();
      }
@@ -4322,9 +4354,9 @@ var experimental_AssistantResponse = AssistantResponse;
 
 // streams/aws-bedrock-stream.ts
 async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
-  var
+  var _a11, _b;
   const decoder = new TextDecoder();
-  for await (const chunk of (
+  for await (const chunk of (_a11 = response.body) != null ? _a11 : []) {
     const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
     if (bytes != null) {
       const chunkText = decoder.decode(bytes);
@@ -4338,8 +4370,8 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
 }
 function AWSBedrockAnthropicMessagesStream(response, callbacks) {
   return AWSBedrockStream(response, callbacks, (chunk) => {
-    var
-    return (
+    var _a11;
+    return (_a11 = chunk.delta) == null ? void 0 : _a11.text;
   });
 }
 function AWSBedrockAnthropicStream(response, callbacks) {
@@ -4386,8 +4418,8 @@ async function readAndProcessLines(reader, controller) {
   controller.close();
 }
 function createParser2(res) {
-  var
-  const reader = (
+  var _a11;
+  const reader = (_a11 = res.body) == null ? void 0 : _a11.getReader();
   return new ReadableStream({
     async start(controller) {
       if (!reader) {
@@ -4417,9 +4449,9 @@ function CohereStream(reader, callbacks) {
 
 // streams/google-generative-ai-stream.ts
 async function* streamable3(response) {
-  var
+  var _a11, _b, _c;
   for await (const chunk of response.stream) {
-    const parts = (_c = (_b = (
+    const parts = (_c = (_b = (_a11 = chunk.candidates) == null ? void 0 : _a11[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
     if (parts === void 0) {
       continue;
     }
@@ -4438,13 +4470,13 @@ function createParser3(res) {
   const trimStartOfStream = trimStartOfStreamHelper();
   return new ReadableStream({
     async pull(controller) {
-      var
+      var _a11, _b;
       const { value, done } = await res.next();
       if (done) {
         controller.close();
         return;
       }
-      const text = trimStartOfStream((_b = (
+      const text = trimStartOfStream((_b = (_a11 = value.token) == null ? void 0 : _a11.text) != null ? _b : "");
       if (!text)
         return;
       if (value.generated_text != null && value.generated_text.length > 0) {
@@ -4469,11 +4501,11 @@ function InkeepStream(res, callbacks) {
   let chat_session_id = "";
   let records_cited;
   const inkeepEventParser = (data, options) => {
-    var
+    var _a11, _b;
     const { event } = options;
     if (event === "records_cited") {
       records_cited = JSON.parse(data);
-      (
+      (_a11 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a11.call(callbacks, records_cited);
     }
     if (event === "message_chunk") {
       const inkeepMessageChunk = JSON.parse(data);
@@ -4486,12 +4518,12 @@ function InkeepStream(res, callbacks) {
   passThroughCallbacks = {
     ...passThroughCallbacks,
     onFinal: (completion) => {
-      var
+      var _a11;
       const inkeepOnFinalMetadata = {
         chat_session_id,
         records_cited
       };
-      (
+      (_a11 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a11.call(callbacks, completion, inkeepOnFinalMetadata);
     }
   };
   return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
@@ -4513,7 +4545,7 @@ function toDataStream(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
       transform: async (value, controller) => {
-        var
+        var _a11;
         if (typeof value === "string") {
           controller.enqueue(value);
           return;
@@ -4521,7 +4553,7 @@ function toDataStream(stream, callbacks) {
         if ("event" in value) {
           if (value.event === "on_chat_model_stream") {
             forwardAIMessageChunk(
-              (
+              (_a11 = value.data) == null ? void 0 : _a11.chunk,
               controller
             );
           }
@@ -4533,13 +4565,13 @@ function toDataStream(stream, callbacks) {
   ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 function toDataStreamResponse(stream, options) {
-  var
+  var _a11;
   const dataStream = toDataStream(stream, options == null ? void 0 : options.callbacks);
   const data = options == null ? void 0 : options.data;
   const init = options == null ? void 0 : options.init;
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (
+    status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init, {
       contentType: "text/plain; charset=utf-8",
@@ -4621,9 +4653,9 @@ function LangChainStream(callbacks) {
 
 // streams/mistral-stream.ts
 async function* streamable4(stream) {
-  var
+  var _a11, _b;
   for await (const chunk of stream) {
-    const content = (_b = (
+    const content = (_b = (_a11 = chunk.choices[0]) == null ? void 0 : _a11.delta) == null ? void 0 : _b.content;
     if (content === void 0 || content === "") {
       continue;
     }
@@ -4656,10 +4688,10 @@ async function* streamable5(stream) {
     model: chunk.model,
     // not exposed by Azure API
     choices: chunk.choices.map((choice) => {
-      var
+      var _a11, _b, _c, _d, _e, _f, _g;
       return {
         delta: {
-          content: (
+          content: (_a11 = choice.delta) == null ? void 0 : _a11.content,
           function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
           role: (_c = choice.delta) == null ? void 0 : _c.role,
           tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
@@ -4684,9 +4716,9 @@ function chunkToText() {
   const trimStartOfStream = trimStartOfStreamHelper();
   let isFunctionStreamingIn;
   return (json) => {
-    var
+    var _a11, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
     if (isChatCompletionChunk(json)) {
-      const delta = (
+      const delta = (_a11 = json.choices[0]) == null ? void 0 : _a11.delta;
      if ((_b = delta.function_call) == null ? void 0 : _b.name) {
        isFunctionStreamingIn = true;
        return {
@@ -4959,8 +4991,8 @@ function createFunctionCallTransformer(callbacks) {
 
 // streams/replicate-stream.ts
 async function ReplicateStream(res, cb, options) {
-  var
-  const url = (
+  var _a11;
+  const url = (_a11 = res.urls) == null ? void 0 : _a11.stream;
   if (!url) {
     if (res.error)
       throw new Error(res.error);
@@ -4981,8 +5013,8 @@ async function ReplicateStream(res, cb, options) {
 
 // streams/stream-to-response.ts
 function streamToResponse(res, response, init, data) {
-  var
-  response.writeHead((
+  var _a11;
+  response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
     "Content-Type": "text/plain; charset=utf-8",
     ...init == null ? void 0 : init.headers
   });
@@ -5025,7 +5057,7 @@ var StreamingTextResponse = class extends Response {
 var generateId2 = generateIdImpl;
 var nanoid = generateIdImpl;
 export {
-
+  AISDKError11 as AISDKError,
   AIStream,
   APICallError2 as APICallError,
   AWSBedrockAnthropicMessagesStream,
@@ -5051,6 +5083,7 @@ export {
   langchain_adapter_exports as LangChainAdapter,
   LangChainStream,
   LoadAPIKeyError,
+  MessageConversionError,
   MistralStream,
   NoContentGeneratedError,
   NoObjectGeneratedError,
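
A note for reading the renumbered hunks above: the `_aN` temporaries are esbuild's down-leveling of optional chaining (`?.`) and nullish coalescing (`??`) for older targets, so those hunks are behaviorally identical before and after the rename. An illustrative correspondence (a sketch, not taken from the package source):

// What the source would express with modern syntax:
const status = init?.status ?? 200;

// The down-leveled shape esbuild emits, seen throughout this diff:
var _a;
const status = (_a = init == null ? void 0 : init.status) != null ? _a : 200;
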