ai 6.0.21 → 6.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +1 -2
- package/dist/index.d.ts +1 -2
- package/dist/index.js +3 -9
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +3 -9
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
@@ -2647,7 +2647,7 @@ If set and supported by the model, calls will generate deterministic results.
 @return
 A result object for accessing different stream types and additional information.
  */
-declare function streamText<TOOLS extends ToolSet, OUTPUT extends Output = Output<string, string>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, timeout, headers, stopWhen, experimental_output, output, experimental_telemetry: telemetry, prepareStep, providerOptions, experimental_activeTools, activeTools, experimental_repairToolCall: repairToolCall, experimental_transform: transform, experimental_download: download, includeRawChunks, onChunk, onError, onFinish, onAbort, onStepFinish, experimental_context, _internal: { now, generateId
+declare function streamText<TOOLS extends ToolSet, OUTPUT extends Output = Output<string, string>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, timeout, headers, stopWhen, experimental_output, output, experimental_telemetry: telemetry, prepareStep, providerOptions, experimental_activeTools, activeTools, experimental_repairToolCall: repairToolCall, experimental_transform: transform, experimental_download: download, includeRawChunks, onChunk, onError, onFinish, onAbort, onStepFinish, experimental_context, _internal: { now, generateId }, ...settings }: CallSettings & Prompt & {
     /**
 The language model to use.
      */
@@ -2768,7 +2768,6 @@ Internal. For test use only. May change without notice.
     _internal?: {
         now?: () => number;
         generateId?: IdGenerator;
-        currentDate?: () => Date;
     };
 }): StreamTextResult<TOOLS, OUTPUT>;
 
package/dist/index.d.ts
CHANGED
@@ -2647,7 +2647,7 @@ If set and supported by the model, calls will generate deterministic results.
 @return
 A result object for accessing different stream types and additional information.
  */
-declare function streamText<TOOLS extends ToolSet, OUTPUT extends Output = Output<string, string>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, timeout, headers, stopWhen, experimental_output, output, experimental_telemetry: telemetry, prepareStep, providerOptions, experimental_activeTools, activeTools, experimental_repairToolCall: repairToolCall, experimental_transform: transform, experimental_download: download, includeRawChunks, onChunk, onError, onFinish, onAbort, onStepFinish, experimental_context, _internal: { now, generateId
+declare function streamText<TOOLS extends ToolSet, OUTPUT extends Output = Output<string, string>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, timeout, headers, stopWhen, experimental_output, output, experimental_telemetry: telemetry, prepareStep, providerOptions, experimental_activeTools, activeTools, experimental_repairToolCall: repairToolCall, experimental_transform: transform, experimental_download: download, includeRawChunks, onChunk, onError, onFinish, onAbort, onStepFinish, experimental_context, _internal: { now, generateId }, ...settings }: CallSettings & Prompt & {
     /**
 The language model to use.
      */
@@ -2768,7 +2768,6 @@ Internal. For test use only. May change without notice.
     _internal?: {
         now?: () => number;
         generateId?: IdGenerator;
-        currentDate?: () => Date;
     };
 }): StreamTextResult<TOOLS, OUTPUT>;
 
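Both declaration files carry the same change: the test-only currentDate override is removed from streamText's _internal options, and (as the index.js hunks below show) response timestamps are now taken directly from new Date(). Ordinary callers that never touched _internal are unaffected. A minimal sketch of such a call, assuming the @ai-sdk/openai provider package and an illustrative model id and prompt (none of these are part of this diff):

import { streamText } from 'ai';
// The provider import below is an assumption for illustration; any LanguageModel works.
import { openai } from '@ai-sdk/openai';

async function main() {
  const result = streamText({
    model: openai('gpt-4o-mini'), // illustrative model id
    prompt: 'Write a one-sentence release note.',
    // _internal is documented as test-only; as of 6.0.22 it accepts only
    // now and generateId, since the currentDate override is gone.
  });

  // Stream the generated text as it arrives.
  for await (const textPart of result.textStream) {
    process.stdout.write(textPart);
  }
}

main().catch(console.error);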
package/dist/index.js
CHANGED
@@ -1013,7 +1013,7 @@ var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_provider_utils4 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "6.0.21" : "0.0.0-test";
+var VERSION = true ? "6.0.22" : "0.0.0-test";
 
 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -5749,11 +5749,7 @@ function streamText({
   onAbort,
   onStepFinish,
   experimental_context,
-  _internal: {
-    now: now2 = now,
-    generateId: generateId2 = originalGenerateId2,
-    currentDate = () => /* @__PURE__ */ new Date()
-  } = {},
+  _internal: { now: now2 = now, generateId: generateId2 = originalGenerateId2 } = {},
   ...settings
 }) {
   const totalTimeoutMs = getTotalTimeoutMs(timeout);
@@ -5786,7 +5782,6 @@ function streamText({
     onAbort,
     onStepFinish,
     now: now2,
-    currentDate,
     generateId: generateId2,
     experimental_context,
     download: download2
@@ -5876,7 +5871,6 @@ var DefaultStreamTextResult = class {
     prepareStep,
     includeRawChunks,
     now: now2,
-    currentDate,
     generateId: generateId2,
     onChunk,
     onError,
@@ -6443,7 +6437,7 @@ var DefaultStreamTextResult = class {
         let stepFirstChunk = true;
         let stepResponse = {
           id: generateId2(),
-          timestamp: currentDate(),
+          timestamp: /* @__PURE__ */ new Date(),
           modelId: model.modelId
         };
         let activeText = "";