ai 3.1.20 → 3.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +9 -1
- package/dist/index.d.ts +9 -1
- package/dist/index.js +16 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +16 -8
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/rsc/dist/rsc-server.mjs.map +1 -1
package/dist/index.d.mts
CHANGED
@@ -1882,6 +1882,9 @@ interface CompletionUsage {
     total_tokens: number;
 }
 type AsyncIterableOpenAIStreamReturnTypes = AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion> | AsyncIterable<AzureChatCompletions>;
+/**
+ * @deprecated Use the [OpenAI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/openai) instead.
+ */
 declare function OpenAIStream(res: Response | AsyncIterableOpenAIStreamReturnTypes, callbacks?: OpenAIStreamCallbacks): ReadableStream;
 
 interface FunctionCallPayload {
@@ -2099,6 +2102,8 @@ interface MessageStopEvent {
  * Accepts either a fetch Response from the Anthropic `POST /v1/complete` endpoint,
  * or the return value of `await client.completions.create({ stream: true })`
  * from the `@anthropic-ai/sdk` package.
+ *
+ * @deprecated Use the [Anthropic provider](https://sdk.vercel.ai/providers/ai-sdk-providers/anthropic) instead.
  */
 declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk> | AsyncIterable<MessageStreamEvent>, cb?: AIStreamCallbacksAndOptions): ReadableStream;
 
@@ -2189,6 +2194,9 @@ interface TextPart {
     text: string;
     inlineData?: never;
 }
+/**
+ * @deprecated Use the [Google Generative AI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/google-generative-ai) instead.
+ */
 declare function GoogleGenerativeAIStream(response: {
     stream: AsyncIterable<GenerateContentResponse>;
 }, cb?: AIStreamCallbacksAndOptions): ReadableStream;
@@ -2240,7 +2248,7 @@ declare namespace langchainAdapter {
 }
 
 /**
- * @deprecated Use LangChainAdapter.
+ * @deprecated Use [LangChainAdapter](https://sdk.vercel.ai/providers/adapters/langchain) instead.
  */
 declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
     stream: ReadableStream<any>;
package/dist/index.d.ts
CHANGED
@@ -1882,6 +1882,9 @@ interface CompletionUsage {
     total_tokens: number;
 }
 type AsyncIterableOpenAIStreamReturnTypes = AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion> | AsyncIterable<AzureChatCompletions>;
+/**
+ * @deprecated Use the [OpenAI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/openai) instead.
+ */
 declare function OpenAIStream(res: Response | AsyncIterableOpenAIStreamReturnTypes, callbacks?: OpenAIStreamCallbacks): ReadableStream;
 
 interface FunctionCallPayload {
@@ -2099,6 +2102,8 @@ interface MessageStopEvent {
  * Accepts either a fetch Response from the Anthropic `POST /v1/complete` endpoint,
  * or the return value of `await client.completions.create({ stream: true })`
  * from the `@anthropic-ai/sdk` package.
+ *
+ * @deprecated Use the [Anthropic provider](https://sdk.vercel.ai/providers/ai-sdk-providers/anthropic) instead.
  */
 declare function AnthropicStream(res: Response | AsyncIterable<CompletionChunk> | AsyncIterable<MessageStreamEvent>, cb?: AIStreamCallbacksAndOptions): ReadableStream;
 
@@ -2189,6 +2194,9 @@ interface TextPart {
     text: string;
     inlineData?: never;
 }
+/**
+ * @deprecated Use the [Google Generative AI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/google-generative-ai) instead.
+ */
 declare function GoogleGenerativeAIStream(response: {
     stream: AsyncIterable<GenerateContentResponse>;
 }, cb?: AIStreamCallbacksAndOptions): ReadableStream;
@@ -2240,7 +2248,7 @@ declare namespace langchainAdapter {
 }
 
 /**
- * @deprecated Use LangChainAdapter.
+ * @deprecated Use [LangChainAdapter](https://sdk.vercel.ai/providers/adapters/langchain) instead.
  */
 declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
     stream: ReadableStream<any>;
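The new `@deprecated` tags on `OpenAIStream`, `AnthropicStream`, `GoogleGenerativeAIStream`, and `LangChainStream` all point at the provider packages that replace the per-vendor stream helpers. As a rough sketch of the migration the OpenAI notice suggests (the route handler shape, model id, and prompt are assumptions, not part of this diff):

    // Replacing OpenAIStream with the @ai-sdk/openai provider, as the
    // deprecation notice recommends. Model id and prompt are placeholders.
    import { openai } from "@ai-sdk/openai";
    import { streamText } from "ai";

    export async function POST(req: Request) {
      const { prompt } = await req.json();

      const result = await streamText({
        model: openai("gpt-4o"),
        prompt,
      });

      // toTextStreamResponse is the StreamTextResult method touched later in this diff.
      return result.toTextStreamResponse();
    }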
package/dist/index.js
CHANGED
@@ -1466,6 +1466,16 @@ function toResponseMessages({
 }
 var experimental_generateText = generateText;
 
+// core/util/prepare-response-headers.ts
+function prepareResponseHeaders(init, { contentType }) {
+  var _a;
+  const headers = new Headers((_a = init == null ? void 0 : init.headers) != null ? _a : {});
+  if (!headers.has("Content-Type")) {
+    headers.set("Content-Type", contentType);
+  }
+  return headers;
+}
+
 // core/generate-text/run-tools-transformation.ts
 var import_provider7 = require("@ai-sdk/provider");
 
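This helper is the one functional addition in the release: it builds a Headers object from whatever the caller passed in `init.headers` and fills in `Content-Type` only when the caller has not set one. Because `Headers` lookups are case-insensitive, a caller-supplied `content-type` in any casing is kept. A minimal sketch of that behavior, re-typed in TypeScript for illustration (the example values are not from the package):

    // Re-typed version of the compiled helper above, for illustration only.
    function prepareResponseHeaders(
      init: ResponseInit | undefined,
      { contentType }: { contentType: string },
    ): Headers {
      const headers = new Headers(init?.headers ?? {});
      if (!headers.has("Content-Type")) {
        headers.set("Content-Type", contentType);
      }
      return headers;
    }

    // A caller-provided header wins, regardless of casing:
    prepareResponseHeaders(
      { headers: { "content-type": "text/event-stream" } },
      { contentType: "text/plain; charset=utf-8" },
    ).get("Content-Type"); // => "text/event-stream"

    // With no caller header, the default is applied:
    prepareResponseHeaders(undefined, { contentType: "text/plain; charset=utf-8" })
      .get("Content-Type"); // => "text/plain; charset=utf-8"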
@@ -1938,10 +1948,9 @@ var StreamTextResult = class {
     var _a;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
-      headers: {
-        "Content-Type": "text/plain; charset=utf-8",
-        ...init == null ? void 0 : init.headers
-      }
+      headers: prepareResponseHeaders(init, {
+        contentType: "text/plain; charset=utf-8"
+      })
     });
   }
 };
@@ -3552,10 +3561,9 @@ var StreamingTextResponse = class extends Response {
     super(processedStream, {
       ...init,
       status: 200,
-      headers: {
-        "Content-Type": "text/plain; charset=utf-8",
-        ...init == null ? void 0 : init.headers
-      }
+      headers: prepareResponseHeaders(init, {
+        contentType: "text/plain; charset=utf-8"
+      })
     });
   }
 };
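Both call sites change in the same way: the inline headers object is replaced by the shared helper, so the default `text/plain; charset=utf-8` is applied through the Headers API and only when the caller has not already provided a `Content-Type`. A hedged usage sketch from the consumer side (the event-stream content type is just an example value, not something this release emits):

    import { StreamingTextResponse } from "ai";

    // Stand-in for a model output stream; any ReadableStream<Uint8Array> works here.
    const stream = new Response("hello world").body!;

    // With no Content-Type in init, prepareResponseHeaders falls back to
    // "text/plain; charset=utf-8"; a caller-supplied value is kept instead.
    const response = new StreamingTextResponse(stream, {
      headers: { "Content-Type": "text/event-stream" },
    });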