ai 4.0.0-canary.8 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +116 -0
- package/README.md +10 -14
- package/dist/index.d.mts +8 -19
- package/dist/index.d.ts +8 -19
- package/dist/index.js +1073 -1037
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1076 -1040
- package/dist/index.mjs.map +1 -1
- package/package.json +10 -15
- package/test/dist/index.d.mts +3 -3
- package/test/dist/index.d.ts +3 -3
- package/test/dist/index.js +3 -3
- package/test/dist/index.js.map +1 -1
- package/test/dist/index.mjs +3 -3
- package/test/dist/index.mjs.map +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,121 @@
 # ai
 
+## 4.0.0
+
+### Major Changes
+
+- 4e38b38: chore (ai): remove LanguageModelResponseMetadataWithHeaders type
+- 8bf5756: chore: remove legacy function/tool calling
+- f0cb69d: chore (ai/core): remove experimental function exports
+- da8c609: chore (ai): remove Tokens RSC helper
+- cbab571: chore (ai): remove ExperimentalXXXMessage types
+- b469a7e: chore: remove isXXXError methods
+- 54cb888: chore (ai): remove experimental_StreamData export
+- 4d61295: chore (ai): remove streamToResponse and streamingTextResponse
+- 9a3d741: chore (ai): remove ExperimentalTool export
+- 064257d: chore (ai/core): rename simulateReadableStream values parameter to chunks
+- 60e69ed: chore (ai/core): remove ai-stream related methods from streamText
+- a4f8ce9: chore (ai): AssistantResponse cleanups
+- d3ae4f6: chore (ui/react): remove useObject setInput helper
+- 7264b0a: chore (ai): remove responseMessages property from streamText/generateText result
+- b801982: chore (ai/core): remove init option from streamText result methods
+- f68d7b1: chore (ai/core): streamObject returns result immediately (no Promise)
+- 6090cea: chore (ai): remove rawResponse from generate/stream result objects
+- 073f282: chore (ai): remove AIStream and related exports
+- 1c58337: chore (ai): remove 2.x prompt helpers
+- a40a93d: chore (ai/ui): remove vue, svelte, solid re-export and dependency
+- a7ad35a: chore: remove legacy providers & rsc render
+- c0ddc24: chore (ai): remove toJSON method from AI SDK errors
+- 007cb81: chore (ai): change `streamText` warnings result to Promise
+- effbce3: chore (ai): remove responseMessage from streamText onFinish callback
+- 545d133: chore (ai): remove deprecated roundtrip settings from streamText / generateText
+- 7e89ccb: chore: remove nanoid export
+- f967199: chore (ai/core): streamText returns result immediately (no Promise)
+- 62d08fd: chore (ai): remove TokenUsage, CompletionTokenUsage, and EmbeddingTokenUsage types
+- e5d2ce8: chore (ai): remove deprecated provider registry exports
+- 70ce742: chore (ai): remove experimental_continuationSteps option
+- 2f09717: chore (ai): remove deprecated telemetry data
+- 0827bf9: chore (ai): remove LangChain adapter `toAIStream` method
+
+### Patch Changes
+
+- dce4158: chore (dependencies): update eventsource-parser to 3.0.0
+- f0ec721: chore (ai): remove openai peer dependency
+- f9bb30c: chore (ai): remove unnecessary dev dependencies
+- b053413: chore (ui): refactorings & README update
+- Updated dependencies [e117b54]
+- Updated dependencies [8bf5756]
+- Updated dependencies [b469a7e]
+- Updated dependencies [79c6dd9]
+- Updated dependencies [9f81e66]
+- Updated dependencies [70f28f6]
+- Updated dependencies [dce4158]
+- Updated dependencies [d3ae4f6]
+- Updated dependencies [68d30e9]
+- Updated dependencies [7814c4b]
+- Updated dependencies [ca3e586]
+- Updated dependencies [c0ddc24]
+- Updated dependencies [fe4f109]
+- Updated dependencies [84edae5]
+- Updated dependencies [b1da952]
+- Updated dependencies [04d3747]
+- Updated dependencies [dce4158]
+- Updated dependencies [7e89ccb]
+- Updated dependencies [8426f55]
+- Updated dependencies [db46ce5]
+- Updated dependencies [b053413]
+  - @ai-sdk/react@1.0.0
+  - @ai-sdk/ui-utils@1.0.0
+  - @ai-sdk/provider-utils@2.0.0
+  - @ai-sdk/provider@1.0.0
+
+## 4.0.0-canary.13
+
+### Major Changes
+
+- 064257d: chore (ai/core): rename simulateReadableStream values parameter to chunks
+
+### Patch Changes
+
+- Updated dependencies [79c6dd9]
+- Updated dependencies [04d3747]
+  - @ai-sdk/react@1.0.0-canary.9
+  - @ai-sdk/ui-utils@1.0.0-canary.9
+
+## 4.0.0-canary.12
+
+### Patch Changes
+
+- b053413: chore (ui): refactorings & README update
+- Updated dependencies [b053413]
+  - @ai-sdk/ui-utils@1.0.0-canary.8
+  - @ai-sdk/react@1.0.0-canary.8
+
+## 4.0.0-canary.11
+
+### Major Changes
+
+- f68d7b1: chore (ai/core): streamObject returns result immediately (no Promise)
+- f967199: chore (ai/core): streamText returns result immediately (no Promise)
+
+## 4.0.0-canary.10
+
+### Major Changes
+
+- effbce3: chore (ai): remove responseMessage from streamText onFinish callback
+
+### Patch Changes
+
+- Updated dependencies [fe4f109]
+  - @ai-sdk/ui-utils@1.0.0-canary.7
+  - @ai-sdk/react@1.0.0-canary.7
+
+## 4.0.0-canary.9
+
+### Patch Changes
+
+- f0ec721: chore (ai): remove openai peer dependency
+
 ## 4.0.0-canary.8
 
 ### Major Changes
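For orientation, here is a minimal sketch of how a `streamText` call site adapts to the breaking changes listed above: the call now returns its result immediately (no outer `await`), while the `warnings` result becomes a Promise. The model id and prompt are placeholders, and the `textStream` property is standard AI SDK surface that does not appear in this diff excerpt; this is not code from the package itself.

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

// 4.0.0: streamText returns a StreamTextResult synchronously,
// so there is no `await` on the call itself.
const result = streamText({
  model: openai('gpt-4o'), // placeholder model id
  prompt: 'Why is the sky blue?',
});

// 4.0.0: the warnings result is now Promise-valued and must be awaited.
const warnings = await result.warnings;
console.log(warnings);

// Consume the streamed text (assumed `textStream` property; Node ESM context).
for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}
```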
package/README.md
CHANGED
@@ -32,17 +32,13 @@ npm install @ai-sdk/openai
 import { generateText } from 'ai';
 import { openai } from '@ai-sdk/openai'; // Ensure OPENAI_API_KEY environment variable is set
 
-
-
-
-
-
-});
-
-console.log(text);
-}
+const { text } = await generateText({
+  model: openai('gpt-4o'),
+  system: 'You are a friendly assistant!',
+  prompt: 'Why is the sky blue?',
+});
 
-
+console.log(text);
 ```
 
 ### AI SDK UI
@@ -85,14 +81,14 @@ export default function Page() {
 ###### @/app/api/chat/route.ts (Next.js App Router)
 
 ```ts
-import {
+import { streamText } from 'ai';
 import { openai } from '@ai-sdk/openai';
 
 export async function POST(req: Request) {
-  const { messages }
+  const { messages } = await req.json();
 
-  const result =
-    model: openai('gpt-
+  const result = streamText({
+    model: openai('gpt-4o'),
     system: 'You are a helpful assistant.',
     messages,
   });
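To show how the route handler above is consumed, here is a hypothetical client-side call. The endpoint path and message shape mirror the README snippet; the streaming read loop is a generic fetch sketch rather than an SDK helper, and the message content is illustrative only.

```ts
// Hypothetical client for the POST /api/chat handler above.
const response = await fetch('/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    messages: [{ role: 'user', content: 'Hello!' }],
  }),
});

// The route streams its result, so read the body incrementally.
const reader = response.body!.getReader();
const decoder = new TextDecoder();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(decoder.decode(value, { stream: true }));
}
```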
package/dist/index.d.mts
CHANGED
@@ -1,5 +1,5 @@
 import { ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
-export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema,
+export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DataStreamPart, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema, ToolInvocation, UseAssistantOptions, formatAssistantStreamPart, formatDataStreamPart, jsonSchema, parseAssistantStreamPart, parseDataStreamPart, processDataStream, processTextStream } from '@ai-sdk/ui-utils';
 export { ToolCall as CoreToolCall, ToolResult as CoreToolResult, generateId } from '@ai-sdk/provider-utils';
 import { AttributeValue, Tracer } from '@opentelemetry/api';
 import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
@@ -7,8 +7,6 @@ export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, I
 import { z } from 'zod';
 import { ServerResponse } from 'http';
 import { ServerResponse as ServerResponse$1 } from 'node:http';
-import { AssistantStream } from 'openai/lib/AssistantStream';
-import { Run } from 'openai/resources/beta/threads/runs/runs';
 
 /**
 * Telemetry configuration.
@@ -893,7 +891,7 @@ interface StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {
 /**
 Warnings from the model provider (e.g. unsupported settings)
 */
-readonly warnings: CallWarning[] | undefined
+readonly warnings: Promise<CallWarning[] | undefined>;
 /**
 The token usage of the generated response. Resolved when the response is finished.
 */
@@ -1068,7 +1066,7 @@ Callback that is called when the LLM response and the final object validation ar
 currentDate?: () => Date;
 now?: () => number;
 };
-}):
+}): StreamObjectResult<DeepPartial<OBJECT>, OBJECT, never>;
 /**
 Generate an array with structured, typed elements for a given prompt and element schema using a language model.
 
@@ -1135,7 +1133,7 @@ Callback that is called when the LLM response and the final object validation ar
 currentDate?: () => Date;
 now?: () => number;
 };
-}):
+}): StreamObjectResult<Array<ELEMENT>, Array<ELEMENT>, AsyncIterableStream<ELEMENT>>;
 /**
 Generate JSON with any schema for a given prompt using a language model.
 
@@ -1176,7 +1174,7 @@ Callback that is called when the LLM response and the final object validation ar
 currentDate?: () => Date;
 now?: () => number;
 };
-}):
+}): StreamObjectResult<JSONValue, JSONValue, never>;
 
 type Parameters = z.ZodTypeAny | Schema<any>;
 type inferParameters<PARAMETERS extends Parameters> = PARAMETERS extends Schema<any> ? PARAMETERS['_type'] : PARAMETERS extends z.ZodTypeAny ? z.infer<PARAMETERS> : never;
@@ -1878,15 +1876,6 @@ The usage is the combined usage of all steps.
 Details for all steps.
 */
 readonly steps: StepResult<TOOLS>[];
-/**
-The response messages that were generated during the call. It consists of an assistant message,
-potentially containing tool calls.
-
-When there are tool results, there is an additional tool message with the tool results that are available.
-If there are tools that do not have execute functions, they are not included in the tool results and
-need to be added separately.
-*/
-readonly responseMessages: Array<CoreAssistantMessage | CoreToolMessage>;
 }) => Promise<void> | void;
 /**
 Callback that is called when each step (LLM call) is finished, including intermediate steps.
@@ -1900,7 +1889,7 @@ need to be added separately.
 generateId?: () => string;
 currentDate?: () => Date;
 };
-}):
+}): StreamTextResult<TOOLS>;
 
 /**
 * Experimental middleware for LanguageModelV1.
@@ -2162,7 +2151,7 @@ type AssistantResponseCallback = (options: {
 /**
 Forwards the assistant response stream to the client. Returns the `Run` object after it completes, or when it requires an action.
 */
-forwardStream: (stream:
+forwardStream: (stream: any) => Promise<any | undefined>;
 }) => Promise<void>;
 /**
 The `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.
@@ -2213,7 +2202,7 @@ type LangChainStreamEvent = {
 data: any;
 };
 /**
-Converts LangChain output streams to
+Converts LangChain output streams to an AI SDK Data Stream.
 
 The following streams are supported:
 - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
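A small sketch against the updated `streamObject` declarations above: the call now returns a `StreamObjectResult` synchronously and `warnings` is Promise-valued. The schema, model id, and the `partialObjectStream` property are assumptions insofar as they do not appear in this excerpt, though they match the published AI SDK surface.

```ts
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

// Returns StreamObjectResult<DeepPartial<OBJECT>, OBJECT, never> directly
// (no Promise), matching the updated signature in the diff above.
const result = streamObject({
  model: openai('gpt-4o'), // placeholder model id
  schema: z.object({ city: z.string(), country: z.string() }),
  prompt: 'Name a city and its country.',
});

// `readonly warnings: Promise<CallWarning[] | undefined>` per the diff.
console.log(await result.warnings);

// Partial objects stream in as DeepPartial values (assumed property,
// not shown in this excerpt).
for await (const partial of result.partialObjectStream) {
  console.log(partial);
}
```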
package/dist/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
-export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema,
+export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DataStreamPart, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema, ToolInvocation, UseAssistantOptions, formatAssistantStreamPart, formatDataStreamPart, jsonSchema, parseAssistantStreamPart, parseDataStreamPart, processDataStream, processTextStream } from '@ai-sdk/ui-utils';
 export { ToolCall as CoreToolCall, ToolResult as CoreToolResult, generateId } from '@ai-sdk/provider-utils';
 import { AttributeValue, Tracer } from '@opentelemetry/api';
 import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
@@ -7,8 +7,6 @@ export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, I
 import { z } from 'zod';
 import { ServerResponse } from 'http';
 import { ServerResponse as ServerResponse$1 } from 'node:http';
-import { AssistantStream } from 'openai/lib/AssistantStream';
-import { Run } from 'openai/resources/beta/threads/runs/runs';
 
 /**
 * Telemetry configuration.
@@ -893,7 +891,7 @@ interface StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {
 /**
 Warnings from the model provider (e.g. unsupported settings)
 */
-readonly warnings: CallWarning[] | undefined
+readonly warnings: Promise<CallWarning[] | undefined>;
 /**
 The token usage of the generated response. Resolved when the response is finished.
 */
@@ -1068,7 +1066,7 @@ Callback that is called when the LLM response and the final object validation ar
 currentDate?: () => Date;
 now?: () => number;
 };
-}):
+}): StreamObjectResult<DeepPartial<OBJECT>, OBJECT, never>;
 /**
 Generate an array with structured, typed elements for a given prompt and element schema using a language model.
 
@@ -1135,7 +1133,7 @@ Callback that is called when the LLM response and the final object validation ar
 currentDate?: () => Date;
 now?: () => number;
 };
-}):
+}): StreamObjectResult<Array<ELEMENT>, Array<ELEMENT>, AsyncIterableStream<ELEMENT>>;
 /**
 Generate JSON with any schema for a given prompt using a language model.
 
@@ -1176,7 +1174,7 @@ Callback that is called when the LLM response and the final object validation ar
 currentDate?: () => Date;
 now?: () => number;
 };
-}):
+}): StreamObjectResult<JSONValue, JSONValue, never>;
 
 type Parameters = z.ZodTypeAny | Schema<any>;
 type inferParameters<PARAMETERS extends Parameters> = PARAMETERS extends Schema<any> ? PARAMETERS['_type'] : PARAMETERS extends z.ZodTypeAny ? z.infer<PARAMETERS> : never;
@@ -1878,15 +1876,6 @@ The usage is the combined usage of all steps.
 Details for all steps.
 */
 readonly steps: StepResult<TOOLS>[];
-/**
-The response messages that were generated during the call. It consists of an assistant message,
-potentially containing tool calls.
-
-When there are tool results, there is an additional tool message with the tool results that are available.
-If there are tools that do not have execute functions, they are not included in the tool results and
-need to be added separately.
-*/
-readonly responseMessages: Array<CoreAssistantMessage | CoreToolMessage>;
 }) => Promise<void> | void;
 /**
 Callback that is called when each step (LLM call) is finished, including intermediate steps.
@@ -1900,7 +1889,7 @@ need to be added separately.
 generateId?: () => string;
 currentDate?: () => Date;
 };
-}):
+}): StreamTextResult<TOOLS>;
 
 /**
 * Experimental middleware for LanguageModelV1.
@@ -2162,7 +2151,7 @@ type AssistantResponseCallback = (options: {
 /**
 Forwards the assistant response stream to the client. Returns the `Run` object after it completes, or when it requires an action.
 */
-forwardStream: (stream:
+forwardStream: (stream: any) => Promise<any | undefined>;
 }) => Promise<void>;
 /**
 The `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.
@@ -2213,7 +2202,7 @@ type LangChainStreamEvent = {
 data: any;
 };
 /**
-Converts LangChain output streams to
+Converts LangChain output streams to an AI SDK Data Stream.
 
 The following streams are supported:
 - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
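Finally, a hedged sketch of the LangChain conversion described in the doc comment above. `LangChainAdapter.toDataStreamResponse` and the `@langchain/openai` model setup are assumptions based on the published 4.0.0 API surface (this diff only removes `toAIStream`); neither appears in this excerpt.

```ts
import { LangChainAdapter } from 'ai';
import { ChatOpenAI } from '@langchain/openai';

export async function POST(req: Request) {
  const { prompt } = await req.json();

  // LangChain `model.stream` output is a LangChainAIMessageChunk stream,
  // one of the supported inputs listed above.
  const model = new ChatOpenAI({ model: 'gpt-4o' }); // placeholder model id
  const stream = await model.stream(prompt);

  // Assumed helper: converts the LangChain stream into an AI SDK Data
  // Stream response (the method removed in 4.0.0 was `toAIStream`).
  return LangChainAdapter.toDataStreamResponse(stream);
}
```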