ai 5.0.156 → 5.0.158

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,20 @@
1
1
  # ai
2
2
 
3
+ ## 5.0.158
4
+
5
+ ### Patch Changes
6
+
7
+ - Updated dependencies [946ef88]
8
+ - @ai-sdk/gateway@2.0.62
9
+
10
+ ## 5.0.157
11
+
12
+ ### Patch Changes
13
+
14
+ - 9379c51: README updates
15
+ - Updated dependencies [2e35d03]
16
+ - @ai-sdk/gateway@2.0.61
17
+
3
18
  ## 5.0.156
4
19
 
5
20
  ### Patch Changes
@@ -565,15 +580,15 @@
565
580
  This change replaces
566
581
 
567
582
  ```ts
568
- import { experimental_createMCPClient } from 'ai';
569
- import { Experimental_StdioMCPTransport } from 'ai/mcp-stdio';
583
+ import { experimental_createMCPClient } from "ai";
584
+ import { Experimental_StdioMCPTransport } from "ai/mcp-stdio";
570
585
  ```
571
586
 
572
587
  with
573
588
 
574
589
  ```ts
575
- import { experimental_createMCPClient } from '@ai-sdk/mcp';
576
- import { Experimental_StdioMCPTransport } from '@ai-sdk/mcp/mcp-stdio';
590
+ import { experimental_createMCPClient } from "@ai-sdk/mcp";
591
+ import { Experimental_StdioMCPTransport } from "@ai-sdk/mcp/mcp-stdio";
577
592
  ```
578
593
 
579
594
  ## 5.0.78
@@ -1196,7 +1211,7 @@
1196
1211
 
1197
1212
  ```js
1198
1213
  await generateImage({
1199
- model: luma.image('photon-flash-1', {
1214
+ model: luma.image("photon-flash-1", {
1200
1215
  maxImagesPerCall: 5,
1201
1216
  pollIntervalMillis: 500,
1202
1217
  }),
@@ -1209,7 +1224,7 @@
1209
1224
 
1210
1225
  ```js
1211
1226
  await generateImage({
1212
- model: luma.image('photon-flash-1'),
1227
+ model: luma.image("photon-flash-1"),
1213
1228
  prompt,
1214
1229
  n: 10,
1215
1230
  maxImagesPerCall: 5,
@@ -1409,10 +1424,10 @@
1409
1424
 The `experimental_generateImage` method from the `ai` package now returns revised prompts for OpenAI's image models.
1410
1425
 
1411
1426
  ```js
1412
- const prompt = 'Santa Claus driving a Cadillac';
1427
+ const prompt = "Santa Claus driving a Cadillac";
1413
1428
 
1414
1429
  const { providerMetadata } = await experimental_generateImage({
1415
- model: openai.image('dall-e-3'),
1430
+ model: openai.image("dall-e-3"),
1416
1431
  prompt,
1417
1432
  });
1418
1433
 
@@ -2234,7 +2249,7 @@
2234
2249
 
2235
2250
  ```js
2236
2251
  await generateImage({
2237
- model: luma.image('photon-flash-1', {
2252
+ model: luma.image("photon-flash-1", {
2238
2253
  maxImagesPerCall: 5,
2239
2254
  pollIntervalMillis: 500,
2240
2255
  }),
@@ -2247,7 +2262,7 @@
2247
2262
 
2248
2263
  ```js
2249
2264
  await generateImage({
2250
- model: luma.image('photon-flash-1'),
2265
+ model: luma.image("photon-flash-1"),
2251
2266
  prompt,
2252
2267
  n: 10,
2253
2268
  maxImagesPerCall: 5,
@@ -2348,10 +2363,10 @@
2348
2363
 The `experimental_generateImage` method from the `ai` package now returns revised prompts for OpenAI's image models.
2349
2364
 
2350
2365
  ```js
2351
- const prompt = 'Santa Claus driving a Cadillac';
2366
+ const prompt = "Santa Claus driving a Cadillac";
2352
2367
 
2353
2368
  const { providerMetadata } = await experimental_generateImage({
2354
- model: openai.image('dall-e-3'),
2369
+ model: openai.image("dall-e-3"),
2355
2370
  prompt,
2356
2371
  });
2357
2372
 
package/README.md CHANGED
@@ -18,31 +18,39 @@ npm install ai
18
18
 
19
19
  The AI SDK provides a [unified API](https://ai-sdk.dev/docs/foundations/providers-and-models) to interact with model providers like [OpenAI](https://ai-sdk.dev/providers/ai-sdk-providers/openai), [Anthropic](https://ai-sdk.dev/providers/ai-sdk-providers/anthropic), [Google](https://ai-sdk.dev/providers/ai-sdk-providers/google-generative-ai), and [more](https://ai-sdk.dev/providers/ai-sdk-providers).
20
20
 
21
- ```shell
22
- npm install @ai-sdk/openai @ai-sdk/anthropic @ai-sdk/google
23
- ```
21
+ By default, the AI SDK uses the [Vercel AI Gateway](https://vercel.com/docs/ai-gateway) to give you access to all major providers out of the box. Just pass a model string for any supported model:
24
22
 
25
- Alternatively you can use the [Vercel AI Gateway](https://vercel.com/docs/ai-gateway).
23
+ ```ts
24
+ const result = await generateText({
25
+ model: 'anthropic/claude-opus-4.6', // or 'openai/gpt-5.4', 'google/gemini-3-flash', etc.
26
+ prompt: 'Hello!',
27
+ });
28
+ ```
26
29
 
27
- ## Usage
30
+ You can also connect to providers directly using their SDK packages:
28
31
 
29
- ### Generating Text
32
+ ```shell
33
+ npm install @ai-sdk/openai @ai-sdk/anthropic @ai-sdk/google
34
+ ```
30
35
 
31
36
  ```ts
32
- import { generateText } from 'ai';
37
+ import { anthropic } from '@ai-sdk/anthropic';
33
38
 
34
- const { text } = await generateText({
35
- model: 'openai/gpt-5', // use Vercel AI Gateway
36
- prompt: 'What is an agent?',
39
+ const result = await generateText({
40
+ model: anthropic('claude-opus-4-6'), // or openai('gpt-5.4'), google('gemini-3-flash'), etc.
41
+ prompt: 'Hello!',
37
42
  });
38
43
  ```
39
44
 
45
+ ## Usage
46
+
47
+ ### Generating Text
48
+
40
49
  ```ts
41
50
  import { generateText } from 'ai';
42
- import { openai } from '@ai-sdk/openai';
43
51
 
44
52
  const { text } = await generateText({
45
- model: openai('gpt-5'), // use OpenAI Responses API directly
53
+ model: 'openai/gpt-5.4', // use Vercel AI Gateway
46
54
  prompt: 'What is an agent?',
47
55
  });
48
56
  ```
@@ -54,7 +62,7 @@ import { generateObject } from 'ai';
54
62
  import { z } from 'zod';
55
63
 
56
64
  const { object } = await generateObject({
57
- model: 'openai/gpt-5',
65
+ model: 'openai/gpt-5.4',
58
66
  schema: z.object({
59
67
  recipe: z.object({
60
68
  name: z.string(),
package/dist/index.js CHANGED
@@ -779,7 +779,7 @@ var import_provider_utils2 = require("@ai-sdk/provider-utils");
779
779
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
780
780
 
781
781
  // src/version.ts
782
- var VERSION = true ? "5.0.156" : "0.0.0-test";
782
+ var VERSION = true ? "5.0.158" : "0.0.0-test";
783
783
 
784
784
  // src/util/download/download.ts
785
785
  var download = async ({