@tavily/ai-sdk 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -39,7 +39,7 @@ Search the web for real-time, AI-optimized information:
 
 ```typescript
 import { tavilySearch } from "@tavily/ai-sdk";
-import { generateText, gateway } from "ai";
+import { generateText, gateway, stepCountIs } from "ai";
 
 const result = await generateText({
   model: gateway("openai/gpt-5-mini"),
@@ -51,6 +51,7 @@ const result = await generateText({
       maxResults: 5,
     }),
   },
+  stopWhen: stepCountIs(3),
 });
 ```
 
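Read together, the two hunks above cap the tool-calling loop in the tavilySearch quick start. A sketch of the snippet as it would read after the change; the prompt and any tavilySearch options other than `maxResults` are not visible in this diff and are stand-ins here:

```typescript
import { tavilySearch } from "@tavily/ai-sdk";
import { generateText, gateway, stepCountIs } from "ai";

const result = await generateText({
  model: gateway("openai/gpt-5-mini"),
  prompt: "Summarize this week's AI news.", // placeholder; the real prompt is outside this diff
  tools: {
    tavilySearch: tavilySearch({
      maxResults: 5,
    }),
  },
  stopWhen: stepCountIs(3), // stop the tool-calling loop after at most 3 steps
});
```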
@@ -75,6 +76,7 @@ const result = await generateText({
 - `autoParameters?: boolean` - Enable automatic parameter optimization
 - `timeout?: number` - Request timeout in milliseconds
 - `includeFavicon?: boolean` - Include favicon URLs in results
+- `includeUsage?: boolean` - Whether to include credit usage information in the response
 - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
 - `apiBaseURL?: string` - Custom API base URL
 
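Like the options it is listed alongside, `includeUsage` is set when the tool is constructed, not filled in by the model per call. A minimal sketch, assuming nothing about the shape of the usage data in the response:

```typescript
import { tavilySearch } from "@tavily/ai-sdk";

// includeUsage is a construction-time option like maxResults or timeout;
// it is not part of the input schema the model fills in at call time.
const searchTool = tavilySearch({
  maxResults: 5,
  includeUsage: true, // ask Tavily to report credit usage with each response
});
```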
@@ -84,7 +86,7 @@ Extract clean, structured content from URLs:
 
 ```typescript
 import { tavilyExtract } from "@tavily/ai-sdk";
-import { generateText, gateway } from "ai";
+import { generateText, gateway, stepCountIs } from "ai";
 
 const result = await generateText({
   model: gateway("openai/gpt-5-mini"),
@@ -92,6 +94,7 @@ const result = await generateText({
   tools: {
     tavilyExtract: tavilyExtract(),
   },
+  stopWhen: stepCountIs(3),
 });
 ```
 
@@ -102,6 +105,7 @@ const result = await generateText({
 - `format?: "markdown" | "text"` - Output format (default: "markdown")
 - `timeout?: number` - Request timeout in milliseconds
 - `includeFavicon?: boolean` - Include favicon URLs in results
+- `includeUsage?: boolean` - Whether to include credit usage information in the response. NOTE: The value may be 0 if the total number of successful URL extractions has not yet reached 5. See our [Credits & Pricing documentation](https://docs.tavily.com/documentation/api-credits) for details.
 - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
 - `apiBaseURL?: string` - Custom API base URL
 
@@ -115,7 +119,7 @@ Crawl websites to discover and extract content from multiple pages:
 
 ```typescript
 import { tavilyCrawl } from "@tavily/ai-sdk";
-import { generateText, gateway } from "ai";
+import { generateText, gateway, stepCountIs } from "ai";
 
 const result = await generateText({
   model: gateway("openai/gpt-5-mini"),
@@ -123,6 +127,7 @@ const result = await generateText({
   tools: {
     tavilyCrawl: tavilyCrawl(),
   },
+  stopWhen: stepCountIs(3),
 });
 ```
 
@@ -142,6 +147,7 @@ const result = await generateText({
 - `format?: "markdown" | "text"` - Output format (default: "markdown")
 - `timeout?: number` - Request timeout in milliseconds
 - `includeFavicon?: boolean` - Include favicon URLs in results
+- `includeUsage?: boolean` - Whether to include credit usage information in the response. NOTE: The value may be 0 if the total use of /extract and /map has not yet reached the minimum requirements. See our [Credits & Pricing documentation](https://docs.tavily.com/documentation/api-credits) for details.
 - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
 - `apiBaseURL?: string` - Custom API base URL
 
@@ -158,7 +164,7 @@ Map website structure to understand site architecture:
 
 ```typescript
 import { tavilyMap } from "@tavily/ai-sdk";
-import { generateText, gateway } from "ai";
+import { generateText, gateway, stepCountIs } from "ai";
 
 const result = await generateText({
   model: gateway("openai/gpt-5-mini"),
@@ -166,6 +172,7 @@ const result = await generateText({
   tools: {
     tavilyMap: tavilyMap(),
   },
+  stopWhen: stepCountIs(3),
 });
 ```
 
@@ -183,6 +190,7 @@ const result = await generateText({
 - `timeout?: number` - Request timeout in milliseconds
 - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
 - `apiBaseURL?: string` - Custom API base URL
+- `includeUsage?: boolean` - Whether to include credit usage information in the response. NOTE: The value may be 0 if the total number of pages successfully mapped has not yet reached 10. See our [Credits & Pricing documentation](https://docs.tavily.com/documentation/api-credits) for details.
 
 **Input Parameters (for AI agent):**
 - `url: string` - Base URL to start mapping from (required)
@@ -202,7 +210,7 @@ import {
   tavilyCrawl,
   tavilyMap
 } from "@tavily/ai-sdk";
-import { generateText, gateway } from "ai";
+import { generateText, gateway, stepCountIs } from "ai";
 
 const result = await generateText({
   model: gateway("openai/gpt-5-mini"),
@@ -213,6 +221,7 @@ const result = await generateText({
     tavilyCrawl: tavilyCrawl(),
     tavilyMap: tavilyMap(),
   },
+  stopWhen: stepCountIs(3),
 });
 ```
 
package/dist/index.d.ts CHANGED
@@ -21,6 +21,7 @@ type TavilyExtractOptions = TavilyClientOptions & Partial<TavilyExtractOptions$1
 declare const tavilyExtract: (options?: TavilyExtractOptions) => ai.Tool<{
   urls: string[];
   extractDepth?: "basic" | "advanced" | undefined;
+  query?: string | undefined;
 }, _tavily_core.TavilyExtractResponse>;
 
 type TavilyCrawlOptions = TavilyClientOptions & Partial<TavilyCrawlOptions$1>;
@@ -34,6 +35,7 @@ declare const tavilyCrawl: (options?: TavilyCrawlOptions) => ai.Tool<{
   extractDepth?: "basic" | "advanced" | undefined;
   instructions?: string | undefined;
   allowExternal?: boolean | undefined;
+  query?: string | undefined;
 }, _tavily_core.TavilyCrawlResponse>;
 
 type TavilyMapOptions = TavilyClientOptions & Partial<TavilyMapOptions$1>;
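In both declarations the change is the same: `query` joins the model-facing input schema. A minimal sketch of the widened extract input shape, with illustrative values; the type alias below is mine, not exported by the package:

```typescript
// Illustrative alias mirroring the regenerated declaration; not exported by @tavily/ai-sdk.
type TavilyExtractInput = {
  urls: string[];
  extractDepth?: "basic" | "advanced";
  query?: string; // user intent used to rerank extracted content chunks
};

const exampleInput: TavilyExtractInput = {
  urls: ["https://docs.tavily.com"],
  query: "credit usage and pricing", // illustrative value
};
```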
package/dist/index.js CHANGED
@@ -38,18 +38,21 @@ var tavilyExtract = (options = {}) => {
     urls: z2.array(z2.string()).describe("Array of URLs to extract content from"),
     extractDepth: z2.enum(["basic", "advanced"]).optional().describe(
       "Extraction depth - 'basic' for main content, 'advanced' for comprehensive extraction (default: 'basic')"
-    )
+    ),
+    query: z2.string().optional().describe("User intent query for reranking extracted content chunks")
   });
   return tool2({
     description: "Extract clean, structured content from one or more URLs. Returns parsed content in markdown or text format, optimized for AI consumption.",
     inputSchema,
     execute: async ({
       urls,
-      extractDepth: inputExtractDepth
+      extractDepth: inputExtractDepth,
+      query: inputQuery
     }) => {
       return await client.extract(urls, {
         ...options,
-        extractDepth: inputExtractDepth ?? options.extractDepth
+        extractDepth: inputExtractDepth ?? options.extractDepth,
+        query: inputQuery ?? options.query
       });
     }
   });
@@ -70,7 +73,8 @@ var tavilyCrawl = (options = {}) => {
     instructions: z3.string().optional().describe(
       "Optional instructions to guide the crawler (e.g., 'only crawl blog posts', 'focus on product pages')"
     ),
-    allowExternal: z3.boolean().optional().describe("Whether to allow crawling external domains (default: false)")
+    allowExternal: z3.boolean().optional().describe("Whether to allow crawling external domains (default: false)"),
+    query: z3.string().optional().describe("User intent query for reranking extracted content chunks")
   });
   return tool3({
     description: "Crawl a website starting from a base URL to discover and extract content from multiple pages. Intelligently traverses links and extracts structured data at scale.",
@@ -80,14 +84,16 @@ var tavilyCrawl = (options = {}) => {
       maxDepth: inputMaxDepth,
       extractDepth: inputExtractDepth,
       instructions: inputInstructions,
-      allowExternal: inputAllowExternal
+      allowExternal: inputAllowExternal,
+      query: inputQuery
     }) => {
       return await client.crawl(url, {
         ...options,
         maxDepth: inputMaxDepth ?? options.maxDepth,
         extractDepth: inputExtractDepth ?? options.extractDepth,
         instructions: inputInstructions ?? options.instructions,
-        allowExternal: inputAllowExternal ?? options.allowExternal
+        allowExternal: inputAllowExternal ?? options.allowExternal,
+        query: inputQuery ?? options.query
       });
     }
   });
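Both tools resolve arguments the same way: a value the model supplies in the tool call wins, and the value passed when the tool was constructed is the fallback (`inputQuery ?? options.query`). A standalone sketch of that precedence rule; the helper and types below are illustrative, not package code:

```typescript
// Illustrative helper mirroring the nullish-coalescing merge used above.
type CrawlArgs = {
  query?: string;
  allowExternal?: boolean;
};

function resolveArgs(constructorDefaults: CrawlArgs, modelInput: CrawlArgs): CrawlArgs {
  return {
    ...constructorDefaults,
    // model-provided values take precedence; undefined falls back to the default
    allowExternal: modelInput.allowExternal ?? constructorDefaults.allowExternal,
    query: modelInput.query ?? constructorDefaults.query,
  };
}

// No query from the model -> the construction-time default is used.
resolveArgs({ query: "release notes" }, { allowExternal: true });
// -> { query: "release notes", allowExternal: true }
```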
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tavily/ai-sdk",
-  "version": "0.1.0",
+  "version": "0.2.0",
   "description": "Tavily AI SDK tools - Search, Extract, Crawl, and Map",
   "type": "module",
   "main": "./dist/index.js",
@@ -31,6 +31,14 @@
   ],
   "author": "Tavily",
   "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/tavily-ai/ai-sdk"
+  },
+  "homepage": "https://docs.tavily.com/documentation/integrations/vercel",
+  "bugs": {
+    "url": "https://github.com/tavily-ai/ai-sdk/issues"
+  },
   "packageManager": "pnpm@10.8.0",
   "devDependencies": {
     "@ai-sdk/openai": "^2.0.71",