@tavily/ai-sdk 0.1.1 → 0.2.0

package/README.md CHANGED
@@ -76,6 +76,7 @@ const result = await generateText({
  - `autoParameters?: boolean` - Enable automatic parameter optimization
  - `timeout?: number` - Request timeout in milliseconds
  - `includeFavicon?: boolean` - Include favicon URLs in results
+ - `includeUsage?: boolean` - Whether to include credit usage information in the response.
  - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
  - `apiBaseURL?: string` - Custom API base URL

@@ -104,6 +105,7 @@ const result = await generateText({
  - `format?: "markdown" | "text"` - Output format (default: "markdown")
  - `timeout?: number` - Request timeout in milliseconds
  - `includeFavicon?: boolean` - Include favicon URLs in results
+ - `includeUsage?: boolean` - Whether to include credit usage information in the response. NOTE:The value may be 0 if the total successful URL extractions has not yet reached 5 calls. See our [Credits & Pricing documentation]("https://docs.tavily.com/documentation/api-credits") for details.
  - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
  - `apiBaseURL?: string` - Custom API base URL

@@ -145,6 +147,7 @@ const result = await generateText({
  - `format?: "markdown" | "text"` - Output format (default: "markdown")
  - `timeout?: number` - Request timeout in milliseconds
  - `includeFavicon?: boolean` - Include favicon URLs in results
+ - `includeUsage?: boolean` - Whether to include credit usage information in the response. NOTE:The value may be 0 if the total use of /extract and /map have not yet reached minimum requirements. See our [Credits & Pricing documentation]("https://docs.tavily.com/documentation/api-credits") for details.
  - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
  - `apiBaseURL?: string` - Custom API base URL

@@ -187,6 +190,7 @@ const result = await generateText({
  - `timeout?: number` - Request timeout in milliseconds
  - `proxies?: { http?: string, https?: string }` - HTTP/HTTPS proxy configuration
  - `apiBaseURL?: string` - Custom API base URL
+ - `includeUsage?: boolean` - Whether to include credit usage information in the response.NOTE:The value may be 0 if the total successful pages mapped has not yet reached 10 calls. See our [Credits & Pricing documentation]("https://docs.tavily.com/documentation/api-credits") for details.

  **Input Parameters (for AI agent):**
  - `url: string` - Base URL to start mapping from (required)
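
For orientation, here is a rough sketch of how the new `includeUsage` option documented above would be switched on. It follows the `generateText` pattern the README already uses; the model id and the `apiKey` client option are illustrative assumptions, not part of this diff.

```ts
// Sketch only: shows where `includeUsage` plugs in.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
import { tavilyExtract } from "@tavily/ai-sdk";

const result = await generateText({
  model: openai("gpt-4o-mini"), // illustrative model choice
  prompt: "Summarize https://docs.tavily.com/documentation/api-credits",
  tools: {
    extract: tavilyExtract({
      apiKey: process.env.TAVILY_API_KEY, // assumed client option, as in 0.1.x
      includeUsage: true, // new in 0.2.0: include credit usage in the response
    }),
  },
});
```

Per the notes above, the reported usage may read 0 until the documented call thresholds are reached.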
package/dist/index.d.ts CHANGED
@@ -21,6 +21,7 @@ type TavilyExtractOptions = TavilyClientOptions & Partial<TavilyExtractOptions$1
 declare const tavilyExtract: (options?: TavilyExtractOptions) => ai.Tool<{
     urls: string[];
     extractDepth?: "basic" | "advanced" | undefined;
+    query?: string | undefined;
 }, _tavily_core.TavilyExtractResponse>;

 type TavilyCrawlOptions = TavilyClientOptions & Partial<TavilyCrawlOptions$1>;
@@ -34,6 +35,7 @@ declare const tavilyCrawl: (options?: TavilyCrawlOptions) => ai.Tool<{
     extractDepth?: "basic" | "advanced" | undefined;
     instructions?: string | undefined;
     allowExternal?: boolean | undefined;
+    query?: string | undefined;
 }, _tavily_core.TavilyCrawlResponse>;

 type TavilyMapOptions = TavilyClientOptions & Partial<TavilyMapOptions$1>;
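
Both hunks add the same optional `query` field to a tool's input type. As a small illustration, an extract tool call that type-checks against the updated declaration could now look like this (the URL and query text are placeholders, not values from the package):

```ts
// Placeholder values; the field shape follows the tavilyExtract declaration above.
const extractInput: {
  urls: string[];
  extractDepth?: "basic" | "advanced";
  query?: string; // new in 0.2.0
} = {
  urls: ["https://example.com/pricing"],
  extractDepth: "basic",
  query: "how credits are counted",
};
```

The crawl tool input gains the identical optional field.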
package/dist/index.js CHANGED
@@ -38,18 +38,21 @@ var tavilyExtract = (options = {}) => {
     urls: z2.array(z2.string()).describe("Array of URLs to extract content from"),
     extractDepth: z2.enum(["basic", "advanced"]).optional().describe(
       "Extraction depth - 'basic' for main content, 'advanced' for comprehensive extraction (default: 'basic')"
-    )
+    ),
+    query: z2.string().optional().describe("User intent query for reranking extracted content chunks")
   });
   return tool2({
     description: "Extract clean, structured content from one or more URLs. Returns parsed content in markdown or text format, optimized for AI consumption.",
     inputSchema,
     execute: async ({
       urls,
-      extractDepth: inputExtractDepth
+      extractDepth: inputExtractDepth,
+      query: inputQuery
     }) => {
       return await client.extract(urls, {
         ...options,
-        extractDepth: inputExtractDepth ?? options.extractDepth
+        extractDepth: inputExtractDepth ?? options.extractDepth,
+        query: inputQuery ?? options.query
       });
     }
   });
@@ -70,7 +73,8 @@ var tavilyCrawl = (options = {}) => {
     instructions: z3.string().optional().describe(
       "Optional instructions to guide the crawler (e.g., 'only crawl blog posts', 'focus on product pages')"
     ),
-    allowExternal: z3.boolean().optional().describe("Whether to allow crawling external domains (default: false)")
+    allowExternal: z3.boolean().optional().describe("Whether to allow crawling external domains (default: false)"),
+    query: z3.string().optional().describe("User intent query for reranking extracted content chunks")
   });
   return tool3({
     description: "Crawl a website starting from a base URL to discover and extract content from multiple pages. Intelligently traverses links and extracts structured data at scale.",
@@ -80,14 +84,16 @@ var tavilyCrawl = (options = {}) => {
       maxDepth: inputMaxDepth,
       extractDepth: inputExtractDepth,
       instructions: inputInstructions,
-      allowExternal: inputAllowExternal
+      allowExternal: inputAllowExternal,
+      query: inputQuery
     }) => {
       return await client.crawl(url, {
         ...options,
         maxDepth: inputMaxDepth ?? options.maxDepth,
         extractDepth: inputExtractDepth ?? options.extractDepth,
         instructions: inputInstructions ?? options.instructions,
-        allowExternal: inputAllowExternal ?? options.allowExternal
+        allowExternal: inputAllowExternal ?? options.allowExternal,
+        query: inputQuery ?? options.query
       });
     }
   });
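
In both handlers, the new field follows the same precedence rule as the existing overrides: the value supplied in the tool call wins, and the `query` set when the tool was created is only a fallback. A minimal standalone sketch of that nullish-coalescing behaviour (not code from the package):

```ts
// inputQuery ?? optionQuery: the per-call value takes precedence; the option
// configured at tool creation is used only when the call omits `query`.
const resolveQuery = (inputQuery?: string, optionQuery?: string): string | undefined =>
  inputQuery ?? optionQuery;

console.log(resolveQuery("from the tool call", "from options")); // "from the tool call"
console.log(resolveQuery(undefined, "from options"));            // "from options"
console.log(resolveQuery(undefined, undefined));                 // undefined
```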
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tavily/ai-sdk",
-  "version": "0.1.1",
+  "version": "0.2.0",
   "description": "Tavily AI SDK tools - Search, Extract, Crawl, and Map",
   "type": "module",
   "main": "./dist/index.js",
@@ -15,6 +15,10 @@
   "files": [
     "dist"
   ],
+  "scripts": {
+    "build": "tsup src/index.ts --format esm --dts",
+    "prepublishOnly": "pnpm build"
+  },
   "keywords": [
     "ai",
     "ai-sdk",
@@ -35,6 +39,7 @@
   "bugs": {
     "url": "https://github.com/tavily-ai/ai-sdk/issues"
   },
+  "packageManager": "pnpm@10.8.0",
   "devDependencies": {
     "@ai-sdk/openai": "^2.0.71",
     "@types/node": "^24.10.1",
@@ -49,8 +54,5 @@
   "peerDependencies": {
     "ai": "^5.0.0",
     "zod": "^4.0.0"
-  },
-  "scripts": {
-    "build": "tsup src/index.ts --format esm --dts"
   }
-}
+}