@parallel-web/ai-sdk-tools 0.1.1-canary.776023d → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +117 -70
- package/dist/index.cjs +8 -11
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +30 -7
- package/dist/index.d.ts +30 -7
- package/dist/index.js +6 -9
- package/dist/index.js.map +1 -1
- package/package.json +6 -14
- package/dist/v4.cjs +0 -145
- package/dist/v4.cjs.map +0 -1
- package/dist/v4.d.cts +0 -15
- package/dist/v4.d.ts +0 -15
- package/dist/v4.js +0 -142
- package/dist/v4.js.map +0 -1
- package/dist/v5.cjs +0 -145
- package/dist/v5.cjs.map +0 -1
- package/dist/v5.d.cts +0 -2
- package/dist/v5.d.ts +0 -2
- package/dist/v5.js +0 -142
- package/dist/v5.js.map +0 -1
package/README.md
CHANGED
@@ -1,6 +1,6 @@
 # @parallel-web/ai-sdk-tools
 
-AI
+AI SDK tools for Parallel Web, built for Vercel's AI SDK v5.
 
 ## Installation
 
@@ -12,6 +12,8 @@ pnpm add ai @parallel-web/ai-sdk-tools
 yarn add ai @parallel-web/ai-sdk-tools
 ```
 
+> **Note:** This package requires AI SDK v5. If you're using AI SDK v4, see the [AI SDK v4 Implementation](#ai-sdk-v4-implementation) section below.
+
 ## Usage
 
 Add `PARALLEL_API_KEY` obtained from [Parallel Platform](https://platform.parallel.ai/settings?tab=api-keys) to your environment variables.
@@ -20,35 +22,17 @@ Add `PARALLEL_API_KEY` obtained from [Parallel Platform](https://platform.parall
 
 `searchTool` uses [Parallel's web search API](https://docs.parallel.ai/api-reference/search-api/search) to get fresh relevant search results.
 
-###
+### Extract Tool
 
-
-import { openai } from '@ai-sdk/openai';
-import { streamText, type Tool } from 'ai';
-import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools';
-
-const result = streamText({
-  model: openai('gpt-4o'),
-  messages: [
-    { role: 'user', content: 'What are the latest developments in AI?' }
-  ],
-  tools: {
-    'web-search': searchTool as Tool,
-    'web-extract': extractTool as Tool,
-  },
-  toolChoice: 'auto',
-});
+`extractTool` uses [Parallel's extract API](https://docs.parallel.ai/api-reference/search-and-extract-api-beta/extract) to extract a web page's content for a given objective.
 
-// Stream the response
-return result.toDataStreamResponse();
-```
 
-###
+### Basic Example
 
 ```typescript
 import { openai } from '@ai-sdk/openai';
 import { streamText, type Tool } from 'ai';
-import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools
+import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools';
 
 const result = streamText({
   model: openai('gpt-4o'),
@@ -56,8 +40,8 @@ const result = streamText({
     { role: 'user', content: 'What are the latest developments in AI?' }
   ],
   tools: {
-    'web-search': searchTool
-    'web-extract': extractTool
+    'web-search': searchTool,
+    'web-extract': extractTool,
   },
   toolChoice: 'auto',
 });
@@ -66,12 +50,9 @@ const result = streamText({
 return result.toDataStreamResponse();
 ```
 
-
 ### Custom Tools
 
 You can create custom tools that wrap the Parallel Web API:
-
-**For AI SDK v5:**
 ```typescript
 import { tool, generateText } from 'ai';
 import { openai } from '@ai-sdk/openai';
@@ -84,7 +65,7 @@ const parallel = new Parallel({
 
 const webSearch = tool({
   description: 'Use this tool to search the web.',
-  inputSchema: z.object({
+  inputSchema: z.object({
     searchQueries: z.array(z.string()).describe("Search queries"),
     usersQuestion: z.string().describe("The user's question"),
   }),
@@ -100,7 +81,12 @@ const webSearch = tool({
 });
 ```
 
-
+## AI SDK v4 Implementation
+
+If you're using AI SDK v4, you can implement the tools manually using the Parallel Web API. The key difference is that v4 uses `parameters` instead of `inputSchema`.
+
+### Search Tool (v4)
+
 ```typescript
 import { tool } from 'ai';
 import { z } from 'zod';
@@ -110,53 +96,114 @@ const parallel = new Parallel({
   apiKey: process.env.PARALLEL_API_KEY,
 });
 
-
-
-
-
-
+function getSearchParams(
+  search_type: 'list' | 'targeted' | 'general' | 'single_page'
+): Pick<BetaSearchParams, 'max_results' | 'max_chars_per_result'> {
+  switch (search_type) {
+    case 'targeted':
+      return {
+        max_results: 5,
+        max_chars_per_result: 16000
+      };
+    case 'general':
+      return {
+        max_results: 10,
+        max_chars_per_result: 9000
+      };
+    case 'single_page':
+      return {
+        max_results: 2,
+        max_chars_per_result: 30000
+      };
+    case 'list':
+    default:
+      return {
+        max_results: 20,
+        max_chars_per_result: 1500
+      };
+  }
+}
+
+const searchTool = tool({
+  description: `Use the web_search_parallel tool to access information from the web. The
+web_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.
+Intelligently scale the number of web_search_parallel tool calls to get more information
+when needed, from a single call for simple factual questions to five or more calls for
+complex research questions.`,
+  parameters: z.object({ // v4 uses parameters instead of inputSchema
+    objective: z.string().describe(
+      'Natural-language description of what the web research goal is.'
+    ),
+    search_type: z
+      .enum(['list', 'general', 'single_page', 'targeted'])
+      .optional()
+      .default('list'),
+    search_queries: z
+      .array(z.string())
+      .optional()
+      .describe('List of keyword search queries of 1-6 words.'),
+    include_domains: z
+      .array(z.string())
+      .optional()
+      .describe('List of valid URL domains to restrict search results.'),
  }),
-  execute: async (
-
-
-
-
-
-
+  execute: async (
+    { ...args },
+    { abortSignal }: { abortSignal?: AbortSignal }
+  ) => {
+    const results = await search(
+      { ...args, ...getSearchParams(args.search_type) },
+      { abortSignal }
+    );
+    return {
+      searchParams: args,
+      answer: results,
+    };
   },
 });
 ```
 
+### Extract Tool (v4)
 
-
-
-
-
-**This package supports both AI SDK v4 and v5.**
-
-Using npm package aliases, we bundle both `ai@4.x` and `ai@5.x` within the same package, providing compatibility for both versions.
-
-### Choose Your Version
-
-- **AI SDK v5 (Default/Recommended)**
-  ```typescript
-  import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools';
-  // or explicitly
-  import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools/v5';
-  ```
-
-- **AI SDK v4**
-  ```typescript
-  import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools/v4';
-  ```
-
-### How It Works
+```typescript
+import { tool } from 'ai';
+import { z } from 'zod';
+import { Parallel } from 'parallel-web';
 
-
-
-
+const parallel = new Parallel({
+  apiKey: process.env.PARALLEL_API_KEY,
+});
 
-
+const extractTool = tool({
+  description: `Purpose: Fetch and extract relevant content from specific web URLs.
+
+Ideal Use Cases:
+- Extracting content from specific URLs you've already identified
+- Exploring URLs returned by a web search in greater depth`,
+  parameters: z.object({ // v4 uses parameters instead of inputSchema
+    objective: z.string().describe(
+      'Natural-language description of what information you\'re looking for from the URLs.'
+    ),
+    urls: z.array(z.string()).describe(
+      'List of URLs to extract content from. Maximum 10 URLs per request.'
+    ),
+    search_queries: z
+      .array(z.string())
+      .optional()
+      .describe('Optional keyword search queries related to the objective.'),
+  }),
+  execute: async ({ objective, urls, search_queries }) => {
+    const results = await parallel.beta.extract({
+      objective,
+      urls,
+      search_queries,
+    });
+    return {
+      searchParams: { objective, urls, search_queries },
+      answer: results,
+    };
+  },
+});
+```
 
 
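The Search Tool (v4) example in the updated README calls a `search` helper that is not shown in the hunk. A minimal sketch of such a helper, modeled on the bundled code elsewhere in this diff (`dist/index.js`), could look like the following; the `BetaSearchParams` type, the `beta.search` call, and the `parallel-beta` header value are taken from that bundle and may differ in other releases:

```typescript
import { Parallel } from 'parallel-web';
import type { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';

const parallel = new Parallel({ apiKey: process.env.PARALLEL_API_KEY });

// Sketch of the `search` helper the v4 README example relies on, mirroring the
// call made in dist/index.js: forward the search arguments to Parallel's beta
// search endpoint and pass through the abort signal.
export const search = async (
  searchArgs: BetaSearchParams,
  { abortSignal }: { abortSignal?: AbortSignal }
) => {
  return await parallel.beta.search(
    { ...searchArgs },
    {
      signal: abortSignal,
      headers: { 'parallel-beta': 'search-extract-2025-10-10' },
    }
  );
};
```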
package/dist/index.cjs
CHANGED
@@ -1,10 +1,10 @@
 'use strict';
 
-var
+var ai = require('ai');
 var zod = require('zod');
 var parallelWeb = require('parallel-web');
 
-// src/
+// src/tools/search.ts
 var _parallelClient = null;
 var parallelClient = new Proxy({}, {
   get(_target, prop) {
@@ -17,7 +17,7 @@ var parallelClient = new Proxy({}, {
   }
 });
 
-// src/
+// src/tools/search.ts
 function getSearchParams(search_type) {
   switch (search_type) {
     case "targeted":
@@ -37,12 +37,12 @@ var search = async (searchArgs, { abortSignal }) => {
       ...searchArgs
     },
     {
-      signal: abortSignal
-
+      signal: abortSignal,
+      headers: { "parallel-beta": "search-extract-2025-10-10" }
     }
   );
 };
-var searchTool =
+var searchTool = ai.tool({
   description: `Use the web_search_parallel tool to access information from the web. The
 web_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.
 Intelligently scale the number of web_search_parallel tool calls to get more information
@@ -102,7 +102,7 @@ use other search types with include_domains to get more detailed results.`,
     };
   }
 });
-var extractTool =
+var extractTool = ai.tool({
   description: `Purpose: Fetch and extract relevant content from specific web URLs.
 
 Ideal Use Cases:
@@ -127,10 +127,7 @@ HTTP/HTTPS URLs. Maximum 10 URLs per request.`
   execute: async function({ ...args }, { abortSignal }) {
     const results = await parallelClient.beta.extract(
       { ...args },
-      {
-        signal: abortSignal,
-        headers: { "parallel-beta": "search-extract-2025-10-10" }
-      }
+      { signal: abortSignal, headers: { "parallel-beta": "parallel" } }
     );
     return {
       searchParams: args,
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../src/client.ts","../src/
+
{"version":3,"sources":["../src/client.ts","../src/tools/search.ts","../src/tools/extract.ts"],"names":["Parallel","tool","z"],"mappings":";;;;;;;AAMA,IAAI,eAAA,GAAmC,IAAA;AAEhC,IAAM,cAAA,GAAiB,IAAI,KAAA,CAAM,EAAC,EAAe;AAAA,EACtD,GAAA,CAAI,SAAS,IAAA,EAAM;AACjB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,eAAA,GAAkB,IAAIA,oBAAA,CAAS;AAAA,QAC7B,MAAA,EAAQ,OAAA,CAAQ,GAAA,CAAI,kBAAkB;AAAA,OACvC,CAAA;AAAA,IACH;AACA,IAAA,OAAQ,gBAAwB,IAAI,CAAA;AAAA,EACtC;AACF,CAAC,CAAA;;;ACRD,SAAS,gBACP,WAAA,EACgE;AAChE,EAAA,QAAQ,WAAA;AAAa,IACnB,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,IAAA,EAAM;AAAA,IACvD,KAAK,SAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,GAAA,EAAK;AAAA,IACvD,KAAK,aAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,GAAA,EAAM;AAAA,IACvD,KAAK,MAAA;AAAA,IACL;AACE,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,IAAA,EAAK;AAAA;AAE3D;AAEA,IAAM,MAAA,GAAS,OACb,UAAA,EACA,EAAE,aAAY,KACX;AACH,EAAA,OAAO,MAAM,eAAe,IAAA,CAAK,MAAA;AAAA,IAC/B;AAAA,MACE,GAAG;AAAA,KACL;AAAA,IACA;AAAA,MACE,MAAA,EAAQ,WAAA;AAAA,MACR,OAAA,EAAS,EAAE,eAAA,EAAiB,2BAAA;AAA4B;AAC1D,GACF;AACF,CAAA;AAEO,IAAM,aAAaC,OAAA,CAAK;AAAA,EAC7B,WAAA,EAAa,CAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,yEAAA,CAAA;AAAA,EAmBb,WAAA,EAAaC,MAAE,MAAA,CAAO;AAAA,IACpB,SAAA,EAAWA,KAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA;AAAA;AAAA;AAAA,iBAAA;AAAA,KAKF;AAAA,IACA,WAAA,EAAaA,MACV,IAAA,CAAK,CAAC,QAAQ,SAAA,EAAW,aAAA,EAAe,UAAU,CAAC,CAAA,CACnD,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAAA;AAAA,KAQF,CACC,QAAA,EAAS,CACT,OAAA,CAAQ,MAAM,CAAA;AAAA,IACjB,cAAA,EAAgBA,MACb,KAAA,CAAMA,KAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA,KAIF;AAAA,IACF,eAAA,EAAiBA,MAAE,KAAA,CAAMA,KAAA,CAAE,QAAQ,CAAA,CAAE,QAAA,EAAS,CAC3C,QAAA,CAAS,CAAA;AAAA;AAAA;AAAA,uEAAA,CAGwD;AAAA,GACrE,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,UAAU,MAAM,MAAA;AAAA,MACpB,EAAE,GAAG,IAAA,EAAM,GAAG,eAAA,CAAgB,IAAA,CAAK,WAAW,CAAA,EAAE;AAAA,MAChD,EAAE,WAAA;AAAY,KAChB;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC;ACrGM,IAAM,cAAcD,OAAAA,CAAK;AAAA,EAC9B,WAAA,EAAa,CAAA;;AAAA;AAAA;AAAA,0DAAA,CAAA;AAAA,EAKb,WAAA,EAAaC,MAAE,MAAA,CAAO;AAAA,IACpB,SAAA,EAAWA,KAAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA,yBAAA;AAAA,KAEF;AAAA,IAEA,MAAMA,KAAAA,CAAE,KAAA,CAAMA,KAAAA,CAAE,MAAA,EAAQ,CAAA,CAAE,QAAA;AAAA,MACxB,CAAA;AAAA,6CAAA;AAAA,KAEF;AAAA,IACA,cAAA,EAAgBA,MACb,KAAA,CAAMA,KAAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA;AAIF,GACH,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,OAAA,GAAU,MAAM,cAAA,CAAe,IAAA,CAAK,OAAA;AAAA,MACxC,EAAE,GAAG,IAAA,EAAK;AAAA,MACV,EAAE,MAAA,EAAQ,WAAA,EAAa,SAAS,EAAE,eAAA,EAAiB,YAAW;AAAE,KAClE;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC","file":"index.cjs","sourcesContent":["/**\n * Shared Parallel Web client instance\n */\n\nimport { Parallel } from 'parallel-web';\n\nlet _parallelClient: Parallel | null = null;\n\nexport const parallelClient = new Proxy({} as Parallel, {\n get(_target, prop) {\n if (!_parallelClient) {\n _parallelClient = new Parallel({\n apiKey: process.env['PARALLEL_API_KEY'],\n });\n }\n return (_parallelClient as any)[prop];\n },\n});\n","/**\n * Search tool for Parallel Web\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';\nimport { parallelClient } from '../client.js';\n\nfunction 
getSearchParams(\n search_type: 'list' | 'targeted' | 'general' | 'single_page'\n): Pick<BetaSearchParams, 'max_results' | 'max_chars_per_result'> {\n switch (search_type) {\n case 'targeted':\n return { max_results: 5, max_chars_per_result: 16000 };\n case 'general':\n return { max_results: 10, max_chars_per_result: 9000 };\n case 'single_page':\n return { max_results: 2, max_chars_per_result: 30000 };\n case 'list':\n default:\n return { max_results: 20, max_chars_per_result: 1500 };\n }\n}\n\nconst search = async (\n searchArgs: BetaSearchParams,\n { abortSignal }: { abortSignal: AbortSignal | undefined }\n) => {\n return await parallelClient.beta.search(\n {\n ...searchArgs,\n },\n {\n signal: abortSignal,\n headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n};\n\nexport const searchTool = tool({\n description: `Use the web_search_parallel tool to access information from the web. The\nweb_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.\nIntelligently scale the number of web_search_parallel tool calls to get more information\nwhen needed, from a single call for simple factual questions to five or more calls for\ncomplex research questions.\n\n* Keep queries concise - 1-6 words for best results. Start broad with very short\n queries and medium context, then add words to narrow results or use high context\n if needed.\n* Include broader context about what the search is trying to accomplish in the\n \\`objective\\` field. This helps the search engine understand the user's intent and\n provide relevant results and excerpts.\n* Never repeat similar search queries - make every query unique. If initial results are\n insufficient, reformulate queries to obtain new and better results.\n\nHow to use:\n- For simple queries, a one-shot call to depth is usually sufficient.\n- For complex multi-hop queries, first try to use breadth to narrow down sources. Then\nuse other search types with include_domains to get more detailed results.`,\n inputSchema: z.object({\n objective: z.string().describe(\n `Natural-language description of what the web research goal\n is. Specify the broad intent of the search query here. Also include any source or\n freshness guidance here. Limit to 200 characters. This should reflect the end goal so\n that the tool can better understand the intent and return the best results. Do not\n dump long texts.`\n ),\n search_type: z\n .enum(['list', 'general', 'single_page', 'targeted'])\n .describe(\n `Can be \"list\", \"general\", \"single_page\" or \"targeted\".\n \"list\" should be used for searching for data broadly, like aggregating data or\n considering multiple sources or doing broad initial research. \"targeted\" should be\n used for searching for data from a specific source set. \"general\" is a catch all case\n if there is no specific use case from list or targeted. \"single_page\" extracts data\n from a single page - extremely targeted. If there is a specific webpage you want the\n data from, use \"single_page\" and mention the URL in the objective.\n Use search_type appropriately.`\n )\n .optional()\n .default('list'),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. 
Usually 1-3 queries are\n ideal.`\n ),\n include_domains: z.array(z.string()).optional()\n .describe(`(optional) List of valid URL domains to explicitly\n focus on for the search. This will restrict all search results to only include results\n from the provided list. This is useful when you want to only use a specific set of\n sources. example: [\"google.com\", \"wikipedia.org\"]. Maximum 10 entries.`),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await search(\n { ...args, ...getSearchParams(args.search_type) },\n { abortSignal }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n","/**\n * Extract tool for Parallel Web\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { parallelClient } from '../client.js';\n\nexport const extractTool = tool({\n description: `Purpose: Fetch and extract relevant content from specific web URLs.\n\nIdeal Use Cases:\n- Extracting content from specific URLs you've already identified\n- Exploring URLs returned by a web search in greater depth`,\n inputSchema: z.object({\n objective: z.string().describe(\n `Natural-language description of what information you're looking for from the URLs. \n Limit to 200 characters.`\n ),\n\n urls: z.array(z.string()).describe(\n `List of URLs to extract content from. Must be valid\nHTTP/HTTPS URLs. Maximum 10 URLs per request.`\n ),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are\n ideal.`\n ),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await parallelClient.beta.extract(\n { ...args },\n { signal: abortSignal, headers: { 'parallel-beta': 'parallel' } }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n"]}
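The source map above embeds the original `src/client.ts` in its `sourcesContent`, which explains why importing the tools does not immediately require `PARALLEL_API_KEY`: the shared client is constructed lazily. Reproduced here from that embedded source, with comments added:

```typescript
/**
 * Shared Parallel Web client instance
 */
import { Parallel } from 'parallel-web';

let _parallelClient: Parallel | null = null;

// The Proxy defers construction of the Parallel client until a property is
// first accessed, so the API key is only read from the environment at call time.
export const parallelClient = new Proxy({} as Parallel, {
  get(_target, prop) {
    if (!_parallelClient) {
      _parallelClient = new Parallel({
        apiKey: process.env['PARALLEL_API_KEY'],
      });
    }
    return (_parallelClient as any)[prop];
  },
});
```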
package/dist/index.d.cts
CHANGED
@@ -1,15 +1,38 @@
-import
+import * as ai from 'ai';
+import * as parallel_web_resources_beta_beta_mjs from 'parallel-web/resources/beta/beta.mjs';
 
 /**
- * Search tool for Parallel Web
+ * Search tool for Parallel Web
 */
-
-
+declare const searchTool: ai.Tool<{
+    objective: string;
+    search_type: "list" | "targeted" | "general" | "single_page";
+    search_queries?: string[] | undefined;
+    include_domains?: string[] | undefined;
+}, {
+    searchParams: {
+        objective: string;
+        search_type: "list" | "targeted" | "general" | "single_page";
+        search_queries?: string[] | undefined;
+        include_domains?: string[] | undefined;
+    };
+    answer: parallel_web_resources_beta_beta_mjs.SearchResult;
+}>;
 
 /**
- * Extract tool for Parallel Web
+ * Extract tool for Parallel Web
 */
-
-
+declare const extractTool: ai.Tool<{
+    objective: string;
+    urls: string[];
+    search_queries?: string[] | undefined;
+}, {
+    searchParams: {
+        objective: string;
+        urls: string[];
+        search_queries?: string[] | undefined;
+    };
+    answer: parallel_web_resources_beta_beta_mjs.ExtractResponse;
+}>;
 
 export { extractTool, searchTool };
package/dist/index.d.ts
CHANGED
@@ -1,15 +1,38 @@
-import
+import * as ai from 'ai';
+import * as parallel_web_resources_beta_beta_mjs from 'parallel-web/resources/beta/beta.mjs';
 
 /**
- * Search tool for Parallel Web
+ * Search tool for Parallel Web
 */
-
-
+declare const searchTool: ai.Tool<{
+    objective: string;
+    search_type: "list" | "targeted" | "general" | "single_page";
+    search_queries?: string[] | undefined;
+    include_domains?: string[] | undefined;
+}, {
+    searchParams: {
+        objective: string;
+        search_type: "list" | "targeted" | "general" | "single_page";
+        search_queries?: string[] | undefined;
+        include_domains?: string[] | undefined;
+    };
+    answer: parallel_web_resources_beta_beta_mjs.SearchResult;
+}>;
 
 /**
- * Extract tool for Parallel Web
+ * Extract tool for Parallel Web
 */
-
-
+declare const extractTool: ai.Tool<{
+    objective: string;
+    urls: string[];
+    search_queries?: string[] | undefined;
+}, {
+    searchParams: {
+        objective: string;
+        urls: string[];
+        search_queries?: string[] | undefined;
+    };
+    answer: parallel_web_resources_beta_beta_mjs.ExtractResponse;
+}>;
 
 export { extractTool, searchTool };
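The new `ai.Tool<Input, Output>` declarations above mean consumers see typed tool inputs and results, with each result carrying the original `searchParams` plus an `answer` (`SearchResult` or `ExtractResponse`). A minimal consumer sketch, assuming AI SDK v5's `generateText`, the `@ai-sdk/openai` provider, and an illustrative model id:

```typescript
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';
import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools';

async function main() {
  const result = await generateText({
    model: openai('gpt-4o'),
    prompt: 'What are the latest developments in AI?',
    tools: {
      'web-search': searchTool,
      'web-extract': extractTool,
    },
  });

  // Per the declarations above, each tool result exposes the searchParams the
  // model supplied and the Parallel API response as `answer`.
  console.log(result.text);
  console.log(result.toolResults);
}

main().catch(console.error);
```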
package/dist/index.js
CHANGED
@@ -1,8 +1,8 @@
-import { tool } from 'ai
+import { tool } from 'ai';
 import { z } from 'zod';
 import { Parallel } from 'parallel-web';
 
-// src/
+// src/tools/search.ts
 var _parallelClient = null;
 var parallelClient = new Proxy({}, {
   get(_target, prop) {
@@ -15,7 +15,7 @@ var parallelClient = new Proxy({}, {
   }
 });
 
-// src/
+// src/tools/search.ts
 function getSearchParams(search_type) {
   switch (search_type) {
     case "targeted":
@@ -35,8 +35,8 @@ var search = async (searchArgs, { abortSignal }) => {
       ...searchArgs
     },
     {
-      signal: abortSignal
-
+      signal: abortSignal,
+      headers: { "parallel-beta": "search-extract-2025-10-10" }
     }
   );
 };
@@ -125,10 +125,7 @@ HTTP/HTTPS URLs. Maximum 10 URLs per request.`
   execute: async function({ ...args }, { abortSignal }) {
     const results = await parallelClient.beta.extract(
       { ...args },
-      {
-        signal: abortSignal,
-        headers: { "parallel-beta": "search-extract-2025-10-10" }
-      }
+      { signal: abortSignal, headers: { "parallel-beta": "parallel" } }
     );
     return {
       searchParams: args,
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../src/client.ts","../src/
+
{"version":3,"sources":["../src/client.ts","../src/tools/search.ts","../src/tools/extract.ts"],"names":["tool","z"],"mappings":";;;;;AAMA,IAAI,eAAA,GAAmC,IAAA;AAEhC,IAAM,cAAA,GAAiB,IAAI,KAAA,CAAM,EAAC,EAAe;AAAA,EACtD,GAAA,CAAI,SAAS,IAAA,EAAM;AACjB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,eAAA,GAAkB,IAAI,QAAA,CAAS;AAAA,QAC7B,MAAA,EAAQ,OAAA,CAAQ,GAAA,CAAI,kBAAkB;AAAA,OACvC,CAAA;AAAA,IACH;AACA,IAAA,OAAQ,gBAAwB,IAAI,CAAA;AAAA,EACtC;AACF,CAAC,CAAA;;;ACRD,SAAS,gBACP,WAAA,EACgE;AAChE,EAAA,QAAQ,WAAA;AAAa,IACnB,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,IAAA,EAAM;AAAA,IACvD,KAAK,SAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,GAAA,EAAK;AAAA,IACvD,KAAK,aAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,GAAA,EAAM;AAAA,IACvD,KAAK,MAAA;AAAA,IACL;AACE,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,IAAA,EAAK;AAAA;AAE3D;AAEA,IAAM,MAAA,GAAS,OACb,UAAA,EACA,EAAE,aAAY,KACX;AACH,EAAA,OAAO,MAAM,eAAe,IAAA,CAAK,MAAA;AAAA,IAC/B;AAAA,MACE,GAAG;AAAA,KACL;AAAA,IACA;AAAA,MACE,MAAA,EAAQ,WAAA;AAAA,MACR,OAAA,EAAS,EAAE,eAAA,EAAiB,2BAAA;AAA4B;AAC1D,GACF;AACF,CAAA;AAEO,IAAM,aAAa,IAAA,CAAK;AAAA,EAC7B,WAAA,EAAa,CAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,yEAAA,CAAA;AAAA,EAmBb,WAAA,EAAa,EAAE,MAAA,CAAO;AAAA,IACpB,SAAA,EAAW,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA;AAAA;AAAA;AAAA,iBAAA;AAAA,KAKF;AAAA,IACA,WAAA,EAAa,EACV,IAAA,CAAK,CAAC,QAAQ,SAAA,EAAW,aAAA,EAAe,UAAU,CAAC,CAAA,CACnD,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAAA;AAAA,KAQF,CACC,QAAA,EAAS,CACT,OAAA,CAAQ,MAAM,CAAA;AAAA,IACjB,cAAA,EAAgB,EACb,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA,KAIF;AAAA,IACF,eAAA,EAAiB,EAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAAE,QAAA,EAAS,CAC3C,QAAA,CAAS,CAAA;AAAA;AAAA;AAAA,uEAAA,CAGwD;AAAA,GACrE,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,UAAU,MAAM,MAAA;AAAA,MACpB,EAAE,GAAG,IAAA,EAAM,GAAG,eAAA,CAAgB,IAAA,CAAK,WAAW,CAAA,EAAE;AAAA,MAChD,EAAE,WAAA;AAAY,KAChB;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC;ACrGM,IAAM,cAAcA,IAAAA,CAAK;AAAA,EAC9B,WAAA,EAAa,CAAA;;AAAA;AAAA;AAAA,0DAAA,CAAA;AAAA,EAKb,WAAA,EAAaC,EAAE,MAAA,CAAO;AAAA,IACpB,SAAA,EAAWA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA,yBAAA;AAAA,KAEF;AAAA,IAEA,MAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,EAAQ,CAAA,CAAE,QAAA;AAAA,MACxB,CAAA;AAAA,6CAAA;AAAA,KAEF;AAAA,IACA,cAAA,EAAgBA,EACb,KAAA,CAAMA,CAAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA;AAIF,GACH,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,OAAA,GAAU,MAAM,cAAA,CAAe,IAAA,CAAK,OAAA;AAAA,MACxC,EAAE,GAAG,IAAA,EAAK;AAAA,MACV,EAAE,MAAA,EAAQ,WAAA,EAAa,SAAS,EAAE,eAAA,EAAiB,YAAW;AAAE,KAClE;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC","file":"index.js","sourcesContent":["/**\n * Shared Parallel Web client instance\n */\n\nimport { Parallel } from 'parallel-web';\n\nlet _parallelClient: Parallel | null = null;\n\nexport const parallelClient = new Proxy({} as Parallel, {\n get(_target, prop) {\n if (!_parallelClient) {\n _parallelClient = new Parallel({\n apiKey: process.env['PARALLEL_API_KEY'],\n });\n }\n return (_parallelClient as any)[prop];\n },\n});\n","/**\n * Search tool for Parallel Web\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';\nimport { parallelClient } from '../client.js';\n\nfunction getSearchParams(\n 
search_type: 'list' | 'targeted' | 'general' | 'single_page'\n): Pick<BetaSearchParams, 'max_results' | 'max_chars_per_result'> {\n switch (search_type) {\n case 'targeted':\n return { max_results: 5, max_chars_per_result: 16000 };\n case 'general':\n return { max_results: 10, max_chars_per_result: 9000 };\n case 'single_page':\n return { max_results: 2, max_chars_per_result: 30000 };\n case 'list':\n default:\n return { max_results: 20, max_chars_per_result: 1500 };\n }\n}\n\nconst search = async (\n searchArgs: BetaSearchParams,\n { abortSignal }: { abortSignal: AbortSignal | undefined }\n) => {\n return await parallelClient.beta.search(\n {\n ...searchArgs,\n },\n {\n signal: abortSignal,\n headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n};\n\nexport const searchTool = tool({\n description: `Use the web_search_parallel tool to access information from the web. The\nweb_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.\nIntelligently scale the number of web_search_parallel tool calls to get more information\nwhen needed, from a single call for simple factual questions to five or more calls for\ncomplex research questions.\n\n* Keep queries concise - 1-6 words for best results. Start broad with very short\n queries and medium context, then add words to narrow results or use high context\n if needed.\n* Include broader context about what the search is trying to accomplish in the\n \\`objective\\` field. This helps the search engine understand the user's intent and\n provide relevant results and excerpts.\n* Never repeat similar search queries - make every query unique. If initial results are\n insufficient, reformulate queries to obtain new and better results.\n\nHow to use:\n- For simple queries, a one-shot call to depth is usually sufficient.\n- For complex multi-hop queries, first try to use breadth to narrow down sources. Then\nuse other search types with include_domains to get more detailed results.`,\n inputSchema: z.object({\n objective: z.string().describe(\n `Natural-language description of what the web research goal\n is. Specify the broad intent of the search query here. Also include any source or\n freshness guidance here. Limit to 200 characters. This should reflect the end goal so\n that the tool can better understand the intent and return the best results. Do not\n dump long texts.`\n ),\n search_type: z\n .enum(['list', 'general', 'single_page', 'targeted'])\n .describe(\n `Can be \"list\", \"general\", \"single_page\" or \"targeted\".\n \"list\" should be used for searching for data broadly, like aggregating data or\n considering multiple sources or doing broad initial research. \"targeted\" should be\n used for searching for data from a specific source set. \"general\" is a catch all case\n if there is no specific use case from list or targeted. \"single_page\" extracts data\n from a single page - extremely targeted. If there is a specific webpage you want the\n data from, use \"single_page\" and mention the URL in the objective.\n Use search_type appropriately.`\n )\n .optional()\n .default('list'),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. 
Usually 1-3 queries are\n ideal.`\n ),\n include_domains: z.array(z.string()).optional()\n .describe(`(optional) List of valid URL domains to explicitly\n focus on for the search. This will restrict all search results to only include results\n from the provided list. This is useful when you want to only use a specific set of\n sources. example: [\"google.com\", \"wikipedia.org\"]. Maximum 10 entries.`),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await search(\n { ...args, ...getSearchParams(args.search_type) },\n { abortSignal }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n","/**\n * Extract tool for Parallel Web\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { parallelClient } from '../client.js';\n\nexport const extractTool = tool({\n description: `Purpose: Fetch and extract relevant content from specific web URLs.\n\nIdeal Use Cases:\n- Extracting content from specific URLs you've already identified\n- Exploring URLs returned by a web search in greater depth`,\n inputSchema: z.object({\n objective: z.string().describe(\n `Natural-language description of what information you're looking for from the URLs. \n Limit to 200 characters.`\n ),\n\n urls: z.array(z.string()).describe(\n `List of URLs to extract content from. Must be valid\nHTTP/HTTPS URLs. Maximum 10 URLs per request.`\n ),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are\n ideal.`\n ),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await parallelClient.beta.extract(\n { ...args },\n { signal: abortSignal, headers: { 'parallel-beta': 'parallel' } }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n"]}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@parallel-web/ai-sdk-tools",
-  "version": "0.1.
+  "version": "0.1.3",
   "description": "AI SDK tools for Parallel Web",
   "author": "Parallel Web",
   "license": "MIT",
@@ -13,16 +13,6 @@
       "types": "./dist/index.d.ts",
       "import": "./dist/index.js",
       "require": "./dist/index.cjs"
-    },
-    "./v4": {
-      "types": "./dist/v4.d.ts",
-      "import": "./dist/v4.js",
-      "require": "./dist/v4.cjs"
-    },
-    "./v5": {
-      "types": "./dist/v5.d.ts",
-      "import": "./dist/v5.js",
-      "require": "./dist/v5.cjs"
     }
   },
   "files": [
@@ -51,12 +41,14 @@
     "access": "public"
   },
   "dependencies": {
-    "ai-v4": "npm:ai@^4.0.0",
-    "ai-v5": "npm:ai@^5.0.0",
     "parallel-web": "^0.2.1",
     "zod": "^3.23.0"
   },
+  "peerDependencies": {
+    "ai": "^5.0.0"
+  },
   "devDependencies": {
-    "@types/node": "^20.0.0"
+    "@types/node": "^20.0.0",
+    "ai": "^5.0.0"
   }
 }
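Taken together, the exports and dependency changes above drop the dual v4/v5 bundling (the `ai-v4`/`ai-v5` npm aliases and the `./v4` and `./v5` subpath exports) in favor of a single entry point with `ai` v5 as a peer dependency. A migration sketch for consumers of the earlier canary builds, with the old subpath names taken from the removed exports map:

```typescript
// Before (0.1.1-canary builds): version-specific subpath exports bundled
// their own copy of the AI SDK via npm aliases (ai-v4 / ai-v5).
// import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools/v4';
// import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools/v5';

// After (0.1.3): one root export; the consumer installs the AI SDK itself,
// e.g. `npm install ai@^5.0.0 @parallel-web/ai-sdk-tools`.
import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools';

export { searchTool, extractTool };
```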