@parallel-web/ai-sdk-tools 0.1.1-canary.023f473 → 0.1.1-canary.776023d

This diff shows the changes between two publicly released versions of this package as published to a supported public registry. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
1
- import { Parallel } from 'parallel-web';
2
- import { tool } from 'ai';
1
+ import { tool } from 'ai-v5';
3
2
  import { z } from 'zod';
3
+ import { Parallel } from 'parallel-web';
4
4
 
5
- // src/client.ts
5
+ // src/v5/tools/search.ts
6
6
  var _parallelClient = null;
7
7
  var parallelClient = new Proxy({}, {
8
8
  get(_target, prop) {
@@ -14,6 +14,21 @@ var parallelClient = new Proxy({}, {
14
14
  return _parallelClient[prop];
15
15
  }
16
16
  });
17
+
18
+ // src/v5/tools/search.ts
19
+ function getSearchParams(search_type) {
20
+ switch (search_type) {
21
+ case "targeted":
22
+ return { max_results: 5, max_chars_per_result: 16e3 };
23
+ case "general":
24
+ return { max_results: 10, max_chars_per_result: 9e3 };
25
+ case "single_page":
26
+ return { max_results: 2, max_chars_per_result: 3e4 };
27
+ case "list":
28
+ default:
29
+ return { max_results: 20, max_chars_per_result: 1500 };
30
+ }
31
+ }
17
32
  var search = async (searchArgs, { abortSignal }) => {
18
33
  return await parallelClient.beta.search(
19
34
  {
@@ -21,31 +36,100 @@ var search = async (searchArgs, { abortSignal }) => {
21
36
  },
22
37
  {
23
38
  signal: abortSignal
39
+ // headers: { 'parallel-beta': 'search-extract-2025-10-10' },
24
40
  }
25
41
  );
26
42
  };
27
43
  var searchTool = tool({
28
- description: "Search Tool to quickly search in websites and published/public information on the internet like news, articles, blogs, posts, products, services, etc.",
29
- parameters: z.object({
44
+ description: `Use the web_search_parallel tool to access information from the web. The
45
+ web_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.
46
+ Intelligently scale the number of web_search_parallel tool calls to get more information
47
+ when needed, from a single call for simple factual questions to five or more calls for
48
+ complex research questions.
49
+
50
+ * Keep queries concise - 1-6 words for best results. Start broad with very short
51
+ queries and medium context, then add words to narrow results or use high context
52
+ if needed.
53
+ * Include broader context about what the search is trying to accomplish in the
54
+ \`objective\` field. This helps the search engine understand the user's intent and
55
+ provide relevant results and excerpts.
56
+ * Never repeat similar search queries - make every query unique. If initial results are
57
+ insufficient, reformulate queries to obtain new and better results.
58
+
59
+ How to use:
60
+ - For simple queries, a one-shot call to depth is usually sufficient.
61
+ - For complex multi-hop queries, first try to use breadth to narrow down sources. Then
62
+ use other search types with include_domains to get more detailed results.`,
63
+ inputSchema: z.object({
30
64
  objective: z.string().describe(
31
- "Natural-language description of what the web search is trying to find. May include guidance about preferred sources or freshness. At least one of objective or search_queries must be provided."
65
+ `Natural-language description of what the web research goal
66
+ is. Specify the broad intent of the search query here. Also include any source or
67
+ freshness guidance here. Limit to 200 characters. This should reflect the end goal so
68
+ that the tool can better understand the intent and return the best results. Do not
69
+ dump long texts.`
32
70
  ),
71
+ search_type: z.enum(["list", "general", "single_page", "targeted"]).describe(
72
+ `Can be "list", "general", "single_page" or "targeted".
73
+ "list" should be used for searching for data broadly, like aggregating data or
74
+ considering multiple sources or doing broad initial research. "targeted" should be
75
+ used for searching for data from a specific source set. "general" is a catch all case
76
+ if there is no specific use case from list or targeted. "single_page" extracts data
77
+ from a single page - extremely targeted. If there is a specific webpage you want the
78
+ data from, use "single_page" and mention the URL in the objective.
79
+ Use search_type appropriately.`
80
+ ).optional().default("list"),
33
81
  search_queries: z.array(z.string()).optional().describe(
34
- "Optional list of traditional keyword search queries to guide the search. May contain search operators. At least one of objective or search_queries must be provided."
82
+ `(optional) List of keyword search queries of 1-6
83
+ words, which may include search operators. The search queries should be related to the
84
+ objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are
85
+ ideal.`
35
86
  ),
36
- processor: z.enum(["pro", "base"]).optional().describe(
37
- "The processor to use for the search. `pro` is recommended for complex queries, or incomplete objectives. `base` is recommended for simple queries."
87
+ include_domains: z.array(z.string()).optional().describe(`(optional) List of valid URL domains to explicitly
88
+ focus on for the search. This will restrict all search results to only include results
89
+ from the provided list. This is useful when you want to only use a specific set of
90
+ sources. example: ["google.com", "wikipedia.org"]. Maximum 10 entries.`)
91
+ }),
92
+ execute: async function({ ...args }, { abortSignal }) {
93
+ const results = await search(
94
+ { ...args, ...getSearchParams(args.search_type) },
95
+ { abortSignal }
96
+ );
97
+ return {
98
+ searchParams: args,
99
+ answer: results
100
+ };
101
+ }
102
+ });
103
+ var extractTool = tool({
104
+ description: `Purpose: Fetch and extract relevant content from specific web URLs.
105
+
106
+ Ideal Use Cases:
107
+ - Extracting content from specific URLs you've already identified
108
+ - Exploring URLs returned by a web search in greater depth`,
109
+ inputSchema: z.object({
110
+ objective: z.string().describe(
111
+ `Natural-language description of what information you're looking for from the URLs.
112
+ Limit to 200 characters.`
38
113
  ),
39
- max_results: z.number().optional().describe(
40
- "The maximum number of results to return. Default is 10. Optional value, do not pass if not needed."
114
+ urls: z.array(z.string()).describe(
115
+ `List of URLs to extract content from. Must be valid
116
+ HTTP/HTTPS URLs. Maximum 10 URLs per request.`
41
117
  ),
42
- source_policy: z.object({
43
- include_domains: z.array(z.string()).optional().describe("The sources to include in the search. Optional value."),
44
- exclude_domains: z.array(z.string()).optional().describe("The sources to exclude in the search. Optional value.")
45
- }).optional().describe("The policy to use for the search. Optional value.")
118
+ search_queries: z.array(z.string()).optional().describe(
119
+ `(optional) List of keyword search queries of 1-6
120
+ words, which may include search operators. The search queries should be related to the
121
+ objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are
122
+ ideal.`
123
+ )
46
124
  }),
47
125
  execute: async function({ ...args }, { abortSignal }) {
48
- const results = await search(args, { abortSignal });
126
+ const results = await parallelClient.beta.extract(
127
+ { ...args },
128
+ {
129
+ signal: abortSignal,
130
+ headers: { "parallel-beta": "search-extract-2025-10-10" }
131
+ }
132
+ );
49
133
  return {
50
134
  searchParams: args,
51
135
  answer: results
@@ -53,6 +137,6 @@ var searchTool = tool({
53
137
  }
54
138
  });
55
139
 
56
- export { parallelClient, searchTool };
140
+ export { extractTool, searchTool };
57
141
  //# sourceMappingURL=index.js.map
58
142
  //# sourceMappingURL=index.js.map
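For orientation: the rebuilt dist/index.js drops the parallelClient re-export and ships the AI SDK v5 flavor of the tools (inputSchema rather than parameters, with the 'ai-v5' alias standing in for the consumer's own 'ai' package). A minimal consumer-side sketch, assuming the root entry maps to the dist/index.js shown above, an app on AI SDK v5, and PARALLEL_API_KEY set in the environment (the Proxy client reads it lazily on first use); the tool-map keys and the stepCountIs stop condition are choices made for this sketch, not mandated by the package:

import { generateText, stepCountIs } from 'ai';
import { openai } from '@ai-sdk/openai';
import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools';

const { text } = await generateText({
  model: openai('gpt-4o-mini'),
  // Tool-map keys become the names the model sees; the search tool's own
  // description refers to itself as web_search_parallel, so that name is reused.
  tools: { web_search_parallel: searchTool, web_extract_parallel: extractTool },
  stopWhen: stepCountIs(5), // allow a few search/extract round trips before answering
  prompt: 'Find recent coverage of the Parallel Search API and summarize it.',
});
console.log(text);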
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/client.ts","../src/tools/search.ts"],"names":[],"mappings":";;;;;AAMA,IAAI,eAAA,GAAmC,IAAA;AAEhC,IAAM,cAAA,GAAiB,IAAI,KAAA,CAAM,EAAC,EAAe;AAAA,EACtD,GAAA,CAAI,SAAS,IAAA,EAAM;AACjB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,eAAA,GAAkB,IAAI,QAAA,CAAS;AAAA,QAC7B,MAAA,EAAQ,OAAA,CAAQ,GAAA,CAAI,kBAAkB;AAAA,OACvC,CAAA;AAAA,IACH;AACA,IAAA,OAAQ,gBAAwB,IAAI,CAAA;AAAA,EACtC;AACF,CAAC;ACRD,IAAM,MAAA,GAAS,OACb,UAAA,EACA,EAAE,aAAY,KACX;AACH,EAAA,OAAO,MAAM,eAAe,IAAA,CAAK,MAAA;AAAA,IAC/B;AAAA,MACE,GAAG;AAAA,KACL;AAAA,IACA;AAAA,MACE,MAAA,EAAQ;AAAA;AACV,GACF;AACF,CAAA;AAEO,IAAM,aAAa,IAAA,CAAK;AAAA,EAC7B,WAAA,EACE,wJAAA;AAAA,EACF,UAAA,EAAY,EAAE,MAAA,CAAO;AAAA,IACnB,SAAA,EAAW,CAAA,CACR,MAAA,EAAO,CACP,QAAA;AAAA,MACC;AAAA,KACF;AAAA,IACF,cAAA,EAAgB,EACb,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC;AAAA,KACF;AAAA,IACF,SAAA,EAAW,EACR,IAAA,CAAK,CAAC,OAAO,MAAM,CAAC,CAAA,CACpB,QAAA,EAAS,CACT,QAAA;AAAA,MACC;AAAA,KACF;AAAA,IACF,WAAA,EAAa,CAAA,CACV,MAAA,EAAO,CACP,UAAS,CACT,QAAA;AAAA,MACC;AAAA,KACF;AAAA,IACF,aAAA,EAAe,EACZ,MAAA,CAAO;AAAA,MACN,eAAA,EAAiB,CAAA,CACd,KAAA,CAAM,CAAA,CAAE,MAAA,EAAQ,CAAA,CAChB,QAAA,EAAS,CACT,QAAA,CAAS,uDAAuD,CAAA;AAAA,MACnE,eAAA,EAAiB,CAAA,CACd,KAAA,CAAM,CAAA,CAAE,MAAA,EAAQ,CAAA,CAChB,QAAA,EAAS,CACT,QAAA,CAAS,uDAAuD;AAAA,KACpE,CAAA,CACA,QAAA,EAAS,CACT,SAAS,mDAAmD;AAAA,GAChE,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,UAAU,MAAM,MAAA,CAAO,IAAA,EAAM,EAAE,aAAa,CAAA;AAElD,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC","file":"index.js","sourcesContent":["/**\n * Shared Parallel Web client instance\n */\n\nimport { Parallel } from 'parallel-web';\n\nlet _parallelClient: Parallel | null = null;\n\nexport const parallelClient = new Proxy({} as Parallel, {\n get(_target, prop) {\n if (!_parallelClient) {\n _parallelClient = new Parallel({\n apiKey: process.env['PARALLEL_API_KEY'],\n });\n }\n return (_parallelClient as any)[prop];\n },\n});\n","/**\n * Search tool for Parallel Web\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';\nimport { parallelClient } from '../client.js';\n\nconst search = async (\n searchArgs: BetaSearchParams,\n { abortSignal }: { abortSignal: AbortSignal | undefined }\n) => {\n return await parallelClient.beta.search(\n {\n ...searchArgs,\n },\n {\n signal: abortSignal,\n }\n );\n};\n\nexport const searchTool = tool({\n description:\n 'Search Tool to quickly search in websites and published/public information on the internet like news, articles, blogs, posts, products, services, etc.',\n parameters: z.object({\n objective: z\n .string()\n .describe(\n 'Natural-language description of what the web search is trying to find. May include guidance about preferred sources or freshness. At least one of objective or search_queries must be provided.'\n ),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n 'Optional list of traditional keyword search queries to guide the search. May contain search operators. At least one of objective or search_queries must be provided.'\n ),\n processor: z\n .enum(['pro', 'base'])\n .optional()\n .describe(\n 'The processor to use for the search. `pro` is recommended for complex queries, or incomplete objectives. `base` is recommended for simple queries.'\n ),\n max_results: z\n .number()\n .optional()\n .describe(\n 'The maximum number of results to return. Default is 10. 
Optional value, do not pass if not needed.'\n ),\n source_policy: z\n .object({\n include_domains: z\n .array(z.string())\n .optional()\n .describe('The sources to include in the search. Optional value.'),\n exclude_domains: z\n .array(z.string())\n .optional()\n .describe('The sources to exclude in the search. Optional value.'),\n })\n .optional()\n .describe('The policy to use for the search. Optional value.'),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await search(args, { abortSignal });\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n"]}
1
+ {"version":3,"sources":["../src/client.ts","../src/v5/tools/search.ts","../src/v5/tools/extract.ts"],"names":["tool","z"],"mappings":";;;;;AAMA,IAAI,eAAA,GAAmC,IAAA;AAEhC,IAAM,cAAA,GAAiB,IAAI,KAAA,CAAM,EAAC,EAAe;AAAA,EACtD,GAAA,CAAI,SAAS,IAAA,EAAM;AACjB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,eAAA,GAAkB,IAAI,QAAA,CAAS;AAAA,QAC7B,MAAA,EAAQ,OAAA,CAAQ,GAAA,CAAI,kBAAkB;AAAA,OACvC,CAAA;AAAA,IACH;AACA,IAAA,OAAQ,gBAAwB,IAAI,CAAA;AAAA,EACtC;AACF,CAAC,CAAA;;;ACRD,SAAS,gBACP,WAAA,EACgE;AAChE,EAAA,QAAQ,WAAA;AAAa,IACnB,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,IAAA,EAAM;AAAA,IACvD,KAAK,SAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,GAAA,EAAK;AAAA,IACvD,KAAK,aAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,GAAA,EAAM;AAAA,IACvD,KAAK,MAAA;AAAA,IACL;AACE,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,IAAA,EAAK;AAAA;AAE3D;AAEA,IAAM,MAAA,GAAS,OACb,UAAA,EACA,EAAE,aAAY,KACX;AACH,EAAA,OAAO,MAAM,eAAe,IAAA,CAAK,MAAA;AAAA,IAC/B;AAAA,MACE,GAAG;AAAA,KACL;AAAA,IACA;AAAA,MACE,MAAA,EAAQ;AAAA;AAAA;AAEV,GACF;AACF,CAAA;AAEO,IAAM,aAAqB,IAAA,CAAK;AAAA,EACrC,WAAA,EAAa,CAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,yEAAA,CAAA;AAAA,EAmBb,WAAA,EAAa,EAAE,MAAA,CAAO;AAAA,IACpB,SAAA,EAAW,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA;AAAA;AAAA;AAAA,iBAAA;AAAA,KAKF;AAAA,IACA,WAAA,EAAa,EACV,IAAA,CAAK,CAAC,QAAQ,SAAA,EAAW,aAAA,EAAe,UAAU,CAAC,CAAA,CACnD,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAAA;AAAA,KAQF,CACC,QAAA,EAAS,CACT,OAAA,CAAQ,MAAM,CAAA;AAAA,IACjB,cAAA,EAAgB,EACb,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA,KAIF;AAAA,IACF,eAAA,EAAiB,EAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAAE,QAAA,EAAS,CAC3C,QAAA,CAAS,CAAA;AAAA;AAAA;AAAA,uEAAA,CAGwD;AAAA,GACrE,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,UAAU,MAAM,MAAA;AAAA,MACpB,EAAE,GAAG,IAAA,EAAM,GAAG,eAAA,CAAgB,IAAA,CAAK,WAAW,CAAA,EAAE;AAAA,MAChD,EAAE,WAAA;AAAY,KAChB;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC;ACrGM,IAAM,cAAsBA,IAAAA,CAAK;AAAA,EACtC,WAAA,EAAa,CAAA;;AAAA;AAAA;AAAA,0DAAA,CAAA;AAAA,EAKb,WAAA,EAAaC,EAAE,MAAA,CAAO;AAAA,IACpB,SAAA,EAAWA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA,yBAAA;AAAA,KAEF;AAAA,IAEA,MAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,EAAQ,CAAA,CAAE,QAAA;AAAA,MACxB,CAAA;AAAA,6CAAA;AAAA,KAEF;AAAA,IACA,cAAA,EAAgBA,EACb,KAAA,CAAMA,CAAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA;AAIF,GACH,CAAA;AAAA,EAED,OAAA,EAAS,eAAgB,EAAE,GAAG,MAAK,EAAG,EAAE,aAAY,EAAG;AACrD,IAAA,MAAM,OAAA,GAAU,MAAM,cAAA,CAAe,IAAA,CAAK,OAAA;AAAA,MACxC,EAAE,GAAG,IAAA,EAAK;AAAA,MACV;AAAA,QACE,MAAA,EAAQ,WAAA;AAAA,QACR,OAAA,EAAS,EAAE,eAAA,EAAiB,2BAAA;AAA4B;AAC1D,KACF;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC","file":"index.js","sourcesContent":["/**\n * Shared Parallel Web client instance\n */\n\nimport { Parallel } from 'parallel-web';\n\nlet _parallelClient: Parallel | null = null;\n\nexport const parallelClient = new Proxy({} as Parallel, {\n get(_target, prop) {\n if (!_parallelClient) {\n _parallelClient = new Parallel({\n apiKey: process.env['PARALLEL_API_KEY'],\n });\n }\n return (_parallelClient as any)[prop];\n },\n});\n","/**\n * Search tool for Parallel Web (AI SDK v5)\n */\n\nimport { tool, type Tool as ToolV5 } from 'ai-v5';\nimport { z } from 'zod';\nimport { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';\nimport { parallelClient } from '../../client.js';\n\nfunction 
getSearchParams(\n search_type: 'list' | 'targeted' | 'general' | 'single_page'\n): Pick<BetaSearchParams, 'max_results' | 'max_chars_per_result'> {\n switch (search_type) {\n case 'targeted':\n return { max_results: 5, max_chars_per_result: 16000 };\n case 'general':\n return { max_results: 10, max_chars_per_result: 9000 };\n case 'single_page':\n return { max_results: 2, max_chars_per_result: 30000 };\n case 'list':\n default:\n return { max_results: 20, max_chars_per_result: 1500 };\n }\n}\n\nconst search = async (\n searchArgs: BetaSearchParams,\n { abortSignal }: { abortSignal: AbortSignal | undefined }\n) => {\n return await parallelClient.beta.search(\n {\n ...searchArgs,\n },\n {\n signal: abortSignal,\n // headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n};\n\nexport const searchTool: ToolV5 = tool({\n description: `Use the web_search_parallel tool to access information from the web. The\nweb_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.\nIntelligently scale the number of web_search_parallel tool calls to get more information\nwhen needed, from a single call for simple factual questions to five or more calls for\ncomplex research questions.\n\n* Keep queries concise - 1-6 words for best results. Start broad with very short\n queries and medium context, then add words to narrow results or use high context\n if needed.\n* Include broader context about what the search is trying to accomplish in the\n \\`objective\\` field. This helps the search engine understand the user's intent and\n provide relevant results and excerpts.\n* Never repeat similar search queries - make every query unique. If initial results are\n insufficient, reformulate queries to obtain new and better results.\n\nHow to use:\n- For simple queries, a one-shot call to depth is usually sufficient.\n- For complex multi-hop queries, first try to use breadth to narrow down sources. Then\nuse other search types with include_domains to get more detailed results.`,\n inputSchema: z.object({\n objective: z.string().describe(\n `Natural-language description of what the web research goal\n is. Specify the broad intent of the search query here. Also include any source or\n freshness guidance here. Limit to 200 characters. This should reflect the end goal so\n that the tool can better understand the intent and return the best results. Do not\n dump long texts.`\n ),\n search_type: z\n .enum(['list', 'general', 'single_page', 'targeted'])\n .describe(\n `Can be \"list\", \"general\", \"single_page\" or \"targeted\".\n \"list\" should be used for searching for data broadly, like aggregating data or\n considering multiple sources or doing broad initial research. \"targeted\" should be\n used for searching for data from a specific source set. \"general\" is a catch all case\n if there is no specific use case from list or targeted. \"single_page\" extracts data\n from a single page - extremely targeted. If there is a specific webpage you want the\n data from, use \"single_page\" and mention the URL in the objective.\n Use search_type appropriately.`\n )\n .optional()\n .default('list'),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. 
Usually 1-3 queries are\n ideal.`\n ),\n include_domains: z.array(z.string()).optional()\n .describe(`(optional) List of valid URL domains to explicitly\n focus on for the search. This will restrict all search results to only include results\n from the provided list. This is useful when you want to only use a specific set of\n sources. example: [\"google.com\", \"wikipedia.org\"]. Maximum 10 entries.`),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await search(\n { ...args, ...getSearchParams(args.search_type) },\n { abortSignal }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n","/**\n * Extract tool for Parallel Web (AI SDK v5)\n */\n\nimport { tool, type Tool as ToolV5 } from 'ai-v5';\nimport { z } from 'zod';\nimport { parallelClient } from '../../client.js';\n\nexport const extractTool: ToolV5 = tool({\n description: `Purpose: Fetch and extract relevant content from specific web URLs.\n\nIdeal Use Cases:\n- Extracting content from specific URLs you've already identified\n- Exploring URLs returned by a web search in greater depth`,\n inputSchema: z.object({\n objective: z.string().describe(\n `Natural-language description of what information you're looking for from the URLs. \n Limit to 200 characters.`\n ),\n\n urls: z.array(z.string()).describe(\n `List of URLs to extract content from. Must be valid\nHTTP/HTTPS URLs. Maximum 10 URLs per request.`\n ),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are\n ideal.`\n ),\n }),\n\n execute: async function ({ ...args }, { abortSignal }) {\n const results = await parallelClient.beta.extract(\n { ...args },\n {\n signal: abortSignal,\n headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n"]}
package/dist/v4.cjs ADDED
@@ -0,0 +1,145 @@
1
+ 'use strict';
2
+
3
+ var aiV4 = require('ai-v4');
4
+ var zod = require('zod');
5
+ var parallelWeb = require('parallel-web');
6
+
7
+ // src/v4/tools/search.ts
8
+ var _parallelClient = null;
9
+ var parallelClient = new Proxy({}, {
10
+ get(_target, prop) {
11
+ if (!_parallelClient) {
12
+ _parallelClient = new parallelWeb.Parallel({
13
+ apiKey: process.env["PARALLEL_API_KEY"]
14
+ });
15
+ }
16
+ return _parallelClient[prop];
17
+ }
18
+ });
19
+
20
+ // src/v4/tools/search.ts
21
+ function getSearchParams(search_type) {
22
+ switch (search_type) {
23
+ case "targeted":
24
+ return { max_results: 5, max_chars_per_result: 16e3 };
25
+ case "general":
26
+ return { max_results: 10, max_chars_per_result: 9e3 };
27
+ case "single_page":
28
+ return { max_results: 2, max_chars_per_result: 3e4 };
29
+ case "list":
30
+ default:
31
+ return { max_results: 20, max_chars_per_result: 1500 };
32
+ }
33
+ }
34
+ var search = async (searchArgs, { abortSignal }) => {
35
+ return await parallelClient.beta.search(
36
+ {
37
+ ...searchArgs
38
+ },
39
+ {
40
+ signal: abortSignal
41
+ // headers: { 'parallel-beta': 'search-extract-2025-10-10' },
42
+ }
43
+ );
44
+ };
45
+ var searchTool = aiV4.tool({
46
+ description: `Use the web_search_parallel tool to access information from the web. The
47
+ web_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.
48
+ Intelligently scale the number of web_search_parallel tool calls to get more information
49
+ when needed, from a single call for simple factual questions to five or more calls for
50
+ complex research questions.
51
+
52
+ * Keep queries concise - 1-6 words for best results. Start broad with very short
53
+ queries and medium context, then add words to narrow results or use high context
54
+ if needed.
55
+ * Include broader context about what the search is trying to accomplish in the
56
+ \`objective\` field. This helps the search engine understand the user's intent and
57
+ provide relevant results and excerpts.
58
+ * Never repeat similar search queries - make every query unique. If initial results are
59
+ insufficient, reformulate queries to obtain new and better results.
60
+
61
+ How to use:
62
+ - For simple queries, a one-shot call to depth is usually sufficient.
63
+ - For complex multi-hop queries, first try to use breadth to narrow down sources. Then
64
+ use other search types with include_domains to get more detailed results.`,
65
+ parameters: zod.z.object({
66
+ objective: zod.z.string().describe(
67
+ `Natural-language description of what the web research goal
68
+ is. Specify the broad intent of the search query here. Also include any source or
69
+ freshness guidance here. Limit to 200 characters. This should reflect the end goal so
70
+ that the tool can better understand the intent and return the best results. Do not
71
+ dump long texts.`
72
+ ),
73
+ search_type: zod.z.enum(["list", "general", "single_page", "targeted"]).describe(
74
+ `Can be "list", "general", "single_page" or "targeted".
75
+ "list" should be used for searching for data broadly, like aggregating data or
76
+ considering multiple sources or doing broad initial research. "targeted" should be
77
+ used for searching for data from a specific source set. "general" is a catch all case
78
+ if there is no specific use case from list or targeted. "single_page" extracts data
79
+ from a single page - extremely targeted. If there is a specific webpage you want the
80
+ data from, use "single_page" and mention the URL in the objective.
81
+ Use search_type appropriately.`
82
+ ).optional().default("list"),
83
+ search_queries: zod.z.array(zod.z.string()).optional().describe(
84
+ `(optional) List of keyword search queries of 1-6
85
+ words, which may include search operators. The search queries should be related to the
86
+ objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are
87
+ ideal.`
88
+ ),
89
+ include_domains: zod.z.array(zod.z.string()).optional().describe(`(optional) List of valid URL domains to explicitly
90
+ focus on for the search. This will restrict all search results to only include results
91
+ from the provided list. This is useful when you want to only use a specific set of
92
+ sources. example: ["google.com", "wikipedia.org"]. Maximum 10 entries.`)
93
+ }),
94
+ execute: async function({ ...args }, { abortSignal }) {
95
+ const results = await search(
96
+ { ...args, ...getSearchParams(args.search_type) },
97
+ { abortSignal }
98
+ );
99
+ return {
100
+ searchParams: args,
101
+ answer: results
102
+ };
103
+ }
104
+ });
105
+ var extractTool = aiV4.tool({
106
+ description: `Purpose: Fetch and extract relevant content from specific web URLs.
107
+
108
+ Ideal Use Cases:
109
+ - Extracting content from specific URLs you've already identified
110
+ - Exploring URLs returned by a web search in greater depth`,
111
+ parameters: zod.z.object({
112
+ objective: zod.z.string().describe(
113
+ `Natural-language description of what information you're looking for from the URLs.
114
+ Limit to 200 characters.`
115
+ ),
116
+ urls: zod.z.array(zod.z.string()).describe(
117
+ `List of URLs to extract content from. Must be valid
118
+ HTTP/HTTPS URLs. Maximum 10 URLs per request.`
119
+ ),
120
+ search_queries: zod.z.array(zod.z.string()).optional().describe(
121
+ `(optional) List of keyword search queries of 1-6
122
+ words, which may include search operators. The search queries should be related to the
123
+ objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are
124
+ ideal.`
125
+ )
126
+ }),
127
+ execute: async function({ ...args }, { abortSignal }) {
128
+ const results = await parallelClient.beta.extract(
129
+ { ...args },
130
+ {
131
+ signal: abortSignal,
132
+ headers: { "parallel-beta": "search-extract-2025-10-10" }
133
+ }
134
+ );
135
+ return {
136
+ searchParams: args,
137
+ answer: results
138
+ };
139
+ }
140
+ });
141
+
142
+ exports.extractTool = extractTool;
143
+ exports.searchTool = searchTool;
144
+ //# sourceMappingURL=v4.cjs.map
145
+ //# sourceMappingURL=v4.cjs.map
package/dist/v4.cjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/client.ts","../src/v4/tools/search.ts","../src/v4/tools/extract.ts"],"names":["Parallel","tool","z"],"mappings":";;;;;;;AAMA,IAAI,eAAA,GAAmC,IAAA;AAEhC,IAAM,cAAA,GAAiB,IAAI,KAAA,CAAM,EAAC,EAAe;AAAA,EACtD,GAAA,CAAI,SAAS,IAAA,EAAM;AACjB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,eAAA,GAAkB,IAAIA,oBAAA,CAAS;AAAA,QAC7B,MAAA,EAAQ,OAAA,CAAQ,GAAA,CAAI,kBAAkB;AAAA,OACvC,CAAA;AAAA,IACH;AACA,IAAA,OAAQ,gBAAwB,IAAI,CAAA;AAAA,EACtC;AACF,CAAC,CAAA;;;ACRD,SAAS,gBACP,WAAA,EACgE;AAChE,EAAA,QAAQ,WAAA;AAAa,IACnB,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,IAAA,EAAM;AAAA,IACvD,KAAK,SAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,GAAA,EAAK;AAAA,IACvD,KAAK,aAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,GAAA,EAAM;AAAA,IACvD,KAAK,MAAA;AAAA,IACL;AACE,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,IAAA,EAAK;AAAA;AAE3D;AAEA,IAAM,MAAA,GAAS,OACb,UAAA,EACA,EAAE,aAAY,KACX;AACH,EAAA,OAAO,MAAM,eAAe,IAAA,CAAK,MAAA;AAAA,IAC/B;AAAA,MACE,GAAG;AAAA,KACL;AAAA,IACA;AAAA,MACE,MAAA,EAAQ;AAAA;AAAA;AAEV,GACF;AACF,CAAA;AAEO,IAAM,aAAqBC,SAAA,CAAK;AAAA,EACrC,WAAA,EAAa,CAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,yEAAA,CAAA;AAAA,EAmBb,UAAA,EAAYC,MAAE,MAAA,CAAO;AAAA,IACnB,SAAA,EAAWA,KAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA;AAAA;AAAA;AAAA,iBAAA;AAAA,KAKF;AAAA,IACA,WAAA,EAAaA,MACV,IAAA,CAAK,CAAC,QAAQ,SAAA,EAAW,aAAA,EAAe,UAAU,CAAC,CAAA,CACnD,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAAA;AAAA,KAQF,CACC,QAAA,EAAS,CACT,OAAA,CAAQ,MAAM,CAAA;AAAA,IACjB,cAAA,EAAgBA,MACb,KAAA,CAAMA,KAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA,KAIF;AAAA,IACF,eAAA,EAAiBA,MAAE,KAAA,CAAMA,KAAA,CAAE,QAAQ,CAAA,CAAE,QAAA,EAAS,CAC3C,QAAA,CAAS,CAAA;AAAA;AAAA;AAAA,uEAAA,CAGwD;AAAA,GACrE,CAAA;AAAA,EAED,OAAA,EAAS,eACP,EAAE,GAAG,MAAK,EACV,EAAE,aAAY,EACd;AACA,IAAA,MAAM,UAAU,MAAM,MAAA;AAAA,MACpB,EAAE,GAAG,IAAA,EAAM,GAAG,eAAA,CAAgB,IAAA,CAAK,WAAW,CAAA,EAAE;AAAA,MAChD,EAAE,WAAA;AAAY,KAChB;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC;ACxGM,IAAM,cAAsBD,SAAAA,CAAK;AAAA,EACtC,WAAA,EAAa,CAAA;;AAAA;AAAA;AAAA,0DAAA,CAAA;AAAA,EAKb,UAAA,EAAYC,MAAE,MAAA,CAAO;AAAA,IACnB,SAAA,EAAWA,KAAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA,yBAAA;AAAA,KAEF;AAAA,IAEA,MAAMA,KAAAA,CAAE,KAAA,CAAMA,KAAAA,CAAE,MAAA,EAAQ,CAAA,CAAE,QAAA;AAAA,MACxB,CAAA;AAAA,6CAAA;AAAA,KAEF;AAAA,IACA,cAAA,EAAgBA,MACb,KAAA,CAAMA,KAAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA;AAIF,GACH,CAAA;AAAA,EAED,OAAA,EAAS,eACP,EAAE,GAAG,MAAK,EACV,EAAE,aAAY,EACd;AACA,IAAA,MAAM,OAAA,GAAU,MAAM,cAAA,CAAe,IAAA,CAAK,OAAA;AAAA,MACxC,EAAE,GAAG,IAAA,EAAK;AAAA,MACV;AAAA,QACE,MAAA,EAAQ,WAAA;AAAA,QACR,OAAA,EAAS,EAAE,eAAA,EAAiB,2BAAA;AAA4B;AAC1D,KACF;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC","file":"v4.cjs","sourcesContent":["/**\n * Shared Parallel Web client instance\n */\n\nimport { Parallel } from 'parallel-web';\n\nlet _parallelClient: Parallel | null = null;\n\nexport const parallelClient = new Proxy({} as Parallel, {\n get(_target, prop) {\n if (!_parallelClient) {\n _parallelClient = new Parallel({\n apiKey: process.env['PARALLEL_API_KEY'],\n });\n }\n return (_parallelClient as any)[prop];\n },\n});\n","/**\n * Search tool for Parallel Web (AI SDK v4)\n */\n\nimport { tool, type Tool as ToolV4 } from 'ai-v4';\nimport { z } from 'zod';\nimport { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';\nimport { parallelClient } from 
'../../client.js';\n\nfunction getSearchParams(\n search_type: 'list' | 'targeted' | 'general' | 'single_page'\n): Pick<BetaSearchParams, 'max_results' | 'max_chars_per_result'> {\n switch (search_type) {\n case 'targeted':\n return { max_results: 5, max_chars_per_result: 16000 };\n case 'general':\n return { max_results: 10, max_chars_per_result: 9000 };\n case 'single_page':\n return { max_results: 2, max_chars_per_result: 30000 };\n case 'list':\n default:\n return { max_results: 20, max_chars_per_result: 1500 };\n }\n}\n\nconst search = async (\n searchArgs: BetaSearchParams,\n { abortSignal }: { abortSignal: AbortSignal | undefined }\n) => {\n return await parallelClient.beta.search(\n {\n ...searchArgs,\n },\n {\n signal: abortSignal,\n // headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n};\n\nexport const searchTool: ToolV4 = tool({\n description: `Use the web_search_parallel tool to access information from the web. The\nweb_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.\nIntelligently scale the number of web_search_parallel tool calls to get more information\nwhen needed, from a single call for simple factual questions to five or more calls for\ncomplex research questions.\n\n* Keep queries concise - 1-6 words for best results. Start broad with very short\n queries and medium context, then add words to narrow results or use high context\n if needed.\n* Include broader context about what the search is trying to accomplish in the\n \\`objective\\` field. This helps the search engine understand the user's intent and\n provide relevant results and excerpts.\n* Never repeat similar search queries - make every query unique. If initial results are\n insufficient, reformulate queries to obtain new and better results.\n\nHow to use:\n- For simple queries, a one-shot call to depth is usually sufficient.\n- For complex multi-hop queries, first try to use breadth to narrow down sources. Then\nuse other search types with include_domains to get more detailed results.`,\n parameters: z.object({\n objective: z.string().describe(\n `Natural-language description of what the web research goal\n is. Specify the broad intent of the search query here. Also include any source or\n freshness guidance here. Limit to 200 characters. This should reflect the end goal so\n that the tool can better understand the intent and return the best results. Do not\n dump long texts.`\n ),\n search_type: z\n .enum(['list', 'general', 'single_page', 'targeted'])\n .describe(\n `Can be \"list\", \"general\", \"single_page\" or \"targeted\".\n \"list\" should be used for searching for data broadly, like aggregating data or\n considering multiple sources or doing broad initial research. \"targeted\" should be\n used for searching for data from a specific source set. \"general\" is a catch all case\n if there is no specific use case from list or targeted. \"single_page\" extracts data\n from a single page - extremely targeted. If there is a specific webpage you want the\n data from, use \"single_page\" and mention the URL in the objective.\n Use search_type appropriately.`\n )\n .optional()\n .default('list'),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. 
Usually 1-3 queries are\n ideal.`\n ),\n include_domains: z.array(z.string()).optional()\n .describe(`(optional) List of valid URL domains to explicitly\n focus on for the search. This will restrict all search results to only include results\n from the provided list. This is useful when you want to only use a specific set of\n sources. example: [\"google.com\", \"wikipedia.org\"]. Maximum 10 entries.`),\n }),\n\n execute: async function (\n { ...args },\n { abortSignal }: { abortSignal?: AbortSignal }\n ) {\n const results = await search(\n { ...args, ...getSearchParams(args.search_type) },\n { abortSignal }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n","/**\n * Extract tool for Parallel Web (AI SDK v4)\n */\n\nimport { tool, type Tool as ToolV4 } from 'ai-v4';\nimport { z } from 'zod';\nimport { parallelClient } from '../../client.js';\n\nexport const extractTool: ToolV4 = tool({\n description: `Purpose: Fetch and extract relevant content from specific web URLs.\n\nIdeal Use Cases:\n- Extracting content from specific URLs you've already identified\n- Exploring URLs returned by a web search in greater depth`,\n parameters: z.object({\n objective: z.string().describe(\n `Natural-language description of what information you're looking for from the URLs. \n Limit to 200 characters.`\n ),\n\n urls: z.array(z.string()).describe(\n `List of URLs to extract content from. Must be valid\nHTTP/HTTPS URLs. Maximum 10 URLs per request.`\n ),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are\n ideal.`\n ),\n }),\n\n execute: async function (\n { ...args },\n { abortSignal }: { abortSignal?: AbortSignal }\n ) {\n const results = await parallelClient.beta.extract(\n { ...args },\n {\n signal: abortSignal,\n headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n"]}
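The new dist/v4.cjs (and its source map above) gives require()-based projects the same two tools, built against AI SDK v4. A short CommonJS smoke-test sketch; the '/v4' subpath, the example URL, and the second execute argument (mirroring what the AI SDK passes at runtime) are assumptions, since the package's exports map is not part of this diff:

// PARALLEL_API_KEY must be set before the first call - the shared Proxy client
// instantiates the Parallel SDK lazily from process.env.
const { extractTool } = require('@parallel-web/ai-sdk-tools/v4');

extractTool
  .execute(
    { objective: 'Grab the changelog highlights', urls: ['https://example.com/changelog'] },
    { toolCallId: 'manual-test', messages: [], abortSignal: undefined }
  )
  .then((result) => console.log(result.answer));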
package/dist/v4.d.cts ADDED
@@ -0,0 +1,15 @@
1
+ import { Tool } from 'ai-v4';
2
+
3
+ /**
4
+ * Search tool for Parallel Web (AI SDK v4)
5
+ */
6
+
7
+ declare const searchTool: Tool;
8
+
9
+ /**
10
+ * Extract tool for Parallel Web (AI SDK v4)
11
+ */
12
+
13
+ declare const extractTool: Tool;
14
+
15
+ export { extractTool, searchTool };
package/dist/v4.d.ts ADDED
@@ -0,0 +1,15 @@
1
+ import { Tool } from 'ai-v4';
2
+
3
+ /**
4
+ * Search tool for Parallel Web (AI SDK v4)
5
+ */
6
+
7
+ declare const searchTool: Tool;
8
+
9
+ /**
10
+ * Extract tool for Parallel Web (AI SDK v4)
11
+ */
12
+
13
+ declare const extractTool: Tool;
14
+
15
+ export { extractTool, searchTool };
package/dist/v4.js ADDED
@@ -0,0 +1,142 @@
1
+ import { tool } from 'ai-v4';
2
+ import { z } from 'zod';
3
+ import { Parallel } from 'parallel-web';
4
+
5
+ // src/v4/tools/search.ts
6
+ var _parallelClient = null;
7
+ var parallelClient = new Proxy({}, {
8
+ get(_target, prop) {
9
+ if (!_parallelClient) {
10
+ _parallelClient = new Parallel({
11
+ apiKey: process.env["PARALLEL_API_KEY"]
12
+ });
13
+ }
14
+ return _parallelClient[prop];
15
+ }
16
+ });
17
+
18
+ // src/v4/tools/search.ts
19
+ function getSearchParams(search_type) {
20
+ switch (search_type) {
21
+ case "targeted":
22
+ return { max_results: 5, max_chars_per_result: 16e3 };
23
+ case "general":
24
+ return { max_results: 10, max_chars_per_result: 9e3 };
25
+ case "single_page":
26
+ return { max_results: 2, max_chars_per_result: 3e4 };
27
+ case "list":
28
+ default:
29
+ return { max_results: 20, max_chars_per_result: 1500 };
30
+ }
31
+ }
32
+ var search = async (searchArgs, { abortSignal }) => {
33
+ return await parallelClient.beta.search(
34
+ {
35
+ ...searchArgs
36
+ },
37
+ {
38
+ signal: abortSignal
39
+ // headers: { 'parallel-beta': 'search-extract-2025-10-10' },
40
+ }
41
+ );
42
+ };
43
+ var searchTool = tool({
44
+ description: `Use the web_search_parallel tool to access information from the web. The
45
+ web_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.
46
+ Intelligently scale the number of web_search_parallel tool calls to get more information
47
+ when needed, from a single call for simple factual questions to five or more calls for
48
+ complex research questions.
49
+
50
+ * Keep queries concise - 1-6 words for best results. Start broad with very short
51
+ queries and medium context, then add words to narrow results or use high context
52
+ if needed.
53
+ * Include broader context about what the search is trying to accomplish in the
54
+ \`objective\` field. This helps the search engine understand the user's intent and
55
+ provide relevant results and excerpts.
56
+ * Never repeat similar search queries - make every query unique. If initial results are
57
+ insufficient, reformulate queries to obtain new and better results.
58
+
59
+ How to use:
60
+ - For simple queries, a one-shot call to depth is usually sufficient.
61
+ - For complex multi-hop queries, first try to use breadth to narrow down sources. Then
62
+ use other search types with include_domains to get more detailed results.`,
63
+ parameters: z.object({
64
+ objective: z.string().describe(
65
+ `Natural-language description of what the web research goal
66
+ is. Specify the broad intent of the search query here. Also include any source or
67
+ freshness guidance here. Limit to 200 characters. This should reflect the end goal so
68
+ that the tool can better understand the intent and return the best results. Do not
69
+ dump long texts.`
70
+ ),
71
+ search_type: z.enum(["list", "general", "single_page", "targeted"]).describe(
72
+ `Can be "list", "general", "single_page" or "targeted".
73
+ "list" should be used for searching for data broadly, like aggregating data or
74
+ considering multiple sources or doing broad initial research. "targeted" should be
75
+ used for searching for data from a specific source set. "general" is a catch all case
76
+ if there is no specific use case from list or targeted. "single_page" extracts data
77
+ from a single page - extremely targeted. If there is a specific webpage you want the
78
+ data from, use "single_page" and mention the URL in the objective.
79
+ Use search_type appropriately.`
80
+ ).optional().default("list"),
81
+ search_queries: z.array(z.string()).optional().describe(
82
+ `(optional) List of keyword search queries of 1-6
83
+ words, which may include search operators. The search queries should be related to the
84
+ objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are
85
+ ideal.`
86
+ ),
87
+ include_domains: z.array(z.string()).optional().describe(`(optional) List of valid URL domains to explicitly
88
+ focus on for the search. This will restrict all search results to only include results
89
+ from the provided list. This is useful when you want to only use a specific set of
90
+ sources. example: ["google.com", "wikipedia.org"]. Maximum 10 entries.`)
91
+ }),
92
+ execute: async function({ ...args }, { abortSignal }) {
93
+ const results = await search(
94
+ { ...args, ...getSearchParams(args.search_type) },
95
+ { abortSignal }
96
+ );
97
+ return {
98
+ searchParams: args,
99
+ answer: results
100
+ };
101
+ }
102
+ });
103
+ var extractTool = tool({
104
+ description: `Purpose: Fetch and extract relevant content from specific web URLs.
105
+
106
+ Ideal Use Cases:
107
+ - Extracting content from specific URLs you've already identified
108
+ - Exploring URLs returned by a web search in greater depth`,
109
+ parameters: z.object({
110
+ objective: z.string().describe(
111
+ `Natural-language description of what information you're looking for from the URLs.
112
+ Limit to 200 characters.`
113
+ ),
114
+ urls: z.array(z.string()).describe(
115
+ `List of URLs to extract content from. Must be valid
116
+ HTTP/HTTPS URLs. Maximum 10 URLs per request.`
117
+ ),
118
+ search_queries: z.array(z.string()).optional().describe(
119
+ `(optional) List of keyword search queries of 1-6
120
+ words, which may include search operators. The search queries should be related to the
121
+ objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are
122
+ ideal.`
123
+ )
124
+ }),
125
+ execute: async function({ ...args }, { abortSignal }) {
126
+ const results = await parallelClient.beta.extract(
127
+ { ...args },
128
+ {
129
+ signal: abortSignal,
130
+ headers: { "parallel-beta": "search-extract-2025-10-10" }
131
+ }
132
+ );
133
+ return {
134
+ searchParams: args,
135
+ answer: results
136
+ };
137
+ }
138
+ });
139
+
140
+ export { extractTool, searchTool };
141
+ //# sourceMappingURL=v4.js.map
142
+ //# sourceMappingURL=v4.js.map
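dist/v4.js is the ESM counterpart of v4.cjs, and the v4.d.ts/v4.d.cts declarations type both exports as AI SDK v4 Tools. The v4 build differs from the v5 one only in the tool-definition shape (parameters vs inputSchema); multi-step tool calling on v4 is driven by maxSteps rather than v5's stopWhen. A sketch for an AI SDK v4 app, with the same caveats as the earlier sketches (the subpath and tool names are illustrative):

import { generateText } from 'ai'; // assumed to resolve to AI SDK v4 in this app
import { openai } from '@ai-sdk/openai';
import { searchTool, extractTool } from '@parallel-web/ai-sdk-tools/v4';

const { text } = await generateText({
  model: openai('gpt-4o-mini'),
  tools: { web_search_parallel: searchTool, web_extract_parallel: extractTool },
  maxSteps: 5, // AI SDK v4 option; v5 replaced it with stopWhen/stepCountIs
  prompt: 'Summarize recent announcements about the Parallel Search API.',
});
console.log(text);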
package/dist/v4.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/client.ts","../src/v4/tools/search.ts","../src/v4/tools/extract.ts"],"names":["tool","z"],"mappings":";;;;;AAMA,IAAI,eAAA,GAAmC,IAAA;AAEhC,IAAM,cAAA,GAAiB,IAAI,KAAA,CAAM,EAAC,EAAe;AAAA,EACtD,GAAA,CAAI,SAAS,IAAA,EAAM;AACjB,IAAA,IAAI,CAAC,eAAA,EAAiB;AACpB,MAAA,eAAA,GAAkB,IAAI,QAAA,CAAS;AAAA,QAC7B,MAAA,EAAQ,OAAA,CAAQ,GAAA,CAAI,kBAAkB;AAAA,OACvC,CAAA;AAAA,IACH;AACA,IAAA,OAAQ,gBAAwB,IAAI,CAAA;AAAA,EACtC;AACF,CAAC,CAAA;;;ACRD,SAAS,gBACP,WAAA,EACgE;AAChE,EAAA,QAAQ,WAAA;AAAa,IACnB,KAAK,UAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,IAAA,EAAM;AAAA,IACvD,KAAK,SAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,GAAA,EAAK;AAAA,IACvD,KAAK,aAAA;AACH,MAAA,OAAO,EAAE,WAAA,EAAa,CAAA,EAAG,oBAAA,EAAsB,GAAA,EAAM;AAAA,IACvD,KAAK,MAAA;AAAA,IACL;AACE,MAAA,OAAO,EAAE,WAAA,EAAa,EAAA,EAAI,oBAAA,EAAsB,IAAA,EAAK;AAAA;AAE3D;AAEA,IAAM,MAAA,GAAS,OACb,UAAA,EACA,EAAE,aAAY,KACX;AACH,EAAA,OAAO,MAAM,eAAe,IAAA,CAAK,MAAA;AAAA,IAC/B;AAAA,MACE,GAAG;AAAA,KACL;AAAA,IACA;AAAA,MACE,MAAA,EAAQ;AAAA;AAAA;AAEV,GACF;AACF,CAAA;AAEO,IAAM,aAAqB,IAAA,CAAK;AAAA,EACrC,WAAA,EAAa,CAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,yEAAA,CAAA;AAAA,EAmBb,UAAA,EAAY,EAAE,MAAA,CAAO;AAAA,IACnB,SAAA,EAAW,CAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA;AAAA;AAAA;AAAA,iBAAA;AAAA,KAKF;AAAA,IACA,WAAA,EAAa,EACV,IAAA,CAAK,CAAC,QAAQ,SAAA,EAAW,aAAA,EAAe,UAAU,CAAC,CAAA,CACnD,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAAA;AAAA,KAQF,CACC,QAAA,EAAS,CACT,OAAA,CAAQ,MAAM,CAAA;AAAA,IACjB,cAAA,EAAgB,EACb,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA,KAIF;AAAA,IACF,eAAA,EAAiB,EAAE,KAAA,CAAM,CAAA,CAAE,QAAQ,CAAA,CAAE,QAAA,EAAS,CAC3C,QAAA,CAAS,CAAA;AAAA;AAAA;AAAA,uEAAA,CAGwD;AAAA,GACrE,CAAA;AAAA,EAED,OAAA,EAAS,eACP,EAAE,GAAG,MAAK,EACV,EAAE,aAAY,EACd;AACA,IAAA,MAAM,UAAU,MAAM,MAAA;AAAA,MACpB,EAAE,GAAG,IAAA,EAAM,GAAG,eAAA,CAAgB,IAAA,CAAK,WAAW,CAAA,EAAE;AAAA,MAChD,EAAE,WAAA;AAAY,KAChB;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC;ACxGM,IAAM,cAAsBA,IAAAA,CAAK;AAAA,EACtC,WAAA,EAAa,CAAA;;AAAA;AAAA;AAAA,0DAAA,CAAA;AAAA,EAKb,UAAA,EAAYC,EAAE,MAAA,CAAO;AAAA,IACnB,SAAA,EAAWA,CAAAA,CAAE,MAAA,EAAO,CAAE,QAAA;AAAA,MACpB,CAAA;AAAA,yBAAA;AAAA,KAEF;AAAA,IAEA,MAAMA,CAAAA,CAAE,KAAA,CAAMA,CAAAA,CAAE,MAAA,EAAQ,CAAA,CAAE,QAAA;AAAA,MACxB,CAAA;AAAA,6CAAA;AAAA,KAEF;AAAA,IACA,cAAA,EAAgBA,EACb,KAAA,CAAMA,CAAAA,CAAE,QAAQ,CAAA,CAChB,UAAS,CACT,QAAA;AAAA,MACC,CAAA;AAAA;AAAA;AAAA,OAAA;AAAA;AAIF,GACH,CAAA;AAAA,EAED,OAAA,EAAS,eACP,EAAE,GAAG,MAAK,EACV,EAAE,aAAY,EACd;AACA,IAAA,MAAM,OAAA,GAAU,MAAM,cAAA,CAAe,IAAA,CAAK,OAAA;AAAA,MACxC,EAAE,GAAG,IAAA,EAAK;AAAA,MACV;AAAA,QACE,MAAA,EAAQ,WAAA;AAAA,QACR,OAAA,EAAS,EAAE,eAAA,EAAiB,2BAAA;AAA4B;AAC1D,KACF;AAEA,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA;AAAA,MACd,MAAA,EAAQ;AAAA,KACV;AAAA,EACF;AACF,CAAC","file":"v4.js","sourcesContent":["/**\n * Shared Parallel Web client instance\n */\n\nimport { Parallel } from 'parallel-web';\n\nlet _parallelClient: Parallel | null = null;\n\nexport const parallelClient = new Proxy({} as Parallel, {\n get(_target, prop) {\n if (!_parallelClient) {\n _parallelClient = new Parallel({\n apiKey: process.env['PARALLEL_API_KEY'],\n });\n }\n return (_parallelClient as any)[prop];\n },\n});\n","/**\n * Search tool for Parallel Web (AI SDK v4)\n */\n\nimport { tool, type Tool as ToolV4 } from 'ai-v4';\nimport { z } from 'zod';\nimport { BetaSearchParams } from 'parallel-web/resources/beta/beta.mjs';\nimport { parallelClient } from '../../client.js';\n\nfunction 
getSearchParams(\n search_type: 'list' | 'targeted' | 'general' | 'single_page'\n): Pick<BetaSearchParams, 'max_results' | 'max_chars_per_result'> {\n switch (search_type) {\n case 'targeted':\n return { max_results: 5, max_chars_per_result: 16000 };\n case 'general':\n return { max_results: 10, max_chars_per_result: 9000 };\n case 'single_page':\n return { max_results: 2, max_chars_per_result: 30000 };\n case 'list':\n default:\n return { max_results: 20, max_chars_per_result: 1500 };\n }\n}\n\nconst search = async (\n searchArgs: BetaSearchParams,\n { abortSignal }: { abortSignal: AbortSignal | undefined }\n) => {\n return await parallelClient.beta.search(\n {\n ...searchArgs,\n },\n {\n signal: abortSignal,\n // headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n};\n\nexport const searchTool: ToolV4 = tool({\n description: `Use the web_search_parallel tool to access information from the web. The\nweb_search_parallel tool returns ranked, extended web excerpts optimized for LLMs.\nIntelligently scale the number of web_search_parallel tool calls to get more information\nwhen needed, from a single call for simple factual questions to five or more calls for\ncomplex research questions.\n\n* Keep queries concise - 1-6 words for best results. Start broad with very short\n queries and medium context, then add words to narrow results or use high context\n if needed.\n* Include broader context about what the search is trying to accomplish in the\n \\`objective\\` field. This helps the search engine understand the user's intent and\n provide relevant results and excerpts.\n* Never repeat similar search queries - make every query unique. If initial results are\n insufficient, reformulate queries to obtain new and better results.\n\nHow to use:\n- For simple queries, a one-shot call to depth is usually sufficient.\n- For complex multi-hop queries, first try to use breadth to narrow down sources. Then\nuse other search types with include_domains to get more detailed results.`,\n parameters: z.object({\n objective: z.string().describe(\n `Natural-language description of what the web research goal\n is. Specify the broad intent of the search query here. Also include any source or\n freshness guidance here. Limit to 200 characters. This should reflect the end goal so\n that the tool can better understand the intent and return the best results. Do not\n dump long texts.`\n ),\n search_type: z\n .enum(['list', 'general', 'single_page', 'targeted'])\n .describe(\n `Can be \"list\", \"general\", \"single_page\" or \"targeted\".\n \"list\" should be used for searching for data broadly, like aggregating data or\n considering multiple sources or doing broad initial research. \"targeted\" should be\n used for searching for data from a specific source set. \"general\" is a catch all case\n if there is no specific use case from list or targeted. \"single_page\" extracts data\n from a single page - extremely targeted. If there is a specific webpage you want the\n data from, use \"single_page\" and mention the URL in the objective.\n Use search_type appropriately.`\n )\n .optional()\n .default('list'),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. 
Usually 1-3 queries are\n ideal.`\n ),\n include_domains: z.array(z.string()).optional()\n .describe(`(optional) List of valid URL domains to explicitly\n focus on for the search. This will restrict all search results to only include results\n from the provided list. This is useful when you want to only use a specific set of\n sources. example: [\"google.com\", \"wikipedia.org\"]. Maximum 10 entries.`),\n }),\n\n execute: async function (\n { ...args },\n { abortSignal }: { abortSignal?: AbortSignal }\n ) {\n const results = await search(\n { ...args, ...getSearchParams(args.search_type) },\n { abortSignal }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n","/**\n * Extract tool for Parallel Web (AI SDK v4)\n */\n\nimport { tool, type Tool as ToolV4 } from 'ai-v4';\nimport { z } from 'zod';\nimport { parallelClient } from '../../client.js';\n\nexport const extractTool: ToolV4 = tool({\n description: `Purpose: Fetch and extract relevant content from specific web URLs.\n\nIdeal Use Cases:\n- Extracting content from specific URLs you've already identified\n- Exploring URLs returned by a web search in greater depth`,\n parameters: z.object({\n objective: z.string().describe(\n `Natural-language description of what information you're looking for from the URLs. \n Limit to 200 characters.`\n ),\n\n urls: z.array(z.string()).describe(\n `List of URLs to extract content from. Must be valid\nHTTP/HTTPS URLs. Maximum 10 URLs per request.`\n ),\n search_queries: z\n .array(z.string())\n .optional()\n .describe(\n `(optional) List of keyword search queries of 1-6\n words, which may include search operators. The search queries should be related to the\n objective. Limited to 5 entries of 200 characters each. Usually 1-3 queries are\n ideal.`\n ),\n }),\n\n execute: async function (\n { ...args },\n { abortSignal }: { abortSignal?: AbortSignal }\n ) {\n const results = await parallelClient.beta.extract(\n { ...args },\n {\n signal: abortSignal,\n headers: { 'parallel-beta': 'search-extract-2025-10-10' },\n }\n );\n\n return {\n searchParams: args,\n answer: results,\n };\n },\n});\n"]}