web-search-plus-plugin 1.2.2 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.env.template CHANGED
@@ -4,6 +4,9 @@ SERPER_API_KEY=your-serper-key-here
4
4
  # Tavily (Research Search) — https://tavily.com
5
5
  TAVILY_API_KEY=your-tavily-key-here
6
6
 
7
+ # Querit (Multilingual AI Search) — https://querit.ai
8
+ QUERIT_API_KEY=your-querit-api-key-here
9
+
7
10
  # Exa (Neural/Deep Search) — https://exa.ai
8
11
  EXA_API_KEY=your-exa-key-here
9
12
 
package/README.md CHANGED
@@ -7,7 +7,7 @@ A standalone OpenClaw plugin that registers `web_search_plus` as a first-class t
7
7
  ## ✨ Features
8
8
 
9
9
  - **Intelligent auto-routing** — analyzes query intent and picks the best provider automatically
10
- - **6 search providers** — use one or all, graceful fallback if any is down
10
+ - **7 search providers** — use one or all, graceful fallback if any is down
11
11
  - **Local result caching** — saves API costs on repeated queries
12
12
  - **Interactive setup wizard** — guided configuration via `python3 scripts/setup.py`
13
13
  - **Native OpenClaw tool** — registers as `web_search_plus`, not a skill
@@ -18,6 +18,7 @@ A standalone OpenClaw plugin that registers `web_search_plus` as a first-class t
18
18
  |----------|----------|-----------|
19
19
  | **Serper** (Google) | Facts, news, shopping, local businesses | 2,500 queries/month |
20
20
  | **Tavily** | Deep research, analysis, explanations | 1,000 queries/month |
21
 + | **Querit** | Multilingual AI search with rich metadata and real-time info | 1,000 queries/month |
21
22
  | **Exa** (Neural) | Semantic discovery, finding similar content | 1,000 queries/month |
22
23
  | **Perplexity** | AI-synthesized answers with citations | Via API key |
23
24
  | **You.com** | Real-time RAG, LLM-ready snippets | Limited free tier |
@@ -31,6 +32,7 @@ The plugin analyzes your query and picks the best provider:
31
32
  |-------|-----------|-----|
32
33
  | "iPhone 16 Pro price" | Serper | Shopping intent detected |
33
34
  | "how does TCP/IP work" | Tavily | Research/explanation intent |
35
+ | "latest multilingual EV market updates" | Querit | Real-time AI search with metadata-rich results |
34
36
  | "companies like Stripe" | Exa | Discovery/semantic intent |
35
37
  | "what is quantum computing" | Perplexity | Direct answer intent |
36
38
  | "latest news AI regulation" | Serper | News intent |
@@ -87,6 +89,7 @@ Copy `.env.template` to `.env` and add at least one API key:
87
89
  |----------|----------|---------|
88
90
  | `SERPER_API_KEY` | Serper (Google) | [console.serper.dev](https://console.serper.dev) |
89
91
  | `TAVILY_API_KEY` | Tavily | [tavily.com](https://tavily.com) |
92
+ | `QUERIT_API_KEY` | Querit | [querit.ai](https://querit.ai) |
90
93
  | `EXA_API_KEY` | Exa | [exa.ai](https://exa.ai) |
91
94
  | `PERPLEXITY_API_KEY` | Perplexity | [perplexity.ai](https://docs.perplexity.ai) |
92
95
  | `KILOCODE_API_KEY` | Perplexity via Kilo | [kilocode.ai](https://kilocode.ai) |
@@ -119,7 +122,7 @@ The registered `web_search_plus` tool accepts:
119
122
  | Parameter | Type | Required | Description |
120
123
  |-----------|------|----------|-------------|
121
124
  | `query` | string | ✅ | Search query |
122
- | `provider` | string | ❌ | Force a provider: `serper`, `tavily`, `exa`, `perplexity`, `you`, `searxng`, or `auto` (default) |
125
+ | `provider` | string | ❌ | Force a provider: `serper`, `tavily`, `querit`, `exa`, `perplexity`, `you`, `searxng`, or `auto` (default) |
123
126
  | `count` | number | ❌ | Number of results (default: 5) |
124
127
 
125
128
  ## 🧪 Test Directly
@@ -139,7 +142,7 @@ python3 scripts/search.py -q "your query" --max-results 10
139
142
 
140
143
  ## ❓ FAQ
141
144
 
142
- ### Do I need all 6 providers?
145
+ ### Do I need all 7 providers?
143
146
  No. The plugin works with just one API key. Configure whichever providers you have — the auto-router will use what's available and skip what's not.
144
147
 
145
148
  ### What's the difference between this plugin and the `web-search-plus` skill?
@@ -149,7 +152,7 @@ The **plugin** registers a native tool that any agent can use directly. The **sk
149
152
  Yes, Python 3 is required. The search logic runs as a Python script. Most Linux servers and macOS have Python 3 pre-installed.
150
153
 
151
154
  ### How does auto-routing work?
152
- The router scores each provider based on query signals — keywords like "price" or "buy" boost Serper, research-oriented queries boost Tavily, semantic/discovery queries boost Exa, and direct questions boost Perplexity. The highest-scoring provider wins.
155
+ The router scores each provider based on query signals — keywords like "price" or "buy" boost Serper, deep explanation queries boost Tavily, multilingual or metadata-rich real-time search can favor Querit, semantic/discovery queries boost Exa, and direct questions boost Perplexity. The highest-scoring provider wins.
153
156
 
154
157
  ### Does it cache results?
155
158
  Yes. Results are cached locally in a `.cache/` directory inside the plugin folder. Identical queries return cached results instantly and don't consume API credits. Cache is file-based and survives restarts.
package/index.ts CHANGED
@@ -1,3 +1,4 @@
1
+ import { definePluginEntry } from "openclaw/plugin-sdk/plugin-entry";
1
2
  import { Type } from "@sinclair/typebox";
2
3
  import { spawnSync } from "child_process";
3
4
  import fs from "fs";
@@ -34,183 +35,191 @@ function loadEnvFile(envPath: string): Record<string, string> {
34
35
  const PLUGIN_DIR = getPluginDir();
35
36
  const scriptPath = path.join(PLUGIN_DIR, "scripts", "search.py");
36
37
 
37
- export default function (api: any) {
38
- // Bridge OpenClaw config fields to env vars expected by search.py
39
- const configEnv: Record<string, string> = {};
40
- const pluginConfig: Record<string, string> = (api as any)?.config ?? {};
41
- const configKeyMap: Record<string, string> = {
42
- serperApiKey: "SERPER_API_KEY",
43
- tavilyApiKey: "TAVILY_API_KEY",
44
- exaApiKey: "EXA_API_KEY",
45
- perplexityApiKey: "PERPLEXITY_API_KEY",
46
- kilocodeApiKey: "KILOCODE_API_KEY",
47
- youApiKey: "YOU_API_KEY",
48
- searxngInstanceUrl: "SEARXNG_INSTANCE_URL",
49
- };
50
- for (const [cfgKey, envKey] of Object.entries(configKeyMap)) {
51
- const val = pluginConfig[cfgKey];
52
- if (val && typeof val === "string") configEnv[envKey] = val;
53
- }
54
-
55
- api.registerTool(
56
- {
57
- name: "web_search_plus",
58
- description:
59
- "Search the web using multi-provider intelligent routing (Serper/Google, Tavily/Research, Exa/Neural+Deep, Perplexity, You.com, SearXNG). Automatically selects the best provider based on query intent. Use for ALL web searches. Set depth='deep' for multi-source synthesis, 'deep-reasoning' for complex cross-document analysis.",
60
- parameters: Type.Object({
61
- query: Type.String({ description: "Search query" }),
62
- provider: Type.Optional(
63
- Type.Union(
64
- [
65
- Type.Literal("serper"),
66
- Type.Literal("tavily"),
67
- Type.Literal("exa"),
68
- Type.Literal("perplexity"),
69
- Type.Literal("you"),
70
- Type.Literal("searxng"),
71
- Type.Literal("auto"),
72
- ],
73
- {
74
- description:
75
- "Force a specific provider, or 'auto' for smart routing (default: auto)",
76
- },
38
+ export default definePluginEntry({
39
+ id: "web-search-plus-plugin",
40
+ name: "Web Search Plus",
41
+ description:
42
+ "Multi-provider web search (Serper/Google, Tavily, Querit/Multilingual AI Search, Exa/Neural+Deep, Perplexity, You.com, SearXNG) with intelligent auto-routing",
43
+ register(api) {
44
+ // Bridge OpenClaw config fields to env vars expected by search.py
45
+ const configEnv: Record<string, string> = {};
46
+ const pluginConfig: Record<string, string> = (api as any)?.config ?? {};
47
+ const configKeyMap: Record<string, string> = {
48
+ serperApiKey: "SERPER_API_KEY",
49
+ tavilyApiKey: "TAVILY_API_KEY",
50
+ queritApiKey: "QUERIT_API_KEY",
51
+ exaApiKey: "EXA_API_KEY",
52
+ perplexityApiKey: "PERPLEXITY_API_KEY",
53
+ kilocodeApiKey: "KILOCODE_API_KEY",
54
+ youApiKey: "YOU_API_KEY",
55
+ searxngInstanceUrl: "SEARXNG_INSTANCE_URL",
56
+ };
57
+ for (const [cfgKey, envKey] of Object.entries(configKeyMap)) {
58
+ const val = pluginConfig[cfgKey];
59
+ if (val && typeof val === "string") configEnv[envKey] = val;
60
+ }
61
+
62
+ api.registerTool(
63
+ {
64
+ name: "web_search_plus",
65
+ description:
66
+ "Search the web using multi-provider intelligent routing (Serper/Google, Tavily/Research, Querit/Multilingual AI Search, Exa/Neural+Deep, Perplexity, You.com, SearXNG). Automatically selects the best provider based on query intent. Use for ALL web searches. Set depth='deep' for multi-source synthesis, 'deep-reasoning' for complex cross-document analysis.",
67
+ parameters: Type.Object({
68
+ query: Type.String({ description: "Search query" }),
69
+ provider: Type.Optional(
70
+ Type.Union(
71
+ [
72
+ Type.Literal("serper"),
73
+ Type.Literal("tavily"),
74
+ Type.Literal("querit"),
75
+ Type.Literal("exa"),
76
+ Type.Literal("perplexity"),
77
+ Type.Literal("you"),
78
+ Type.Literal("searxng"),
79
+ Type.Literal("auto"),
80
+ ],
81
+ {
82
+ description:
83
+ "Force a specific provider, or 'auto' for smart routing (default: auto)",
84
+ },
85
+ ),
86
+ ),
87
+ count: Type.Optional(
88
+ Type.Number({ description: "Number of results (default: 5)" }),
89
+ ),
90
+ depth: Type.Optional(
91
+ Type.Union(
92
+ [
93
+ Type.Literal("normal"),
94
+ Type.Literal("deep"),
95
+ Type.Literal("deep-reasoning"),
96
+ ],
97
+ {
98
+ description:
99
+ "Exa search depth: 'deep' synthesizes across sources (4-12s), 'deep-reasoning' for complex cross-reference analysis (12-50s). When provider is auto, depth may be auto-selected based on query complexity.",
100
+ },
101
+ ),
102
+ ),
103
+ time_range: Type.Optional(
104
+ Type.Union(
105
+ [
106
+ Type.Literal("day"),
107
+ Type.Literal("week"),
108
+ Type.Literal("month"),
109
+ Type.Literal("year"),
110
+ ],
111
+ {
112
+ description:
113
+ "Filter results by recency. Applies to Serper (as tbs), Perplexity (as search_recency_filter), Tavily/You.com (as freshness). Useful for news and current events.",
114
+ },
115
+ ),
77
116
  ),
78
- ),
79
- count: Type.Optional(
80
- Type.Number({ description: "Number of results (default: 5)" }),
81
- ),
82
- depth: Type.Optional(
83
- Type.Union(
84
- [
85
- Type.Literal("normal"),
86
- Type.Literal("deep"),
87
- Type.Literal("deep-reasoning"),
88
- ],
89
- {
117
+ include_domains: Type.Optional(
118
+ Type.Array(Type.String(), {
90
119
  description:
91
- "Exa search depth: 'deep' synthesizes across sources (4-12s), 'deep-reasoning' for complex cross-reference analysis (12-50s). When provider is auto, depth may be auto-selected based on query complexity.",
92
- },
120
+ "Only include results from these domains (e.g. ['arxiv.org', 'github.com']). Supported by Tavily and Exa.",
121
+ }),
93
122
  ),
94
- ),
95
- time_range: Type.Optional(
96
- Type.Union(
97
- [
98
- Type.Literal("day"),
99
- Type.Literal("week"),
100
- Type.Literal("month"),
101
- Type.Literal("year"),
102
- ],
103
- {
123
+ exclude_domains: Type.Optional(
124
+ Type.Array(Type.String(), {
104
125
  description:
105
- "Filter results by recency. Applies to Serper (as tbs), Perplexity (as search_recency_filter), Tavily/You.com (as freshness). Useful for news and current events.",
106
- },
126
+ "Exclude results from these domains (e.g. ['reddit.com', 'pinterest.com']). Supported by Tavily and Exa.",
127
+ }),
107
128
  ),
108
- ),
109
- include_domains: Type.Optional(
110
- Type.Array(Type.String(), {
111
- description:
112
- "Only include results from these domains (e.g. ['arxiv.org', 'github.com']). Supported by Tavily and Exa.",
113
- }),
114
- ),
115
- exclude_domains: Type.Optional(
116
- Type.Array(Type.String(), {
117
- description:
118
- "Exclude results from these domains (e.g. ['reddit.com', 'pinterest.com']). Supported by Tavily and Exa.",
119
- }),
120
- ),
121
- }),
122
- async execute(
123
- _id: string,
124
- params: {
125
- query: string;
126
- provider?: string;
127
- count?: number;
128
- depth?: string;
129
- time_range?: string;
130
- include_domains?: string[];
131
- exclude_domains?: string[];
132
- },
133
- ) {
134
- const args = [scriptPath, "--query", params.query, "--compact"];
135
-
136
- if (params.provider && params.provider !== "auto") {
137
- args.push("--provider", params.provider);
138
- }
139
-
140
- if (typeof params.count === "number" && Number.isFinite(params.count)) {
141
- args.push(
142
- "--max-results",
143
- String(Math.max(1, Math.floor(params.count))),
144
- );
145
- }
146
-
147
- if (params.depth && params.depth !== "normal") {
148
- args.push("--exa-depth", params.depth);
149
- }
150
-
151
- if (params.time_range) {
152
- args.push("--time-range", params.time_range);
153
- args.push("--freshness", params.time_range);
154
- }
155
-
156
- if (params.include_domains?.length) {
157
- args.push("--include-domains", ...params.include_domains);
158
- }
159
-
160
- if (params.exclude_domains?.length) {
161
- args.push("--exclude-domains", ...params.exclude_domains);
162
- }
163
-
164
- const envPaths = [
165
- path.join(PLUGIN_DIR, ".env"),
166
- path.join(PLUGIN_DIR, "..", "web-search-plus", ".env"),
167
- ];
168
- const fileEnv: Record<string, string> = {};
169
- for (const envPath of envPaths) {
170
- Object.assign(fileEnv, loadEnvFile(envPath));
171
- }
172
- const childEnv = { ...process.env, ...configEnv, ...fileEnv };
173
-
174
- try {
175
- const child = spawnSync("python3", args, {
176
- timeout: 75000,
177
- env: childEnv,
178
- shell: false,
179
- encoding: "utf8",
180
- });
181
-
182
- if (child.error) {
183
- return {
184
- content: [
185
- { type: "text", text: `Search failed: ${child.error.message}` },
186
- ],
187
- };
129
+ }),
130
+ async execute(
131
+ _id: string,
132
+ params: {
133
+ query: string;
134
+ provider?: string;
135
+ count?: number;
136
+ depth?: string;
137
+ time_range?: string;
138
+ include_domains?: string[];
139
+ exclude_domains?: string[];
140
+ },
141
+ ) {
142
+ const args = [scriptPath, "--query", params.query, "--compact"];
143
+
144
+ if (params.provider && params.provider !== "auto") {
145
+ args.push("--provider", params.provider);
146
+ }
147
+
148
+ if (typeof params.count === "number" && Number.isFinite(params.count)) {
149
+ args.push(
150
+ "--max-results",
151
+ String(Math.max(1, Math.floor(params.count))),
152
+ );
188
153
  }
189
154
 
190
- if (child.status !== 0) {
191
- const stderr = child.stderr?.trim() || "Unknown error";
155
+ if (params.depth && params.depth !== "normal") {
156
+ args.push("--exa-depth", params.depth);
157
+ }
158
+
159
+ if (params.time_range) {
160
+ args.push("--time-range", params.time_range);
161
+ args.push("--freshness", params.time_range);
162
+ }
163
+
164
+ if (params.include_domains?.length) {
165
+ args.push("--include-domains", ...params.include_domains);
166
+ }
167
+
168
+ if (params.exclude_domains?.length) {
169
+ args.push("--exclude-domains", ...params.exclude_domains);
170
+ }
171
+
172
+ const envPaths = [
173
+ path.join(PLUGIN_DIR, ".env"),
174
+ path.join(PLUGIN_DIR, "..", "web-search-plus", ".env"),
175
+ ];
176
+ const fileEnv: Record<string, string> = {};
177
+ for (const envPath of envPaths) {
178
+ Object.assign(fileEnv, loadEnvFile(envPath));
179
+ }
180
+ const childEnv = { ...process.env, ...configEnv, ...fileEnv };
181
+
182
+ try {
183
+ const child = spawnSync("python3", args, {
184
+ timeout: 75000,
185
+ env: childEnv,
186
+ shell: false,
187
+ encoding: "utf8",
188
+ });
189
+
190
+ if (child.error) {
191
+ return {
192
+ content: [
193
+ { type: "text", text: `Search failed: ${child.error.message}` },
194
+ ],
195
+ };
196
+ }
197
+
198
+ if (child.status !== 0) {
199
+ const stderr = child.stderr?.trim() || "Unknown error";
200
+ return {
201
+ content: [
202
+ {
203
+ type: "text",
204
+ text: `Search failed (exit ${child.status}): ${stderr}`,
205
+ },
206
+ ],
207
+ };
208
+ }
209
+
210
+ return {
211
+ content: [{ type: "text", text: child.stdout?.trim() || "{}" }],
212
+ };
213
+ } catch (err: any) {
192
214
  return {
193
215
  content: [
194
- {
195
- type: "text",
196
- text: `Search failed (exit ${child.status}): ${stderr}`,
197
- },
216
+ { type: "text", text: `Search failed: ${err?.message ?? err}` },
198
217
  ],
199
218
  };
200
219
  }
201
-
202
- return {
203
- content: [{ type: "text", text: child.stdout?.trim() || "{}" }],
204
- };
205
- } catch (err: any) {
206
- return {
207
- content: [
208
- { type: "text", text: `Search failed: ${err?.message ?? err}` },
209
- ],
210
- };
211
- }
220
+ },
212
221
  },
213
- },
214
- { optional: true },
215
- );
216
- }
222
+ { optional: true },
223
+ );
224
+ },
225
+ });
@@ -1,15 +1,15 @@
1
1
  {
2
2
  "id": "web-search-plus-plugin",
3
- "kind": "skill",
4
3
  "name": "Web Search Plus",
5
- "version": "1.2.2",
6
- "description": "Multi-provider web search (Serper/Google, Tavily, Exa/Neural+Deep, Perplexity, You.com, SearXNG) with intelligent auto-routing",
4
+ "version": "1.3.0",
5
+ "description": "Multi-provider web search (Serper/Google, Tavily, Querit/Multilingual AI Search, Exa/Neural+Deep, Perplexity, You.com, SearXNG) with intelligent auto-routing",
7
6
  "configSchema": {
8
7
  "type": "object",
9
8
  "additionalProperties": false,
10
9
  "properties": {
11
10
  "serperApiKey": { "type": "string" },
12
11
  "tavilyApiKey": { "type": "string" },
12
+ "queritApiKey": { "type": "string" },
13
13
  "exaApiKey": { "type": "string" },
14
14
  "perplexityApiKey": { "type": "string" },
15
15
  "kilocodeApiKey": { "type": "string" },
@@ -20,6 +20,7 @@
20
20
  "uiHints": {
21
21
  "serperApiKey": { "label": "Serper API Key", "placeholder": "sk-...", "sensitive": true },
22
22
  "tavilyApiKey": { "label": "Tavily API Key", "placeholder": "tvly-...", "sensitive": true },
23
+ "queritApiKey": { "label": "Querit API Key", "placeholder": "querit-sk-...", "sensitive": true },
23
24
  "exaApiKey": { "label": "Exa API Key", "placeholder": "exa-...", "sensitive": true },
24
25
  "perplexityApiKey": { "label": "Perplexity API Key", "placeholder": "pplx-...", "sensitive": true },
25
26
  "kilocodeApiKey": { "label": "Kilo Gateway API Key", "placeholder": "...", "sensitive": true },
package/package.json CHANGED
@@ -1,9 +1,12 @@
1
1
  {
2
2
  "name": "web-search-plus-plugin",
3
- "version": "1.2.2",
4
- "description": "OpenClaw plugin: multi-provider web search (Serper/Google, Tavily, Exa/Neural+Deep, Perplexity, You.com, SearXNG) with intelligent auto-routing",
3
+ "version": "1.3.0",
4
+ "description": "OpenClaw plugin: multi-provider web search (Serper/Google, Tavily, Querit/Multilingual AI Search, Exa/Neural+Deep, Perplexity, You.com, SearXNG) with intelligent auto-routing",
5
5
  "type": "module",
6
6
  "main": "index.ts",
7
+ "openclaw": {
8
+ "extensions": ["./index.ts"]
9
+ },
7
10
  "files": [
8
11
  "index.ts",
9
12
  "openclaw.plugin.json",
@@ -12,7 +15,10 @@
12
15
  "README.md",
13
16
  "LICENSE"
14
17
  ],
15
- "keywords": ["openclaw", "plugin", "search", "serper", "tavily", "exa", "exa-deep", "perplexity", "you", "searxng", "web-search", "auto-routing"],
18
+ "keywords": ["openclaw", "plugin", "search", "serper", "tavily", "querit", "exa", "exa-deep", "perplexity", "you", "searxng", "web-search", "auto-routing"],
19
+ "peerDependencies": {
20
+ "openclaw": ">=2026.3.22"
21
+ },
16
22
  "repository": {
17
23
  "type": "git",
18
24
  "url": "https://github.com/robbyczgw-cla/web-search-plus-plugin"
package/scripts/search.py CHANGED
@@ -1,7 +1,8 @@
1
1
  #!/usr/bin/env python3
2
2
  """
3
3
  Web Search Plus — Unified Multi-Provider Search with Intelligent Auto-Routing
4
- Supports: Serper (Google), Tavily (Research), Exa (Neural), Perplexity (Direct Answers)
4
+ Supports: Serper (Google), Tavily (Research), Querit (Multilingual AI Search),
5
+ Exa (Neural), Perplexity (Direct Answers)
5
6
 
6
7
  Smart Routing uses multi-signal analysis:
7
8
  - Query intent classification (shopping, research, discovery)
@@ -12,7 +13,7 @@ Smart Routing uses multi-signal analysis:
12
13
 
13
14
  Usage:
14
15
  python3 search.py --query "..." # Auto-route based on query
15
- python3 search.py --provider [serper|tavily|exa] --query "..." [options]
16
+ python3 search.py --provider [serper|tavily|querit|exa] --query "..." [options]
16
17
 
17
18
  Examples:
18
19
  python3 search.py -q "iPhone 16 Pro price" # → Serper (shopping intent)
@@ -21,6 +22,7 @@ Examples:
21
22
  """
22
23
 
23
24
  import argparse
25
+ from http.client import IncompleteRead
24
26
  import hashlib
25
27
  import json
26
28
  import os
@@ -275,7 +277,7 @@ DEFAULT_CONFIG = {
275
277
  "auto_routing": {
276
278
  "enabled": True,
277
279
  "fallback_provider": "serper",
278
- "provider_priority": ["tavily", "exa", "perplexity", "serper", "you", "searxng"],
280
+ "provider_priority": ["tavily", "querit", "exa", "perplexity", "serper", "you", "searxng"],
279
281
  "disabled_providers": [],
280
282
  "confidence_threshold": 0.3, # Below this, note low confidence
281
283
  },
@@ -288,6 +290,11 @@ DEFAULT_CONFIG = {
288
290
  "depth": "basic",
289
291
  "topic": "general"
290
292
  },
293
+ "querit": {
294
+ "base_url": "https://api.querit.ai",
295
+ "base_path": "/v1/search",
296
+ "timeout": 10
297
+ },
291
298
  "exa": {
292
299
  "type": "neural",
293
300
  "depth": "normal",
@@ -358,6 +365,7 @@ def get_api_key(provider: str, config: Dict[str, Any] = None) -> Optional[str]:
358
365
  key_map = {
359
366
  "serper": "SERPER_API_KEY",
360
367
  "tavily": "TAVILY_API_KEY",
368
+ "querit": "QUERIT_API_KEY",
361
369
  "exa": "EXA_API_KEY",
362
370
  "you": "YOU_API_KEY",
363
371
  }
@@ -474,7 +482,8 @@ def validate_api_key(provider: str, config: Dict[str, Any] = None) -> str:
474
482
  if not key:
475
483
  env_var = {
476
484
  "serper": "SERPER_API_KEY",
477
- "tavily": "TAVILY_API_KEY",
485
+ "tavily": "TAVILY_API_KEY",
486
+ "querit": "QUERIT_API_KEY",
478
487
  "exa": "EXA_API_KEY",
479
488
  "you": "YOU_API_KEY",
480
489
  "perplexity": "KILOCODE_API_KEY"
@@ -483,6 +492,7 @@ def validate_api_key(provider: str, config: Dict[str, Any] = None) -> str:
483
492
  urls = {
484
493
  "serper": "https://serper.dev",
485
494
  "tavily": "https://tavily.com",
495
+ "querit": "https://querit.ai",
486
496
  "exa": "https://exa.ai",
487
497
  "you": "https://api.you.com",
488
498
  "perplexity": "https://api.kilo.ai"
@@ -1161,6 +1171,7 @@ class QueryAnalyzer:
1161
1171
  provider_scores = {
1162
1172
  "serper": shopping_score + local_news_score + (recency_score * 0.35),
1163
1173
  "tavily": research_score + (complexity["complexity_score"] if not complexity["is_complex"] else 0) + (0.2 * recency_score),
1174
+ "querit": (research_score * 0.65) + (rag_score * 0.35) + (recency_score * 0.45),
1164
1175
  "exa": discovery_score + (1.0 if re.search(r"\b(similar|alternatives?|examples?)\b", query, re.IGNORECASE) else 0.0) + (exa_deep_score * 0.5) + (exa_deep_reasoning_score * 0.5),
1165
1176
  "perplexity": direct_answer_score + (local_news_score * 0.4) + (recency_score * 0.55),
1166
1177
  "you": rag_score + (recency_score * 0.25), # You.com good for real-time + RAG
@@ -1171,6 +1182,7 @@ class QueryAnalyzer:
1171
1182
  provider_matches = {
1172
1183
  "serper": shopping_matches + local_news_matches,
1173
1184
  "tavily": research_matches,
1185
+ "querit": research_matches,
1174
1186
  "exa": discovery_matches + exa_deep_matches + exa_deep_reasoning_matches,
1175
1187
  "perplexity": direct_answer_matches,
1176
1188
  "you": rag_matches,
@@ -1221,7 +1233,7 @@ class QueryAnalyzer:
1221
1233
  total_score = sum(available.values()) or 1.0
1222
1234
 
1223
1235
  # Handle ties using priority
1224
- priority = self.auto_config.get("provider_priority", ["tavily", "exa", "perplexity", "serper", "you", "searxng"])
1236
+ priority = self.auto_config.get("provider_priority", ["tavily", "querit", "exa", "perplexity", "serper", "you", "searxng"])
1225
1237
  winners = [p for p, s in available.items() if s == max_score]
1226
1238
 
1227
1239
  if len(winners) > 1:
@@ -1337,6 +1349,7 @@ def explain_routing(query: str, config: Dict[str, Any]) -> Dict[str, Any]:
1337
1349
  "intent_breakdown": {
1338
1350
  "shopping_signals": len(analysis["provider_matches"]["serper"]),
1339
1351
  "research_signals": len(analysis["provider_matches"]["tavily"]),
1352
+ "querit_signals": len(analysis["provider_matches"]["querit"]),
1340
1353
  "discovery_signals": len(analysis["provider_matches"]["exa"]),
1341
1354
  "rag_signals": len(analysis["provider_matches"]["you"]),
1342
1355
  "exa_deep_score": round(analysis.get("exa_deep_score", 0), 2),
@@ -1358,7 +1371,7 @@ def explain_routing(query: str, config: Dict[str, Any]) -> Dict[str, Any]:
1358
1371
  if matches
1359
1372
  },
1360
1373
  "available_providers": [
1361
- p for p in ["serper", "tavily", "exa", "perplexity", "you", "searxng"]
1374
+ p for p in ["serper", "tavily", "querit", "exa", "perplexity", "you", "searxng"]
1362
1375
  if get_api_key(p, config) and p not in config.get("auto_routing", {}).get("disabled_providers", [])
1363
1376
  ]
1364
1377
  }
@@ -1523,6 +1536,12 @@ def make_request(url: str, headers: dict, body: dict, timeout: int = 30) -> dict
1523
1536
  reason = str(getattr(e, "reason", e))
1524
1537
  is_timeout = "timed out" in reason.lower()
1525
1538
  raise ProviderRequestError(f"Network error: {reason}. Check your internet connection.", transient=is_timeout)
1539
+ except IncompleteRead as e:
1540
+ partial_len = len(getattr(e, "partial", b"") or b"")
1541
+ raise ProviderRequestError(
1542
+ f"Connection interrupted while reading response ({partial_len} bytes received). Please retry.",
1543
+ transient=True,
1544
+ )
1526
1545
  except TimeoutError:
1527
1546
  raise ProviderRequestError(f"Request timed out after {timeout}s. Try again or reduce max_results.", transient=True)
1528
1547
 
@@ -1672,6 +1691,114 @@ def search_tavily(
1672
1691
  }
1673
1692
 
1674
1693
 
1694
+ # =============================================================================
1695
 + # Querit (Multilingual search API for AI, with rich metadata and real-time information)
1696
+ # =============================================================================
1697
+
1698
+ def _map_querit_time_range(time_range: Optional[str]) -> Optional[str]:
1699
+ """Map generic time ranges to Querit's compact date filter format."""
1700
+ if not time_range:
1701
+ return None
1702
+ return {
1703
+ "day": "d1",
1704
+ "week": "w1",
1705
+ "month": "m1",
1706
+ "year": "y1",
1707
+ }.get(time_range, time_range)
1708
+
1709
+
1710
+ def search_querit(
1711
+ query: str,
1712
+ api_key: str,
1713
+ max_results: int = 5,
1714
+ language: str = "en",
1715
+ country: str = "us",
1716
+ time_range: Optional[str] = None,
1717
+ include_domains: Optional[List[str]] = None,
1718
+ exclude_domains: Optional[List[str]] = None,
1719
+ base_url: str = "https://api.querit.ai",
1720
+ base_path: str = "/v1/search",
1721
+ timeout: int = 30,
1722
+ ) -> dict:
1723
+ """Search using Querit.
1724
+
1725
+ Mirrors the Querit Python SDK payload shape:
1726
+ - query
1727
+ - count
1728
+ - optional filters: languages, geo, sites, timeRange
1729
+ """
1730
+ endpoint = base_url.rstrip("/") + base_path
1731
+
1732
+ filters: Dict[str, Any] = {}
1733
+ if language:
1734
+ filters["languages"] = {"include": [language.lower()]}
1735
+ if country:
1736
+ filters["geo"] = {"countries": {"include": [country.upper()]}}
1737
+ if include_domains or exclude_domains:
1738
+ sites: Dict[str, List[str]] = {}
1739
+ if include_domains:
1740
+ sites["include"] = include_domains
1741
+ if exclude_domains:
1742
+ sites["exclude"] = exclude_domains
1743
+ filters["sites"] = sites
1744
+
1745
+ querit_time_range = _map_querit_time_range(time_range)
1746
+ if querit_time_range:
1747
+ filters["timeRange"] = {"date": querit_time_range}
1748
+
1749
+ body: Dict[str, Any] = {
1750
+ "query": query,
1751
+ "count": max_results,
1752
+ }
1753
+ if filters:
1754
+ body["filters"] = filters
1755
+
1756
+ headers = {
1757
+ "Authorization": f"Bearer {api_key}",
1758
+ "Content-Type": "application/json",
1759
+ }
1760
+
1761
+ data = make_request(endpoint, headers, body, timeout=timeout)
1762
+
1763
+ error_code = data.get("error_code")
1764
+ error_msg = data.get("error_msg")
1765
+ if error_msg or (error_code not in (None, 0, 200)):
1766
+ message = error_msg or f"Querit request failed with error_code={error_code}"
1767
+ raise ProviderRequestError(message)
1768
+
1769
+ raw_results = ((data.get("results") or {}).get("result")) or []
1770
+ results = []
1771
+ for i, item in enumerate(raw_results[:max_results]):
1772
+ snippet = item.get("snippet") or item.get("page_age") or ""
1773
+ result = {
1774
+ "title": item.get("title") or _title_from_url(item.get("url", "")),
1775
+ "url": item.get("url", ""),
1776
+ "snippet": snippet,
1777
+ "score": round(1.0 - i * 0.05, 3),
1778
+ }
1779
+ if item.get("page_time") is not None:
1780
+ result["page_time"] = item["page_time"]
1781
+ if item.get("page_age"):
1782
+ result["date"] = item["page_age"]
1783
+ if item.get("language") is not None:
1784
+ result["language"] = item["language"]
1785
+ results.append(result)
1786
+
1787
+ answer = results[0]["snippet"] if results else ""
1788
+
1789
+ return {
1790
+ "provider": "querit",
1791
+ "query": query,
1792
+ "results": results,
1793
+ "images": [],
1794
+ "answer": answer,
1795
+ "metadata": {
1796
+ "search_id": data.get("search_id"),
1797
+ "time_range": querit_time_range,
1798
+ }
1799
+ }
1800
+
1801
+
1675
1802
  # =============================================================================
1676
1803
  # Exa (Neural/Semantic/Deep Search)
1677
1804
  # =============================================================================
@@ -2273,6 +2400,9 @@ Intelligent Auto-Routing:
2273
2400
 
2274
2401
  Research Intent → Tavily
2275
2402
  "how does", "explain", "what is", analysis, pros/cons, tutorials
2403
+
2404
+ Multilingual + Real-Time AI Search → Querit
2405
+ multilingual search, metadata-rich results, current information for AI workflows
2276
2406
 
2277
2407
  Discovery Intent → Exa (Neural)
2278
2408
  "similar to", "companies like", "alternatives", URLs, startups, papers
@@ -2293,7 +2423,7 @@ Full docs: See README.md and SKILL.md
2293
2423
  # Common arguments
2294
2424
  parser.add_argument(
2295
2425
  "--provider", "-p",
2296
- choices=["serper", "tavily", "exa", "perplexity", "you", "searxng", "auto"],
2426
+ choices=["serper", "tavily", "querit", "exa", "perplexity", "you", "searxng", "auto"],
2297
2427
  help="Search provider (auto=intelligent routing)"
2298
2428
  )
2299
2429
  parser.add_argument(
@@ -2353,6 +2483,19 @@ Full docs: See README.md and SKILL.md
2353
2483
  )
2354
2484
  parser.add_argument("--raw-content", action="store_true")
2355
2485
 
2486
+ # Querit-specific
2487
+ querit_config = config.get("querit", {})
2488
+ parser.add_argument(
2489
+ "--querit-base-url",
2490
+ default=querit_config.get("base_url", "https://api.querit.ai"),
2491
+ help="Querit API base URL"
2492
+ )
2493
+ parser.add_argument(
2494
+ "--querit-base-path",
2495
+ default=querit_config.get("base_path", "/v1/search"),
2496
+ help="Querit API path"
2497
+ )
2498
+
2356
2499
  # Exa-specific
2357
2500
  exa_config = config.get("exa", {})
2358
2501
  parser.add_argument(
@@ -2520,7 +2663,7 @@ Full docs: See README.md and SKILL.md
2520
2663
 
2521
2664
  # Build provider fallback list
2522
2665
  auto_config = config.get("auto_routing", {})
2523
- provider_priority = auto_config.get("provider_priority", ["tavily", "exa", "perplexity", "serper"])
2666
+ provider_priority = auto_config.get("provider_priority", ["tavily", "querit", "exa", "perplexity", "serper", "you", "searxng"])
2524
2667
  disabled_providers = auto_config.get("disabled_providers", [])
2525
2668
 
2526
2669
  # Start with the selected provider, then try others in priority order
@@ -2570,6 +2713,20 @@ Full docs: See README.md and SKILL.md
2570
2713
  include_images=args.images,
2571
2714
  include_raw_content=args.raw_content,
2572
2715
  )
2716
+ elif prov == "querit":
2717
+ return search_querit(
2718
+ query=args.query,
2719
+ api_key=key,
2720
+ max_results=args.max_results,
2721
+ language=args.language,
2722
+ country=args.country,
2723
+ time_range=args.time_range or args.freshness,
2724
+ include_domains=args.include_domains,
2725
+ exclude_domains=args.exclude_domains,
2726
+ base_url=args.querit_base_url,
2727
+ base_path=args.querit_base_path,
2728
+ timeout=int(querit_config.get("timeout", 30)),
2729
+ )
2573
2730
  elif prov == "exa":
2574
2731
  # CLI --exa-depth overrides; fallback to auto-routing suggestion
2575
2732
  exa_depth = args.exa_depth
@@ -2653,6 +2810,8 @@ Full docs: See README.md and SKILL.md
2653
2810
  "locale": f"{args.country}:{args.language}",
2654
2811
  "freshness": args.freshness,
2655
2812
  "time_range": args.time_range,
2813
+ "include_domains": sorted(args.include_domains) if args.include_domains else None,
2814
+ "exclude_domains": sorted(args.exclude_domains) if args.exclude_domains else None,
2656
2815
  "topic": args.topic,
2657
2816
  "search_engines": sorted(args.engines) if args.engines else None,
2658
2817
  "include_news": not args.no_news,
package/scripts/setup.py CHANGED
@@ -65,6 +65,14 @@ def print_provider_info():
65
65
  "signup": "https://tavily.com",
66
66
  "strengths": ["AI-synthesized answers", "Full page content", "Domain filtering", "Academic research"]
67
67
  },
68
+ {
69
+ "name": "Querit",
70
+ "emoji": "🗂️",
71
+ "best_for": "Multi-lingual AI search with rich metadata and real-time information",
72
+ "free_tier": "1,000 queries/month",
73
+ "signup": "https://querit.ai",
74
+ "strengths": ["Multi-lingual search", "Rich metadata", "Real-time information", "AI-ready results"]
75
+ },
68
76
  {
69
77
  "name": "Exa",
70
78
  "emoji": "🧠",
@@ -294,11 +302,12 @@ def run_setup(skill_dir: Path, force_reset: bool = False):
294
302
  "auto_routing": {"enabled": True, "fallback_provider": "serper"},
295
303
  "serper": {},
296
304
  "tavily": {},
305
+ "querit": {},
297
306
  "exa": {}
298
307
  }
299
308
 
300
309
  # Remove any existing API keys from example
301
- for provider in ["serper", "tavily", "exa"]:
310
+ for provider in ["serper", "tavily", "querit", "exa"]:
302
311
  if provider in config:
303
312
  config[provider].pop("api_key", None)
304
313
 
@@ -314,6 +323,7 @@ def run_setup(skill_dir: Path, force_reset: bool = False):
314
323
  providers_info = {
315
324
  "serper": ("Serper", "https://serper.dev", "Google results, shopping, local"),
316
325
  "tavily": ("Tavily", "https://tavily.com", "Research, explanations, analysis"),
326
+ "querit": ("Querit", "https://querit.ai", "Multi-lingual AI search, rich metadata, real-time info"),
317
327
  "exa": ("Exa", "https://exa.ai", "Semantic search, similar content"),
318
328
  "you": ("You.com", "https://api.you.com", "RAG applications, real-time info"),
319
329
  "searxng": ("SearXNG", "https://docs.searxng.org/admin/installation.html", "Privacy-first, self-hosted, $0 cost")
@@ -393,7 +403,7 @@ def run_setup(skill_dir: Path, force_reset: bool = False):
393
403
  config["defaults"]["max_results"] = max_results
394
404
 
395
405
  # Set disabled providers
396
- all_providers = ["serper", "tavily", "exa", "you", "searxng"]
406
+ all_providers = ["serper", "tavily", "querit", "exa", "you", "searxng"]
397
407
  disabled = [p for p in all_providers if p not in enabled_providers]
398
408
  config["auto_routing"]["disabled_providers"] = disabled
399
409