@doufunao123/ai-search 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +46 -21
  2. package/dist/index.js +435 -228
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # @doufunao123/ai-search
2
2
 
3
- AI-powered web search CLI (npm package). Uses AI models with native web search capabilities via the xiaomao proxy.
3
+ Gateway client for AI Search Gateway. The npm CLI is a thin HTTP client that talks to `search.xiaomao.chat` instead of calling model APIs directly.
4
4
 
5
5
  ## Install
6
6
 
@@ -11,43 +11,68 @@ npm install -g @doufunao123/ai-search
11
11
  ## Setup
12
12
 
13
13
  ```bash
14
- ai-search config set api_key your_api_key
15
- # Or
16
- export AI_SEARCH_KEY=your_api_key
14
+ ai-search auth set your_gateway_token
15
+
16
+ # Or use environment variables
17
+ export AI_SEARCH_TOKEN=your_gateway_token
18
+ export AI_SEARCH_GATEWAY_URL=https://search.xiaomao.chat
17
19
  ```
18
20
 
19
21
  ## Usage
20
22
 
21
23
  ```bash
22
- # Simple search
24
+ # Default fast search
23
25
  ai-search "latest AI news"
24
26
 
25
- # Choose model
26
- ai-search "query" --model grok-4.1-fast
27
+ # Deep multi-source search
28
+ ai-search "compare latest AI browser agents" --mode deep
27
29
 
28
- # Complex search with query splitting
29
- ai-search "comprehensive analysis of X" --split 3
30
+ # Answer mode
31
+ ai-search "what changed in Bun this month" --mode answer
30
32
 
31
- # JSON output (default in non-TTY)
32
- ai-search "query" --json
33
+ # Choose model and query splitting
34
+ ai-search "comprehensive analysis of X" --model grok-4.1-fast --split 3 --num 5
33
35
 
34
- # Human-readable output
35
- ai-search "query" --human
36
+ # stdin JSON input
37
+ printf '%s\n' '{"query":"latest AI infra news","mode":"deep","num":5}' | ai-search search --stdin
36
38
 
37
- # List models
39
+ # List gateway metadata
38
40
  ai-search models
41
+ ai-search providers
42
+ ai-search providers health
43
+ ai-search health
39
44
 
40
- # Show config
45
+ # Config and auth
46
+ ai-search auth status
47
+ ai-search config set gateway_url https://search.xiaomao.chat
41
48
  ai-search config show
42
49
  ```
43
50
 
44
51
  ## Configuration
45
52
 
46
- Config file: `~/.config/ai-search/config.json`
53
+ Config file: `~/.config/ai-search/auth.json`
54
+
55
+ Saved format:
56
+
57
+ ```json
58
+ {
59
+ "token": "asg_...",
60
+ "gateway_url": "https://search.xiaomao.chat"
61
+ }
62
+ ```
63
+
64
+ Environment variables:
65
+ - `AI_SEARCH_TOKEN` — gateway token
66
+ - `AI_SEARCH_GATEWAY_URL` — gateway URL
67
+
68
+ Priority order:
69
+ 1. CLI flags (`--gateway-url`, `--token`)
70
+ 2. Environment variables
71
+ 3. `~/.config/ai-search/auth.json`
72
+ 4. Default gateway URL `https://search.xiaomao.chat`
47
73
 
48
- Environment variables (override config):
49
- - `AI_SEARCH_KEY` — API key
50
- - `AI_SEARCH_URL` — API base URL (default: `https://grok.xiaomao.chat`)
51
- - `AI_SEARCH_MODEL` — Default search model (default: `grok-4.1-expert`)
74
+ ## Search Modes
52
75
 
53
- Shares credentials with the Rust CLI (`~/.config/ai-search/config.toml`).
76
+ - `fast` — Grok only, default mode
77
+ - `deep` — Grok + Exa + Tavily in parallel
78
+ - `answer` — Tavily answer mode with AI summary
package/dist/index.js CHANGED
@@ -1,202 +1,248 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  // src/index.ts
4
+ import { existsSync as existsSync2, unlinkSync } from "fs";
4
5
  import { Command } from "commander";
5
6
 
7
+ // src/errors.ts
8
+ var GatewayError = class extends Error {
9
+ code;
10
+ exitCode;
11
+ suggestion;
12
+ constructor(message, options) {
13
+ super(message);
14
+ this.name = "GatewayError";
15
+ this.code = options.code;
16
+ this.exitCode = options.exitCode ?? 1;
17
+ this.suggestion = options.suggestion;
18
+ }
19
+ };
20
+ function inputError(message, suggestion) {
21
+ return new GatewayError(message, {
22
+ code: "INPUT_ERROR",
23
+ exitCode: 1,
24
+ suggestion
25
+ });
26
+ }
27
+ function configError(message, suggestion) {
28
+ return new GatewayError(message, {
29
+ code: "CONFIG_ERROR",
30
+ exitCode: 1,
31
+ suggestion: suggestion ?? "Run ai-search auth set <token> to configure credentials"
32
+ });
33
+ }
34
+ function notFoundError(message) {
35
+ return new GatewayError(message, {
36
+ code: "NOT_FOUND",
37
+ exitCode: 1
38
+ });
39
+ }
40
+ function apiError(message, suggestion) {
41
+ return new GatewayError(message, {
42
+ code: "GATEWAY_API_ERROR",
43
+ exitCode: 3,
44
+ suggestion: suggestion ?? "Check if the gateway is running: ai-search health"
45
+ });
46
+ }
47
+ function httpClientError(message, suggestion) {
48
+ return new GatewayError(message, {
49
+ code: "HTTP_CLIENT_ERROR",
50
+ exitCode: 3,
51
+ suggestion: suggestion ?? "Check network connectivity to the gateway"
52
+ });
53
+ }
54
+ function internalError(message) {
55
+ return new GatewayError(message, {
56
+ code: "INTERNAL_ERROR",
57
+ exitCode: 2
58
+ });
59
+ }
60
+ function normalizeError(error2) {
61
+ if (error2 instanceof GatewayError) {
62
+ return error2;
63
+ }
64
+ if (error2 instanceof Error) {
65
+ return internalError(error2.message);
66
+ }
67
+ return internalError(String(error2));
68
+ }
69
+
70
+ // src/client.ts
71
+ var GatewayClient = class {
72
+ constructor(baseUrl, token) {
73
+ this.baseUrl = baseUrl;
74
+ this.token = token;
75
+ }
76
+ async search(query, options = {}) {
77
+ const payload = await this.post("/api/search", {
78
+ query,
79
+ mode: options.mode ?? "fast",
80
+ ...options.model ? { model: options.model } : {},
81
+ ...options.split !== void 0 ? { split: options.split } : {},
82
+ ...options.num !== void 0 ? { num: options.num } : {}
83
+ });
84
+ return payload;
85
+ }
86
+ async models() {
87
+ const payload = await this.get("/api/models");
88
+ return extractArray(payload, ["models"]);
89
+ }
90
+ async providers() {
91
+ const payload = await this.get("/api/providers");
92
+ return extractArray(payload, ["providers"]);
93
+ }
94
+ async providersHealth() {
95
+ const payload = await this.get("/api/providers/health");
96
+ return extractArray(payload, ["providers", "health"]);
97
+ }
98
+ async health() {
99
+ const payload = await this.get("/health");
100
+ return isRecord(payload) && payload.status === "ok";
101
+ }
102
+ async get(path) {
103
+ return this.request("GET", path);
104
+ }
105
+ async post(path, body) {
106
+ return this.request("POST", path, { body });
107
+ }
108
+ async request(method, path, options = {}) {
109
+ const url = new URL(path, ensureTrailingSlash(this.baseUrl));
110
+ const headers = new Headers(options.headers);
111
+ if (this.token) {
112
+ headers.set("authorization", `Bearer ${this.token}`);
113
+ }
114
+ let body;
115
+ if (options.body !== void 0) {
116
+ headers.set("content-type", "application/json");
117
+ body = JSON.stringify(options.body);
118
+ }
119
+ let response;
120
+ try {
121
+ response = await fetch(url, { method, headers, body });
122
+ } catch (error2) {
123
+ throw httpClientError(error2 instanceof Error ? error2.message : String(error2));
124
+ }
125
+ const text = await response.text();
126
+ const payload = parseResponse(text);
127
+ if (!response.ok) {
128
+ const preview = typeof payload === "string" ? payload : JSON.stringify(payload);
129
+ if (response.status === 404) {
130
+ throw notFoundError(`HTTP 404 - ${truncate(preview, 512)}`);
131
+ }
132
+ throw apiError(`HTTP ${response.status} - ${truncate(preview, 512)}`);
133
+ }
134
+ if (isEnvelope(payload)) {
135
+ if (!payload.ok) {
136
+ const message = typeof payload.error === "object" && payload.error !== null ? JSON.stringify(payload.error) : String(payload.error);
137
+ throw apiError(message);
138
+ }
139
+ return payload.data;
140
+ }
141
+ return payload;
142
+ }
143
+ };
144
+ function extractArray(payload, keys) {
145
+ if (Array.isArray(payload)) {
146
+ return payload;
147
+ }
148
+ if (isRecord(payload)) {
149
+ for (const key of keys) {
150
+ const value = payload[key];
151
+ if (Array.isArray(value)) {
152
+ return value;
153
+ }
154
+ }
155
+ const arrayValue = Object.values(payload).find(Array.isArray);
156
+ if (Array.isArray(arrayValue)) {
157
+ return arrayValue;
158
+ }
159
+ }
160
+ throw apiError(`Unexpected response shape for ${keys.join("/")}`);
161
+ }
162
+ function isEnvelope(value) {
163
+ return isRecord(value) && "ok" in value;
164
+ }
165
+ function isRecord(value) {
166
+ return typeof value === "object" && value !== null && !Array.isArray(value);
167
+ }
168
+ function ensureTrailingSlash(url) {
169
+ return url.endsWith("/") ? url : `${url}/`;
170
+ }
171
+ function parseResponse(text) {
172
+ if (!text) {
173
+ return null;
174
+ }
175
+ try {
176
+ return JSON.parse(text);
177
+ } catch {
178
+ return text;
179
+ }
180
+ }
181
+ function truncate(value, maxLength) {
182
+ if (value.length <= maxLength) {
183
+ return value;
184
+ }
185
+ return `${value.slice(0, maxLength)}...[truncated]`;
186
+ }
187
+
188
+ // src/config.ts
189
+ import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
190
+ import { dirname, join } from "path";
191
+
6
192
  // src/meta.ts
7
- var CLI_VERSION = "0.1.0";
8
- var DEFAULT_API_URL = "https://grok.xiaomao.chat";
9
- var DEFAULT_SEARCH_MODEL = "grok-4.1-expert";
10
- var DEFAULT_ANALYSIS_MODEL = "grok-4.1-fast";
193
+ var CLI_NAME = "ai-search";
194
+ var CLI_VERSION = "0.2.0";
195
+ var DEFAULT_GATEWAY_URL = "https://search.xiaomao.chat";
196
+ var SEARCH_MODES = ["fast", "deep", "answer"];
11
197
  var SEARCH_MODELS = [
12
- "grok-4.1-expert",
13
198
  "grok-4.1-fast",
199
+ "grok-4.1-expert",
14
200
  "grok-4.20-beta",
15
201
  "grok-4",
16
202
  "grok-4-thinking"
17
203
  ];
18
204
 
19
205
  // src/config.ts
20
- import { existsSync, readFileSync, mkdirSync, writeFileSync, chmodSync } from "fs";
21
- import { homedir } from "os";
22
- import { join, dirname } from "path";
23
206
  function configDir() {
24
- return join(homedir(), ".config", "ai-search");
207
+ const home = process.env.HOME;
208
+ if (!home) {
209
+ throw configError("HOME environment variable is not set");
210
+ }
211
+ return join(home, ".config", "ai-search");
25
212
  }
26
213
  function configPath() {
27
- return join(configDir(), "config.json");
214
+ return join(configDir(), "auth.json");
28
215
  }
29
- function loadConfig() {
30
- const defaults = {
31
- api_url: DEFAULT_API_URL,
32
- api_key: "",
33
- search_model: DEFAULT_SEARCH_MODEL,
34
- analysis_model: DEFAULT_ANALYSIS_MODEL,
35
- max_split: 1,
36
- timeout_ms: 12e4
37
- };
216
+ function loadAuthConfig() {
38
217
  const path = configPath();
39
- if (existsSync(path)) {
40
- try {
41
- const content = readFileSync(path, "utf8");
42
- const parsed = JSON.parse(content);
43
- Object.assign(defaults, parsed);
44
- } catch {
45
- }
218
+ if (!existsSync(path)) {
219
+ return {};
46
220
  }
47
- if (!defaults.api_key) {
48
- const tomlPath = join(configDir(), "config.toml");
49
- if (existsSync(tomlPath)) {
50
- try {
51
- const toml = readFileSync(tomlPath, "utf8");
52
- const keyMatch = toml.match(/^api_key\s*=\s*"([^"]*)"/m);
53
- if (keyMatch?.[1]) defaults.api_key = keyMatch[1];
54
- } catch {
55
- }
56
- }
221
+ const content = readFileSync(path, "utf8");
222
+ if (!content.trim()) {
223
+ return {};
224
+ }
225
+ try {
226
+ const parsed = JSON.parse(content);
227
+ return { token: parsed.token, gateway_url: parsed.gateway_url };
228
+ } catch (error2) {
229
+ throw configError(
230
+ `Failed to parse auth config at ${path}: ${error2 instanceof Error ? error2.message : String(error2)}`
231
+ );
57
232
  }
58
- if (process.env.AI_SEARCH_URL) defaults.api_url = process.env.AI_SEARCH_URL;
59
- if (process.env.AI_SEARCH_KEY) defaults.api_key = process.env.AI_SEARCH_KEY;
60
- if (process.env.AI_SEARCH_MODEL) defaults.search_model = process.env.AI_SEARCH_MODEL;
61
- if (process.env.AI_SEARCH_ANALYSIS_MODEL) defaults.analysis_model = process.env.AI_SEARCH_ANALYSIS_MODEL;
62
- return defaults;
63
233
  }
64
- function saveConfig(config) {
234
+ function saveAuthConfig(config) {
65
235
  const path = configPath();
66
236
  mkdirSync(dirname(path), { recursive: true });
67
- let existing = {};
68
- if (existsSync(path)) {
69
- try {
70
- existing = JSON.parse(readFileSync(path, "utf8"));
71
- } catch {
72
- }
73
- }
74
- const merged = { ...existing, ...config };
75
- writeFileSync(path, JSON.stringify(merged, null, 2) + "\n", { mode: 384 });
237
+ writeFileSync(path, JSON.stringify(config, null, 2) + "\n", { mode: 384 });
76
238
  chmodSync(path, 384);
77
239
  }
78
-
79
- // src/client.ts
80
- var THINKING_RE = /<think(?:ing)?>[\s\S]*?<\/think(?:ing)?>/g;
81
- var SYSTEM_PROMPT = `You are a real-time web search assistant with direct access to the internet. When the user asks a question:
82
-
83
- 1. Search the web for the most current, accurate information available
84
- 2. Synthesize results into a clear, well-structured answer
85
- 3. Always include sources with URLs and dates when available
86
- 4. Be concise but comprehensive \u2014 cover key facts without unnecessary padding
87
- 5. If the query is in Chinese, respond entirely in Chinese; otherwise match the query language
88
- 6. When results conflict, note the discrepancy and indicate which sources are more authoritative
89
- 7. Format sources as a numbered list at the end of your response`;
90
- function stripThinking(text) {
91
- return text.replace(THINKING_RE, "").trim();
240
+ function resolveGatewayUrl(overrides) {
241
+ return overrides?.gatewayUrl ?? process.env.AI_SEARCH_GATEWAY_URL ?? loadAuthConfig().gateway_url ?? DEFAULT_GATEWAY_URL;
242
+ }
243
+ function resolveToken(overrides) {
244
+ return overrides?.token ?? process.env.AI_SEARCH_TOKEN ?? loadAuthConfig().token;
92
245
  }
93
- var AIClient = class {
94
- constructor(config) {
95
- this.config = config;
96
- }
97
- async chat(messages, model) {
98
- const url = `${this.config.api_url.replace(/\/+$/, "")}/v1/chat/completions`;
99
- const resp = await fetch(url, {
100
- method: "POST",
101
- headers: {
102
- "Content-Type": "application/json",
103
- Authorization: `Bearer ${this.config.api_key}`
104
- },
105
- body: JSON.stringify({ model, messages, stream: false }),
106
- signal: AbortSignal.timeout(this.config.timeout_ms)
107
- });
108
- if (!resp.ok) {
109
- const body = await resp.text().catch(() => "");
110
- throw new Error(`AI API HTTP ${resp.status}: ${body.slice(0, 500)}`);
111
- }
112
- const data = await resp.json();
113
- const content = stripThinking(data.choices?.[0]?.message?.content ?? "");
114
- const tokens = data.usage?.total_tokens ?? 0;
115
- return { content, tokens };
116
- }
117
- async search(query, model, split = 1) {
118
- if (split <= 1) {
119
- const { content, tokens } = await this.chat(
120
- [
121
- { role: "system", content: SYSTEM_PROMPT },
122
- { role: "user", content: query }
123
- ],
124
- model
125
- );
126
- return { query, model, content, tokens };
127
- }
128
- const subQueries = await this.splitQuery(query, split);
129
- const subResults = await Promise.all(
130
- subQueries.map(async (sq) => {
131
- const { content, tokens } = await this.chat(
132
- [
133
- { role: "system", content: SYSTEM_PROMPT },
134
- { role: "user", content: sq }
135
- ],
136
- model
137
- );
138
- return { sub_query: sq, content, tokens };
139
- })
140
- );
141
- const totalSubTokens = subResults.reduce((sum, r) => sum + r.tokens, 0);
142
- const { content: merged, tokens: mergeTokens } = await this.mergeResults(query, subResults);
143
- return {
144
- query,
145
- model,
146
- content: merged,
147
- tokens: totalSubTokens + mergeTokens,
148
- sub_results: subResults
149
- };
150
- }
151
- async splitQuery(query, maxSplit) {
152
- const { content } = await this.chat(
153
- [
154
- {
155
- role: "system",
156
- content: "You are a query analysis assistant. You split complex questions into independent sub-queries for parallel web search. Always respond with a valid JSON array of strings."
157
- },
158
- {
159
- role: "user",
160
- content: `Split the following complex search query into ${maxSplit} or fewer independent sub-questions that can be searched separately. Return ONLY a JSON array of strings, nothing else.
161
-
162
- Query: ${query}`
163
- }
164
- ],
165
- this.config.analysis_model
166
- );
167
- try {
168
- let jsonStr = content.trim();
169
- if (jsonStr.startsWith("```")) {
170
- jsonStr = jsonStr.split("\n").slice(1).filter((l) => !l.startsWith("```")).join("\n");
171
- }
172
- const parsed = JSON.parse(jsonStr);
173
- if (Array.isArray(parsed) && parsed.length > 0) return parsed;
174
- } catch {
175
- }
176
- return [query];
177
- }
178
- async mergeResults(originalQuery, subResults) {
179
- const context = subResults.map((r, i) => `--- Sub-query ${i + 1}: ${r.sub_query} ---
180
- ${r.content}`).join("\n\n");
181
- return this.chat(
182
- [
183
- {
184
- role: "system",
185
- content: "You are a research synthesis assistant. Merge multiple search results into a single coherent answer. Keep all sources and citations. Match the language of the original query."
186
- },
187
- {
188
- role: "user",
189
- content: `Based on the following search results for sub-queries, provide a comprehensive answer to the original question. Synthesize the information, remove duplicates, and present a coherent response with sources.
190
-
191
- Original question: ${originalQuery}
192
-
193
- ${context}`
194
- }
195
- ],
196
- this.config.analysis_model
197
- );
198
- }
199
- };
200
246
 
201
247
  // src/output.ts
202
248
  function success(command, data) {
@@ -228,75 +274,236 @@ function output(result, human = false) {
228
274
  }
229
275
 
230
276
  // src/index.ts
231
- var program = new Command().name("ai-search").description("AI-powered web search CLI").version(CLI_VERSION);
232
- program.argument("[query...]", "Search query").option("-m, --model <model>", "Model to use").option("--split <n>", "Max sub-queries for query splitting", "1").option("--json", "Force JSON output").option("--human", "Human-readable output").action(async (queryParts, opts) => {
233
- const query = queryParts.join(" ");
234
- if (!query) {
277
+ var program = new Command().name(CLI_NAME).description("AI Search Gateway CLI client").version(CLI_VERSION).option(
278
+ "--gateway-url <url>",
279
+ `Gateway URL (default: $AI_SEARCH_GATEWAY_URL, auth config, or ${DEFAULT_GATEWAY_URL})`
280
+ ).option("--token <token>", "Gateway token for authentication").option("--human", "Human-readable output instead of JSON");
281
+ program.argument("[query...]", "Search query").option("--mode <mode>", `Search mode (${SEARCH_MODES.join(", ")}; default: fast)`).option("-m, --model <model>", "Model to use").option("--split <n>", "Max sub-queries for query splitting").option("--num <n>", "Max search results to request").action(async function(queryParts) {
282
+ if (queryParts.length === 0) {
235
283
  program.help();
236
284
  return;
237
285
  }
238
- await doSearch(query, opts);
286
+ await runSearchCommand(queryParts, this, "search");
239
287
  });
240
- program.command("search <query...>").description("Search the web").option("-m, --model <model>", "Model to use").option("--split <n>", "Max sub-queries for query splitting", "1").option("--json", "Force JSON output").option("--human", "Human-readable output").option("--stdin", "Read query from stdin JSON").action(async (queryParts, opts) => {
241
- let query;
242
- if (opts.stdin) {
243
- const chunks = [];
244
- for await (const chunk of process.stdin) chunks.push(chunk);
245
- const input = JSON.parse(Buffer.concat(chunks).toString());
246
- query = input.query ?? "";
247
- if (input.model) opts.model = input.model;
248
- if (input.split) opts.split = String(input.split);
249
- } else {
250
- query = queryParts.join(" ");
288
+ program.command("search [query...]").description("Search the web through the gateway").option("--mode <mode>", `Search mode (${SEARCH_MODES.join(", ")}; default: fast)`).option("-m, --model <model>", "Model to use").option("--split <n>", "Max sub-queries for query splitting").option("--num <n>", "Max search results to request").option("--stdin", "Read query from stdin JSON").action(async function(queryParts) {
289
+ await runSearchCommand(queryParts, this, "search");
290
+ });
291
+ program.command("models").description("List available gateway models").action(async function() {
292
+ const globals = this.optsWithGlobals();
293
+ try {
294
+ const client = createClient(globals);
295
+ const models = await client.models();
296
+ output(success("models", { models }), Boolean(globals.human));
297
+ } catch (err) {
298
+ printError("models", err, Boolean(globals.human));
251
299
  }
252
- await doSearch(query, opts);
253
300
  });
254
- program.command("models").description("List available search models").action(() => {
255
- output(success("models", { models: SEARCH_MODELS }));
301
+ program.command("providers").description("List gateway search providers").action(async function() {
302
+ const globals = this.optsWithGlobals();
303
+ try {
304
+ const client = createClient(globals);
305
+ const providers = await client.providers();
306
+ output(success("providers", { providers }), Boolean(globals.human));
307
+ } catch (err) {
308
+ printError("providers", err, Boolean(globals.human));
309
+ }
310
+ }).addCommand(
311
+ new Command("health").description("Check provider health").action(async function() {
312
+ const globals = this.optsWithGlobals();
313
+ try {
314
+ const client = createClient(globals);
315
+ const providers = await client.providersHealth();
316
+ output(success("providers.health", { providers }), Boolean(globals.human));
317
+ } catch (err) {
318
+ printError("providers.health", err, Boolean(globals.human));
319
+ }
320
+ })
321
+ );
322
+ program.command("health").description("Check gateway health").action(async function() {
323
+ const globals = this.optsWithGlobals();
324
+ try {
325
+ const client = createClient(globals, false);
326
+ const healthy = await client.health();
327
+ output(success("health", { status: healthy ? "ok" : "unexpected" }), Boolean(globals.human));
328
+ } catch (err) {
329
+ printError("health", err, Boolean(globals.human));
330
+ }
256
331
  });
332
+ program.command("auth").description("Credential management").addCommand(
333
+ new Command("set").description("Save token and gateway URL locally").argument("<token>", "Gateway token").action(function(token) {
334
+ const globals = this.optsWithGlobals();
335
+ try {
336
+ const current = loadAuthConfig();
337
+ const gatewayUrl = resolveGatewayUrl({ gatewayUrl: globals.gatewayUrl });
338
+ saveAuthConfig({ ...current, token, gateway_url: gatewayUrl });
339
+ output(success("auth.set", { saved: true, gateway_url: gatewayUrl }), Boolean(globals.human));
340
+ } catch (err) {
341
+ printError("auth.set", err, Boolean(globals.human));
342
+ }
343
+ })
344
+ ).addCommand(
345
+ new Command("status").description("Show saved authentication status").action(function() {
346
+ const globals = this.optsWithGlobals();
347
+ try {
348
+ const config = loadAuthConfig();
349
+ output(
350
+ success("auth.status", {
351
+ config_path: configPath(),
352
+ has_token: Boolean(config.token),
353
+ token_preview: config.token ? mask(config.token) : null,
354
+ gateway_url: config.gateway_url ?? null
355
+ }),
356
+ Boolean(globals.human)
357
+ );
358
+ } catch (err) {
359
+ printError("auth.status", err, Boolean(globals.human));
360
+ }
361
+ })
362
+ ).addCommand(
363
+ new Command("clear").description("Remove saved credentials").action(function() {
364
+ const globals = this.optsWithGlobals();
365
+ try {
366
+ const path = configPath();
367
+ if (existsSync2(path)) {
368
+ unlinkSync(path);
369
+ }
370
+ output(success("auth.clear", { cleared: true }), Boolean(globals.human));
371
+ } catch (err) {
372
+ printError("auth.clear", err, Boolean(globals.human));
373
+ }
374
+ })
375
+ );
257
376
  program.command("config").description("Manage configuration").addCommand(
258
- new Command("set").description("Set a config value").argument("<key>", "Config key (api_url, api_key, search_model, analysis_model)").argument("<value>", "Config value").action((key, value) => {
259
- saveConfig({ [key]: value });
260
- output(success("config.set", { key, value: key === "api_key" ? "***" : value }));
377
+ new Command("set").description("Set a config value").argument("<key>", "Config key (gateway_url)").argument("<value>", "Config value").action(function(key, value) {
378
+ const globals = this.optsWithGlobals();
379
+ try {
380
+ if (key !== "gateway_url") {
381
+ throw inputError(`Unsupported config key: ${key}`);
382
+ }
383
+ const current = loadAuthConfig();
384
+ saveAuthConfig({ ...current, gateway_url: value });
385
+ output(success("config.set", { key, value }), Boolean(globals.human));
386
+ } catch (err) {
387
+ printError("config.set", err, Boolean(globals.human));
388
+ }
261
389
  })
262
390
  ).addCommand(
263
- new Command("show").description("Show current configuration").action(() => {
264
- const config = loadConfig();
265
- output(
266
- success("config.show", {
267
- ...config,
268
- api_key: config.api_key ? `${config.api_key.slice(0, 8)}...` : "(not set)"
269
- })
270
- );
391
+ new Command("show").description("Show current configuration").action(function() {
392
+ const globals = this.optsWithGlobals();
393
+ try {
394
+ const token = resolveToken({ token: globals.token });
395
+ output(
396
+ success("config.show", {
397
+ config_path: configPath(),
398
+ gateway_url: resolveGatewayUrl({ gatewayUrl: globals.gatewayUrl }),
399
+ has_token: Boolean(token),
400
+ token_preview: token ? mask(token) : null,
401
+ search_modes: SEARCH_MODES,
402
+ search_models: SEARCH_MODELS
403
+ }),
404
+ Boolean(globals.human)
405
+ );
406
+ } catch (err) {
407
+ printError("config.show", err, Boolean(globals.human));
408
+ }
271
409
  })
272
410
  );
273
- async function doSearch(query, opts) {
274
- const config = loadConfig();
275
- if (!config.api_key) {
276
- output(error("search", "NO_API_KEY", "API key not configured", "Run: ai-search config set api_key <key> OR export AI_SEARCH_KEY=<key>"));
277
- process.exit(1);
278
- }
279
- const client = new AIClient(config);
280
- const model = opts.model ?? config.search_model;
281
- const split = parseInt(opts.split ?? "1", 10);
411
+ await program.parseAsync(process.argv);
412
+ async function runSearchCommand(queryParts, command, commandName) {
413
+ const options = command.optsWithGlobals();
282
414
  try {
283
- const result = await client.search(query, model, split);
284
- const isHuman = opts.human || process.stdout.isTTY && !opts.json;
285
- if (isHuman) {
286
- process.stderr.write(`
287
- \u{1F50D} [${result.model}] Search: ${result.query}
288
-
289
- `);
290
- process.stdout.write(result.content + "\n");
291
- process.stderr.write(`
292
- \u{1F4CA} Tokens: ${result.tokens}
293
- `);
294
- } else {
295
- output(success("search", result));
415
+ const request = await resolveSearchRequest(queryParts, options);
416
+ const client = createClient(options);
417
+ const result = await client.search(request.query, {
418
+ mode: request.mode,
419
+ model: request.model,
420
+ split: request.split,
421
+ num: request.num
422
+ });
423
+ output(success(commandName, result), Boolean(options.human));
424
+ } catch (err) {
425
+ printError(commandName, err, Boolean(options.human));
426
+ }
427
+ }
428
+ function createClient(globals, requireAuth = true) {
429
+ const gatewayUrl = resolveGatewayUrl({ gatewayUrl: globals.gatewayUrl });
430
+ const token = resolveToken({ token: globals.token });
431
+ if (requireAuth && !token) {
432
+ throw configError(
433
+ "Authentication required. Use --token, set AI_SEARCH_TOKEN, or run ai-search auth set <token>.",
434
+ "ai-search auth set <token>"
435
+ );
436
+ }
437
+ return new GatewayClient(gatewayUrl, token);
438
+ }
439
+ async function resolveSearchRequest(queryParts, options) {
440
+ const input = options.stdin ? await readStdinInput() : {};
441
+ const query = queryParts.join(" ") || stringValue(input.query) || "";
442
+ if (!query) {
443
+ throw inputError("Search query is required");
444
+ }
445
+ const modeValue = options.mode ?? stringValue(input.mode) ?? "fast";
446
+ const split = parsePositiveInteger(options.split ?? input.split, "split");
447
+ const num = parsePositiveInteger(options.num ?? input.num, "num");
448
+ return {
449
+ query,
450
+ mode: parseMode(modeValue),
451
+ model: options.model ?? stringValue(input.model) ?? void 0,
452
+ ...split !== void 0 ? { split } : {},
453
+ ...num !== void 0 ? { num } : {}
454
+ };
455
+ }
456
+ async function readStdinInput() {
457
+ const chunks = [];
458
+ for await (const chunk of process.stdin) {
459
+ chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
460
+ }
461
+ const text = Buffer.concat(chunks).toString("utf8").trim();
462
+ if (!text) {
463
+ throw inputError("Expected JSON on stdin because --stdin was provided");
464
+ }
465
+ try {
466
+ const parsed = JSON.parse(text);
467
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
468
+ throw new Error("stdin JSON must be an object");
296
469
  }
297
- } catch (e) {
298
- output(error("search", "SEARCH_FAILED", e instanceof Error ? e.message : String(e)));
299
- process.exit(3);
470
+ return parsed;
471
+ } catch (err) {
472
+ throw inputError(
473
+ `Failed to parse stdin JSON: ${err instanceof Error ? err.message : String(err)}`
474
+ );
300
475
  }
301
476
  }
302
- await program.parseAsync(process.argv);
477
+ function parseMode(value) {
478
+ if (SEARCH_MODES.includes(value)) {
479
+ return value;
480
+ }
481
+ throw inputError(`Unsupported search mode: ${value}`);
482
+ }
483
+ function parsePositiveInteger(value, name) {
484
+ if (value === void 0 || value === null || value === "") {
485
+ return void 0;
486
+ }
487
+ const normalized = typeof value === "number" ? String(value) : String(value).trim();
488
+ if (!/^[1-9]\d*$/.test(normalized)) {
489
+ throw inputError(`${name} must be a positive integer`);
490
+ }
491
+ return Number.parseInt(normalized, 10);
492
+ }
493
+ function stringValue(value) {
494
+ return typeof value === "string" && value.length > 0 ? value : void 0;
495
+ }
496
+ function mask(token) {
497
+ if (token.length <= 12) {
498
+ return "***";
499
+ }
500
+ return `${token.slice(0, 8)}...${token.slice(-4)}`;
501
+ }
502
+ function printError(commandName, err, human = false) {
503
+ const normalized = normalizeError(err);
504
+ output(
505
+ error(commandName, normalized.code, normalized.message, normalized.suggestion),
506
+ human
507
+ );
508
+ process.exit(normalized.exitCode);
509
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@doufunao123/ai-search",
3
- "version": "0.1.0",
3
+ "version": "0.2.0",
4
4
  "description": "AI-powered web search CLI with query splitting",
5
5
  "type": "module",
6
6
  "bin": {