@xcrawl/cli 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114) hide show
  1. package/CHANGELOG.md +23 -0
  2. package/README.md +115 -0
  3. package/dist/api/client.d.ts +14 -0
  4. package/dist/api/client.js +92 -0
  5. package/dist/api/client.js.map +1 -0
  6. package/dist/api/crawl.d.ts +4 -0
  7. package/dist/api/crawl.js +63 -0
  8. package/dist/api/crawl.js.map +1 -0
  9. package/dist/api/credits.d.ts +2 -0
  10. package/dist/api/credits.js +7 -0
  11. package/dist/api/credits.js.map +1 -0
  12. package/dist/api/map.d.ts +2 -0
  13. package/dist/api/map.js +29 -0
  14. package/dist/api/map.js.map +1 -0
  15. package/dist/api/scrape.d.ts +2 -0
  16. package/dist/api/scrape.js +75 -0
  17. package/dist/api/scrape.js.map +1 -0
  18. package/dist/api/search.d.ts +2 -0
  19. package/dist/api/search.js +25 -0
  20. package/dist/api/search.js.map +1 -0
  21. package/dist/api/whoami.d.ts +2 -0
  22. package/dist/api/whoami.js +7 -0
  23. package/dist/api/whoami.js.map +1 -0
  24. package/dist/commands/config.d.ts +3 -0
  25. package/dist/commands/config.js +64 -0
  26. package/dist/commands/config.js.map +1 -0
  27. package/dist/commands/crawl.d.ts +3 -0
  28. package/dist/commands/crawl.js +91 -0
  29. package/dist/commands/crawl.js.map +1 -0
  30. package/dist/commands/credits.d.ts +3 -0
  31. package/dist/commands/credits.js +60 -0
  32. package/dist/commands/credits.js.map +1 -0
  33. package/dist/commands/doctor.d.ts +3 -0
  34. package/dist/commands/doctor.js +115 -0
  35. package/dist/commands/doctor.js.map +1 -0
  36. package/dist/commands/init.d.ts +2 -0
  37. package/dist/commands/init.js +9 -0
  38. package/dist/commands/init.js.map +1 -0
  39. package/dist/commands/login.d.ts +3 -0
  40. package/dist/commands/login.js +27 -0
  41. package/dist/commands/login.js.map +1 -0
  42. package/dist/commands/map.d.ts +3 -0
  43. package/dist/commands/map.js +48 -0
  44. package/dist/commands/map.js.map +1 -0
  45. package/dist/commands/scrape.d.ts +3 -0
  46. package/dist/commands/scrape.js +118 -0
  47. package/dist/commands/scrape.js.map +1 -0
  48. package/dist/commands/search.d.ts +3 -0
  49. package/dist/commands/search.js +64 -0
  50. package/dist/commands/search.js.map +1 -0
  51. package/dist/commands/shared.d.ts +14 -0
  52. package/dist/commands/shared.js +41 -0
  53. package/dist/commands/shared.js.map +1 -0
  54. package/dist/commands/whoami.d.ts +3 -0
  55. package/dist/commands/whoami.js +61 -0
  56. package/dist/commands/whoami.js.map +1 -0
  57. package/dist/core/auth.d.ts +1 -0
  58. package/dist/core/auth.js +11 -0
  59. package/dist/core/auth.js.map +1 -0
  60. package/dist/core/config.d.ts +20 -0
  61. package/dist/core/config.js +134 -0
  62. package/dist/core/config.js.map +1 -0
  63. package/dist/core/constants.d.ts +3 -0
  64. package/dist/core/constants.js +12 -0
  65. package/dist/core/constants.js.map +1 -0
  66. package/dist/core/env.d.ts +2 -0
  67. package/dist/core/env.js +34 -0
  68. package/dist/core/env.js.map +1 -0
  69. package/dist/core/errors.d.ts +33 -0
  70. package/dist/core/errors.js +66 -0
  71. package/dist/core/errors.js.map +1 -0
  72. package/dist/core/files.d.ts +3 -0
  73. package/dist/core/files.js +37 -0
  74. package/dist/core/files.js.map +1 -0
  75. package/dist/core/logger.d.ts +14 -0
  76. package/dist/core/logger.js +32 -0
  77. package/dist/core/logger.js.map +1 -0
  78. package/dist/core/output.d.ts +17 -0
  79. package/dist/core/output.js +29 -0
  80. package/dist/core/output.js.map +1 -0
  81. package/dist/formatters/json.d.ts +1 -0
  82. package/dist/formatters/json.js +18 -0
  83. package/dist/formatters/json.js.map +1 -0
  84. package/dist/formatters/markdown.d.ts +1 -0
  85. package/dist/formatters/markdown.js +7 -0
  86. package/dist/formatters/markdown.js.map +1 -0
  87. package/dist/formatters/table.d.ts +5 -0
  88. package/dist/formatters/table.js +23 -0
  89. package/dist/formatters/table.js.map +1 -0
  90. package/dist/formatters/text.d.ts +21 -0
  91. package/dist/formatters/text.js +78 -0
  92. package/dist/formatters/text.js.map +1 -0
  93. package/dist/index.d.ts +7 -0
  94. package/dist/index.js +117 -0
  95. package/dist/index.js.map +1 -0
  96. package/dist/types/api.d.ts +159 -0
  97. package/dist/types/api.js +3 -0
  98. package/dist/types/api.js.map +1 -0
  99. package/dist/types/cli.d.ts +12 -0
  100. package/dist/types/cli.js +3 -0
  101. package/dist/types/cli.js.map +1 -0
  102. package/dist/types/config.d.ts +19 -0
  103. package/dist/types/config.js +3 -0
  104. package/dist/types/config.js.map +1 -0
  105. package/dist/utils/concurrency.d.ts +1 -0
  106. package/dist/utils/concurrency.js +21 -0
  107. package/dist/utils/concurrency.js.map +1 -0
  108. package/dist/utils/time.d.ts +1 -0
  109. package/dist/utils/time.js +7 -0
  110. package/dist/utils/time.js.map +1 -0
  111. package/dist/utils/validate.d.ts +3 -0
  112. package/dist/utils/validate.js +48 -0
  113. package/dist/utils/validate.js.map +1 -0
  114. package/package.json +47 -0
package/CHANGELOG.md ADDED
@@ -0,0 +1,23 @@
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ ## [0.2.0] - 2026-03-12
6
+
7
+ ### Added
8
+ - Added `map` command and API integration.
9
+ - Added `crawl` command with `crawl status` and optional `--wait` polling.
10
+ - Added `config get`, `config set`, and `config keys` commands.
11
+ - Added batch scraping support with `--input` and `--concurrency`.
12
+ - Added real API smoke test script: `npm run smoke`.
13
+ - Added GitHub Actions workflow for build/test/lint and optional smoke test.
14
+
15
+ ### Changed
16
+ - Updated default API base URL to `https://run.xcrawl.com`.
17
+ - Aligned API method and response mapping for `scrape`, `search`, `map`, and `crawl`.
18
+ - Improved `doctor` connectivity behavior for public API account endpoint limitations.
19
+ - Improved `whoami` and `credits` with actionable fallback output when account endpoints are not exposed by the public API.
20
+
21
+ ### Quality
22
+ - Expanded integration test coverage for new commands and fallback behavior.
23
+ - Current automated checks: `npm run build`, `npm run test`, and `npm run lint`.
package/README.md ADDED
@@ -0,0 +1,115 @@
1
+ # XCrawl CLI
2
+
3
+ XCrawl CLI is a Node.js command-line tool for scraping, searching, mapping, and crawling websites.
4
+
5
+ ## Install
6
+
7
+ ### Run with npx (no global install)
8
+
9
+ ```bash
10
+ npx -y @xcrawl/cli@latest doctor
11
+ ```
12
+
13
+ ### Install globally with npm
14
+
15
+ ```bash
16
+ npm install -g @xcrawl/cli
17
+ xcrawl --help
18
+ ```
19
+
20
+ ## Authenticate
21
+
22
+ Use either the `login` command or an environment variable.
23
+
24
+ ```bash
25
+ # Save API key locally
26
+ xcrawl login --api-key <your_api_key>
27
+
28
+ # Or use env var
29
+ export XCRAWL_API_KEY=<your_api_key>
30
+ ```
31
+
32
+ ## Quickstart
33
+
34
+ ```bash
35
+ # Scrape a page
36
+ xcrawl scrape https://example.com --format markdown
37
+
38
+ # Search
39
+ xcrawl search "xcrawl cli" --limit 10
40
+
41
+ # Map links in a site
42
+ xcrawl map https://example.com --limit 10
43
+
44
+ # Start a crawl
45
+ xcrawl crawl https://example.com
46
+
47
+ # Check crawl status
48
+ xcrawl crawl status <job-id>
49
+ ```
50
+
51
+ Default shortcut:
52
+
53
+ ```bash
54
+ xcrawl https://example.com
55
+ # same as:
56
+ xcrawl scrape https://example.com
57
+ ```
58
+
59
+ ## Batch Scraping
60
+
61
+ ```bash
62
+ xcrawl scrape --input ./urls.txt --concurrency 3 --json
63
+ ```
64
+
65
+ `urls.txt` should contain one URL per line. Lines starting with `#` are ignored.
66
+
67
+ ## Config
68
+
69
+ ```bash
70
+ xcrawl config keys
71
+ xcrawl config get api-base-url
72
+ xcrawl config set api-base-url https://run.xcrawl.com
73
+ ```
74
+
75
+ Config priority:
76
+ 1. CLI flags
77
+ 2. Environment variables
78
+ 3. Local config file `~/.xcrawl/config.json`
79
+ 4. Defaults
80
+
81
+ Environment variables:
82
+ - `XCRAWL_API_KEY`
83
+ - `XCRAWL_API_BASE_URL`
84
+ - `XCRAWL_TIMEOUT_MS`
85
+ - `XCRAWL_OUTPUT_DIR`
86
+ - `XCRAWL_DEBUG`
87
+
88
+ ## Output
89
+
90
+ - Default: human-readable text
91
+ - `--json`: machine-readable JSON
92
+ - `--output`: write output to file
93
+ - Multi-URL scrape defaults to `.xcrawl/` when no output path is provided
94
+
95
+ ## Public API Notes
96
+
97
+ - Default API base URL is `https://run.xcrawl.com`.
98
+ - Public API currently does not expose standalone `whoami` / `credits` endpoints.
99
+ - CLI handles this with explicit fallback output instead of hard failure.
100
+
101
+ ## Local Development
102
+
103
+ ```bash
104
+ npm install
105
+ npm run build
106
+ npm run test
107
+ npm run lint
108
+ ```
109
+
110
+ Real API smoke test:
111
+
112
+ ```bash
113
+ export XCRAWL_API_KEY=<your_api_key>
114
+ npm run smoke
115
+ ```
@@ -0,0 +1,14 @@
1
import type { ApiRequestOptions, ApiTransport } from '../types/api';
/** Construction options for {@link ApiClient}. */
export interface ApiClientOptions {
    /** Base URL that all request paths are resolved against. */
    baseUrl: string;
    /** Bearer token sent in the `authorization` header when present. */
    apiKey?: string;
    /** Default per-request timeout in milliseconds (overridable per call). */
    timeoutMs?: number;
    /** Debug flag stored on the client; NOTE(review): not read inside client.js itself — presumably consumed elsewhere. */
    debug?: boolean;
}
/** HTTP transport for the XCrawl API, backed by global `fetch`. */
export declare class ApiClient implements ApiTransport {
    private readonly options;
    constructor(options: ApiClientOptions);
    /** Perform a GET request; resolves with the parsed JSON body. */
    get<T>(path: string, options?: ApiRequestOptions): Promise<T>;
    /** Perform a POST request; resolves with the parsed JSON body. */
    post<T>(path: string, options?: ApiRequestOptions): Promise<T>;
    private request;
}
@@ -0,0 +1,92 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ApiClient = void 0;
4
+ const errors_1 = require("../core/errors");
5
/**
 * Build an absolute request URL from the configured base URL, a path, and
 * optional query parameters.
 *
 * The base URL is normalized to end with "/" and the path is normalized to
 * NOT start with "/", so a base URL that itself carries a path prefix
 * (e.g. "https://host/api") is preserved instead of being clobbered by
 * WHATWG absolute-path resolution (`new URL('/v1/x', 'https://host/api/')`
 * would otherwise drop "/api").
 *
 * Query entries whose value is `undefined` are omitted.
 */
function buildUrl(baseUrl, path, query) {
    const base = baseUrl.endsWith('/') ? baseUrl : `${baseUrl}/`;
    const relativePath = path.startsWith('/') ? path.slice(1) : path;
    const url = new URL(relativePath, base);
    for (const [key, value] of Object.entries(query ?? {})) {
        if (value !== undefined) {
            url.searchParams.set(key, String(value));
        }
    }
    return url.toString();
}
14
/**
 * Minimal HTTP client for the XCrawl API.
 *
 * Wraps global `fetch` with base-URL resolution, JSON request/response
 * handling, bearer-token auth, and an AbortController-based timeout.
 * Non-2xx responses are surfaced as `ApiError`; transport-level failures
 * (including timeouts) as `NetworkError`.
 */
class ApiClient {
    constructor(options) {
        // Defaults first, so explicit caller options win via spread.
        this.options = {
            timeoutMs: 30000,
            debug: false,
            ...options
        };
    }
    async get(path, options = {}) {
        return this.request('GET', path, options);
    }
    async post(path, options = {}) {
        return this.request('POST', path, options);
    }
    /**
     * Execute one HTTP request.
     * @param method HTTP verb ('GET' | 'POST').
     * @param path API path, resolved against the configured base URL.
     * @param options Per-request body/query/headers/timeout overrides.
     * @returns Parsed JSON body (or `{}` when the response body is empty).
     * @throws ApiError on non-2xx responses; NetworkError on transport failure.
     */
    async request(method, path, options) {
        const timeoutMs = options.timeoutMs ?? this.options.timeoutMs ?? 30000;
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), timeoutMs);
        const headers = {
            'content-type': 'application/json',
            ...(options.headers ?? {})
        };
        if (this.options.apiKey) {
            headers.authorization = `Bearer ${this.options.apiKey}`;
        }
        const url = buildUrl(this.options.baseUrl, path, options.query);
        try {
            const response = await fetch(url, {
                method,
                headers,
                // Only an omitted body is skipped; present-but-falsy values
                // (0, '', false, null) are still serialized and sent.
                body: options.body !== undefined ? JSON.stringify(options.body) : undefined,
                signal: controller.signal
            });
            const text = await response.text();
            const parsed = text ? tryJsonParse(text) : undefined;
            if (!response.ok) {
                const message = extractApiMessage(parsed) ?? `Request failed (${response.status})`;
                throw new errors_1.ApiError(message, 'Check your API key and request arguments, or retry later. You can run `xcrawl doctor` for diagnostics.', response.status, parsed);
            }
            if (parsed === undefined) {
                return {};
            }
            return parsed;
        }
        catch (error) {
            if (error instanceof errors_1.ApiError) {
                throw error;
            }
            // Some runtimes raise abort failures as DOMException, others as a
            // plain Error named 'AbortError'; detect both so a timeout is
            // reported as a timeout rather than a generic network failure.
            const isAbort = (error instanceof DOMException || error instanceof Error) && error.name === 'AbortError';
            if (isAbort) {
                throw new errors_1.NetworkError('Request timed out.', 'Increase the timeout with `--timeout`.', error);
            }
            throw new errors_1.NetworkError('Network request failed.', 'Check your network connection and API base URL configuration.', error);
        }
        finally {
            // Always cancel the pending timer so the process can exit promptly.
            clearTimeout(timeout);
        }
    }
}
exports.ApiClient = ApiClient;
73
/**
 * Parse `text` as JSON, returning the raw string unchanged when it is not
 * valid JSON (some endpoints return plain-text bodies).
 */
function tryJsonParse(text) {
    let parsed;
    try {
        parsed = JSON.parse(text);
    }
    catch {
        parsed = text;
    }
    return parsed;
}
81
/**
 * Pull a human-readable error message out of an API error payload.
 *
 * Accepts any parsed response body; returns the first non-blank string found
 * under `message`, then `error`, otherwise `undefined`. Unlike the plain
 * `message ?? error` fallback, a present-but-blank `message` no longer
 * masks a usable `error` string.
 */
function extractApiMessage(parsed) {
    if (!parsed || typeof parsed !== 'object') {
        return undefined;
    }
    const shape = parsed;
    for (const candidate of [shape.message, shape.error]) {
        if (typeof candidate === 'string' && candidate.trim().length > 0) {
            return candidate;
        }
    }
    return undefined;
}
92
+ //# sourceMappingURL=client.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"client.js","sourceRoot":"","sources":["../../src/api/client.ts"],"names":[],"mappings":";;;AAAA,2CAAwD;AAUxD,SAAS,QAAQ,CAAC,OAAe,EAAE,IAAY,EAAE,KAA6D;IAC5G,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,IAAI,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,GAAG,CAAC,CAAC;IAE3E,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC,EAAE,CAAC;QACvD,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACxB,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;QAC3C,CAAC;IACH,CAAC;IAED,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,MAAa,SAAS;IAGpB,YAAY,OAAyB;QACnC,IAAI,CAAC,OAAO,GAAG;YACb,SAAS,EAAE,KAAK;YAChB,KAAK,EAAE,KAAK;YACZ,GAAG,OAAO;SACX,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,GAAG,CAAI,IAAY,EAAE,UAA6B,EAAE;QACxD,OAAO,IAAI,CAAC,OAAO,CAAI,KAAK,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,CAAC,IAAI,CAAI,IAAY,EAAE,UAA6B,EAAE;QACzD,OAAO,IAAI,CAAC,OAAO,CAAI,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IAChD,CAAC;IAEO,KAAK,CAAC,OAAO,CAAI,MAAc,EAAE,IAAY,EAAE,OAA0B;QAC/E,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,KAAK,CAAC;QACvE,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;QACzC,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,EAAE,SAAS,CAAC,CAAC;QAEhE,MAAM,OAAO,GAA2B;YACtC,cAAc,EAAE,kBAAkB;YAClC,GAAG,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC;SAC3B,CAAC;QAEF,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;YACxB,OAAO,CAAC,aAAa,GAAG,UAAU,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;QAC1D,CAAC;QAED,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC;QAEhE,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;gBAChC,MAAM;gBACN,OAAO;gBACP,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS;gBAC7D,MAAM,EAAE,UAAU,CAAC,MAAM;aAC1B,CAAC,CAAC;YAEH,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YACnC,MAAM,MAAM,GAAG,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;YAErD,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACjB,MAAM,OAAO,GAAG,iBAAiB,CAAC,MAAM,CAAC
,IAAI,mBAAmB,QAAQ,CAAC,MAAM,GAAG,CAAC;gBACnF,MAAM,IAAI,iBAAQ,CAChB,OAAO,EACP,wGAAwG,EACxG,QAAQ,CAAC,MAAM,EACf,MAAM,CACP,CAAC;YACJ,CAAC;YAED,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,OAAO,EAAO,CAAC;YACjB,CAAC;YAED,OAAO,MAAW,CAAC;QACrB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,IAAI,KAAK,YAAY,iBAAQ,EAAE,CAAC;gBAC9B,MAAM,KAAK,CAAC;YACd,CAAC;YAED,IAAI,KAAK,YAAY,YAAY,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;gBACjE,MAAM,IAAI,qBAAY,CAAC,oBAAoB,EAAE,wCAAwC,EAAE,KAAK,CAAC,CAAC;YAChG,CAAC;YAED,MAAM,IAAI,qBAAY,CAAC,yBAAyB,EAAE,+DAA+D,EAAE,KAAK,CAAC,CAAC;QAC5H,CAAC;gBAAS,CAAC;YACT,YAAY,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;IACH,CAAC;CACF;AA3ED,8BA2EC;AAED,SAAS,YAAY,CAAC,IAAY;IAChC,IAAI,CAAC;QACH,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAY,CAAC;IACrC,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,IAAI,CAAC;IACd,CAAC;AACH,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAe;IACxC,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE,CAAC;QAC1C,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,MAAM,KAAK,GAAG,MAAiC,CAAC;IAChD,MAAM,SAAS,GAAG,KAAK,CAAC,OAAO,IAAI,KAAK,CAAC,KAAK,CAAC;IAC/C,IAAI,OAAO,SAAS,KAAK,QAAQ,IAAI,SAAS,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACjE,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC"}
@@ -0,0 +1,4 @@
1
import type { ApiTransport, CrawlStartRequest, CrawlStartResponse, CrawlStatusResponse } from '../types/api';
/** Start a crawl job for the requested URL. */
export declare function startCrawl(client: ApiTransport, request: CrawlStartRequest): Promise<CrawlStartResponse>;
/** Fetch the current status of a crawl job by id. */
export declare function fetchCrawlStatus(client: ApiTransport, jobId: string): Promise<CrawlStatusResponse>;
/** Poll a crawl job every `intervalMs` until it reaches a terminal status, throwing once `timeoutMs` elapses. */
export declare function waitForCrawlCompletion(client: ApiTransport, jobId: string, intervalMs: number, timeoutMs: number): Promise<CrawlStatusResponse>;
@@ -0,0 +1,63 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.startCrawl = startCrawl;
4
+ exports.fetchCrawlStatus = fetchCrawlStatus;
5
+ exports.waitForCrawlCompletion = waitForCrawlCompletion;
6
+ const promises_1 = require("node:timers/promises");
7
+ const errors_1 = require("../core/errors");
8
+ const TERMINAL_CRAWL_STATUSES = new Set(['completed', 'failed']);
9
/**
 * Coerce a raw API status string into the CLI's crawl-status vocabulary
 * ('pending' | 'crawling' | 'completed' | 'failed'). Aliases: 'queued' ->
 * 'pending', 'running' -> 'crawling'. Missing or unknown statuses map to
 * 'pending'.
 */
function normalizeCrawlStatus(status) {
    switch (status) {
        case 'pending':
        case 'crawling':
        case 'completed':
        case 'failed':
            return status;
        case 'running':
            return 'crawling';
        case 'queued':
        default:
            return 'pending';
    }
}
24
/**
 * Kick off a crawl via POST /v1/crawl and normalize the response shape.
 * Missing response fields fall back to '' (job id) and the requested URL.
 */
async function startCrawl(client, request) {
    const payload = {
        url: request.url,
        crawler: {
            limit: request.maxPages
        }
    };
    const raw = await client.post('/v1/crawl', { body: payload });
    return {
        jobId: raw.crawl_id ?? '',
        url: raw.url ?? request.url,
        status: normalizeCrawlStatus(raw.status)
    };
}
39
/**
 * Fetch crawl-job status via GET /v1/crawl/:id and normalize it.
 *
 * `completedPages` is derived from the length of the returned page array
 * when one is present. NOTE(review): `failedPages` is hard-coded to 0 —
 * the response mapping here exposes no failure count; confirm against the
 * API before relying on it.
 */
async function fetchCrawlStatus(client, jobId) {
    const raw = await client.get(`/v1/crawl/${encodeURIComponent(jobId)}`);
    const pageList = raw.data?.data ?? [];
    const completedPages = Array.isArray(pageList) ? pageList.length : undefined;
    return {
        jobId: raw.crawl_id ?? jobId,
        url: raw.url ?? '',
        status: normalizeCrawlStatus(raw.status),
        completedPages,
        failedPages: 0,
        startedAt: raw.started_at,
        finishedAt: raw.ended_at
    };
}
52
/**
 * Poll `fetchCrawlStatus` every `intervalMs` until the job reaches a
 * terminal status ('completed' or 'failed').
 * @throws ApiError once more than `timeoutMs` has elapsed without the job
 *         finishing.
 */
async function waitForCrawlCompletion(client, jobId, intervalMs, timeoutMs) {
    const deadline = Date.now() + timeoutMs;
    while (Date.now() <= deadline) {
        const current = await fetchCrawlStatus(client, jobId);
        if (TERMINAL_CRAWL_STATUSES.has(current.status)) {
            return current;
        }
        // Sleep between polls (node:timers/promises setTimeout).
        await (0, promises_1.setTimeout)(intervalMs);
    }
    throw new errors_1.ApiError(`Crawl job did not finish in time: ${jobId}`, 'Increase --wait-timeout or check progress with `xcrawl crawl status <job-id>`.');
}
63
+ //# sourceMappingURL=crawl.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"crawl.js","sourceRoot":"","sources":["../../src/api/crawl.ts"],"names":[],"mappings":";;AAmCA,gCAeC;AAED,4CAcC;AAED,wDAqBC;AAzFD,mDAA2D;AAE3D,2CAA0C;AAW1C,MAAM,uBAAuB,GAAG,IAAI,GAAG,CAAiB,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC,CAAC;AAEjF,SAAS,oBAAoB,CAAC,MAA0B;IACtD,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,UAAU,IAAI,MAAM,KAAK,WAAW,IAAI,MAAM,KAAK,QAAQ,EAAE,CAAC;QACnG,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,IAAI,MAAM,KAAK,QAAQ,EAAE,CAAC;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACzB,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,UAAU,CAAC,MAAoB,EAAE,OAA0B;IAC/E,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,IAAI,CAAwB,WAAW,EAAE;QAChE,IAAI,EAAE;YACJ,GAAG,EAAE,OAAO,CAAC,GAAG;YAChB,OAAO,EAAE;gBACP,KAAK,EAAE,OAAO,CAAC,QAAQ;aACxB;SACF;KACF,CAAC,CAAC;IAEH,OAAO;QACL,KAAK,EAAE,GAAG,CAAC,QAAQ,IAAI,EAAE;QACzB,GAAG,EAAE,GAAG,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG;QAC3B,MAAM,EAAE,oBAAoB,CAAC,GAAG,CAAC,MAAM,CAAC;KACzC,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,gBAAgB,CAAC,MAAoB,EAAE,KAAa;IACxE,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,GAAG,CAAyB,aAAa,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAE/F,MAAM,KAAK,GAAG,GAAG,CAAC,IAAI,EAAE,IAAI,IAAI,EAAE,CAAC;IAEnC,OAAO;QACL,KAAK,EAAE,GAAG,CAAC,QAAQ,IAAI,KAAK;QAC5B,GAAG,EAAE,GAAG,CAAC,GAAG,IAAI,EAAE;QAClB,MAAM,EAAE,oBAAoB,CAAC,GAAG,CAAC,MAAM,CAAC;QACxC,cAAc,EAAE,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS;QAC/D,WAAW,EAAE,CAAC;QACd,SAAS,EAAE,GAAG,CAAC,UAAU;QACzB,UAAU,EAAE,GAAG,CAAC,QAAQ;KACzB,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,sBAAsB,CAC1C,MAAoB,EACpB,KAAa,EACb,UAAkB,EAClB,SAAiB;IAEjB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAE7B,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,IAAI,SAAS,EAAE,CAAC;QAC3C,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACrD,IAAI,uBAAuB,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC;YAC/C,OAAO,MAAM,CAAC;QAChB,CAAC;QAED,MAAM,IAAA,qBAAK,EAAC,UAAU,CAAC,CAAC;IAC1B,CAAC;IAED,MAAM,IAAI,iBAAQ,CAChB,qCAAqC,KAAK,EAAE,EAC5C,gFAAgF,CACjF,CAA
C;AACJ,CAAC"}
@@ -0,0 +1,2 @@
1
import type { ApiTransport, CreditsResponse } from '../types/api';
/** Retrieve account credit information via GET /v1/credits. */
export declare function fetchCredits(client: ApiTransport): Promise<CreditsResponse>;
@@ -0,0 +1,7 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.fetchCredits = fetchCredits;
4
/** Retrieve account credit information via GET /v1/credits. */
async function fetchCredits(client) {
    const endpoint = '/v1/credits';
    return client.get(endpoint);
}
7
+ //# sourceMappingURL=credits.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"credits.js","sourceRoot":"","sources":["../../src/api/credits.ts"],"names":[],"mappings":";;AAEA,oCAEC;AAFM,KAAK,UAAU,YAAY,CAAC,MAAoB;IACrD,OAAO,MAAM,CAAC,GAAG,CAAkB,aAAa,CAAC,CAAC;AACpD,CAAC"}
@@ -0,0 +1,2 @@
1
import type { ApiTransport, MapRequest, MapResponse } from '../types/api';
/** Fetch the link map for a site via POST /v1/map. */
export declare function fetchMap(client: ApiTransport, request: MapRequest): Promise<MapResponse>;
@@ -0,0 +1,29 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.fetchMap = fetchMap;
4
/**
 * Fetch the link map for a site via POST /v1/map.
 *
 * Normalizes each returned link to `{ url, title? }`: plain strings become
 * `{ url }`, and malformed entries without a url are dropped. `total`
 * prefers the API-reported `total_links`, falling back to the normalized
 * link count.
 */
async function fetchMap(client, request) {
    const raw = await client.post('/v1/map', {
        body: {
            url: request.url,
            limit: request.limit
        }
    });
    const links = [];
    for (const item of raw.data?.links ?? []) {
        if (typeof item === 'string') {
            links.push({ url: item });
        }
        else if (item?.url) {
            links.push({ url: item.url, title: item.title });
        }
    }
    return {
        url: raw.url ?? request.url,
        links,
        total: raw.data?.total_links ?? links.length
    };
}
29
+ //# sourceMappingURL=map.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"map.js","sourceRoot":"","sources":["../../src/api/map.ts"],"names":[],"mappings":";;AAEA,4BA4BC;AA5BM,KAAK,UAAU,QAAQ,CAAC,MAAoB,EAAE,OAAmB;IACtE,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,IAAI,CAAiB,SAAS,EAAE;QACvD,IAAI,EAAE;YACJ,GAAG,EAAE,OAAO,CAAC,GAAG;YAChB,KAAK,EAAE,OAAO,CAAC,KAAK;SACrB;KACF,CAAC,CAAC;IAEH,MAAM,QAAQ,GAAG,GAAG,CAAC,IAAI,EAAE,KAAK,IAAI,EAAE,CAAC;IACvC,MAAM,KAAK,GAAG,QAAQ;SACnB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACZ,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC7B,OAAO,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;QACvB,CAAC;QAED,IAAI,IAAI,EAAE,GAAG,EAAE,CAAC;YACd,OAAO,EAAE,GAAG,EAAE,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC;QAC9C,CAAC;QAED,OAAO,SAAS,CAAC;IACnB,CAAC,CAAC;SACD,MAAM,CAAC,CAAC,IAAI,EAA2C,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;IAE5E,OAAO;QACL,GAAG,EAAE,GAAG,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG;QAC3B,KAAK;QACL,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,WAAW,IAAI,KAAK,CAAC,MAAM;KAC7C,CAAC;AACJ,CAAC"}
@@ -0,0 +1,2 @@
1
import type { ApiTransport, ScrapeRequest, ScrapeResponse } from '../types/api';
/** Scrape a single URL via POST /v1/scrape and return the normalized content. */
export declare function scrapeUrl(client: ApiTransport, request: ScrapeRequest): Promise<ScrapeResponse>;
@@ -0,0 +1,75 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.scrapeUrl = scrapeUrl;
4
/**
 * Map the CLI's requested output format to the API's format token.
 * 'html' | 'screenshot' | 'json' pass through; anything else (including
 * undefined) falls back to 'markdown'.
 */
function mapFormat(format) {
    if (format === 'html' || format === 'screenshot' || format === 'json') {
        return format;
    }
    return 'markdown';
}
16
/**
 * Parse a "k=v; k2=v2" cookie string into a name->value record.
 * Segments without an '=' or with a blank name/value are skipped; returns
 * `undefined` when the input is empty or yields no cookies.
 */
function parseCookieString(input) {
    if (!input) {
        return undefined;
    }
    const entries = [];
    for (const segment of input.split(';')) {
        const eq = segment.indexOf('=');
        if (eq === -1) {
            continue;
        }
        const name = segment.slice(0, eq).trim();
        // Everything after the first '=' is the value (values may contain '=').
        const value = segment.slice(eq + 1).trim();
        if (name && value) {
            entries.push([name, value]);
        }
    }
    return entries.length > 0 ? Object.fromEntries(entries) : undefined;
}
31
/**
 * Extract the content string for the requested format from a raw scrape
 * response, falling back to '' when the expected field is absent.
 * html: data.html then data.raw_html; screenshot: data.screenshot;
 * json: pretty-printed data.json; otherwise markdown -> summary -> html.
 */
function pickContent(raw, format) {
    const data = raw.data ?? {};
    switch (format) {
        case 'html':
            return data.html ?? data.raw_html ?? '';
        case 'screenshot':
            return data.screenshot ?? '';
        case 'json':
            return data.json === undefined ? '' : JSON.stringify(data.json, null, 2);
        default:
            return data.markdown ?? data.summary ?? data.html ?? '';
    }
}
44
/**
 * Scrape a single URL via POST /v1/scrape and normalize the response.
 *
 * Builds the request payload from the desired output format, optional
 * headers/cookies, and optional proxy location, then extracts the content
 * matching the requested format from the response.
 */
async function scrapeUrl(client, request) {
    const format = request.format ?? 'markdown';
    const payload = {
        url: request.url,
        output: {
            formats: [mapFormat(format)]
        }
    };
    // Only attach a request section when there is something to send.
    const cookies = parseCookieString(request.cookies);
    if (request.headers || cookies) {
        payload.request = {
            headers: request.headers,
            cookies
        };
    }
    if (request.proxy) {
        payload.proxy = {
            location: request.proxy
        };
    }
    const response = await client.post('/v1/scrape', {
        body: payload,
        timeoutMs: request.timeoutMs
    });
    return {
        url: response.url ?? request.url,
        format,
        content: pickContent(response, format),
        metadata: response.data?.metadata
    };
}
75
+ //# sourceMappingURL=scrape.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrape.js","sourceRoot":"","sources":["../../src/api/scrape.ts"],"names":[],"mappings":";;AAoDA,8BAmCC;AApFD,SAAS,SAAS,CAAC,MAAoB;IACrC,QAAQ,MAAM,EAAE,CAAC;QACf,KAAK,MAAM;YACT,OAAO,MAAM,CAAC;QAChB,KAAK,YAAY;YACf,OAAO,YAAY,CAAC;QACtB,KAAK,MAAM;YACT,OAAO,MAAM,CAAC;QAChB;YACE,OAAO,UAAU,CAAC;IACtB,CAAC;AACH,CAAC;AAED,SAAS,iBAAiB,CAAC,KAAyB;IAClD,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,MAAM,OAAO,GAA2B,EAAE,CAAC;IAC3C,KAAK,MAAM,IAAI,IAAI,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC;QACpC,MAAM,CAAC,MAAM,EAAE,GAAG,QAAQ,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACrD,MAAM,GAAG,GAAG,MAAM,EAAE,IAAI,EAAE,CAAC;QAC3B,MAAM,KAAK,GAAG,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC;QACxC,IAAI,GAAG,IAAI,KAAK,EAAE,CAAC;YACjB,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;QACvB,CAAC;IACH,CAAC;IAED,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC;AAC/D,CAAC;AAED,SAAS,WAAW,CAAC,GAAsB,EAAE,MAAoB;IAC/D,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,CAAC;IAE5B,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;QACtB,OAAO,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,IAAI,EAAE,CAAC;IAC1C,CAAC;IAED,IAAI,MAAM,KAAK,YAAY,EAAE,CAAC;QAC5B,OAAO,IAAI,CAAC,UAAU,IAAI,EAAE,CAAC;IAC/B,CAAC;IAED,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;QACtB,OAAO,IAAI,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IAC3E,CAAC;IAED,OAAO,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;AAC1D,CAAC;AAEM,KAAK,UAAU,SAAS,CAAC,MAAoB,EAAE,OAAsB;IAC1E,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,IAAI,UAAU,CAAC;IAE5C,MAAM,IAAI,GAA4B;QACpC,GAAG,EAAE,OAAO,CAAC,GAAG;QAChB,MAAM,EAAE;YACN,OAAO,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;SAC7B;KACF,CAAC;IAEF,MAAM,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACnD,IAAI,OAAO,CAAC,OAAO,IAAI,OAAO,EAAE,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG;YACb,OAAO,EAAE,OAAO,CAAC,OAAO;YACxB,OAAO;SACR,CAAC;IACJ,CAAC;IAED,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;QAClB,IAAI,CAAC,KAAK,GAAG;YACX,QAAQ,EAAE,OAAO,CAA
C,KAAK;SACxB,CAAC;IACJ,CAAC;IAED,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,IAAI,CAAoB,YAAY,EAAE;QAC7D,IAAI;QACJ,SAAS,EAAE,OAAO,CAAC,SAAS;KAC7B,CAAC,CAAC;IAEH,OAAO;QACL,GAAG,EAAE,GAAG,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG;QAC3B,MAAM;QACN,OAAO,EAAE,WAAW,CAAC,GAAG,EAAE,MAAM,CAAC;QACjC,QAAQ,EAAE,GAAG,CAAC,IAAI,EAAE,QAAQ;KAC7B,CAAC;AACJ,CAAC"}
@@ -0,0 +1,2 @@
1
import type { ApiTransport, SearchRequest, SearchResponse } from '../types/api';
/** Run a web search via POST /v1/search and return normalized results. */
export declare function searchWeb(client: ApiTransport, request: SearchRequest): Promise<SearchResponse>;
@@ -0,0 +1,25 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.searchWeb = searchWeb;
4
/**
 * Run a web search via POST /v1/search and normalize the results.
 * Entries without a usable url string are dropped; blank titles become
 * 'Untitled'.
 */
async function searchWeb(client, request) {
    const raw = await client.post('/v1/search', {
        body: {
            query: request.query,
            limit: request.limit,
            location: request.country,
            language: request.language
        }
    });
    const results = [];
    for (const item of raw.data?.data ?? []) {
        if (typeof item.url !== 'string' || item.url.length === 0) {
            continue;
        }
        results.push({
            title: item.title?.trim() || 'Untitled',
            url: item.url,
            snippet: item.description
        });
    }
    return {
        query: raw.query ?? request.query,
        results
    };
}
+ //# sourceMappingURL=search.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"search.js","sourceRoot":"","sources":["../../src/api/search.ts"],"names":[],"mappings":";;AAEA,8BAsBC;AAtBM,KAAK,UAAU,SAAS,CAAC,MAAoB,EAAE,OAAsB;IAC1E,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,IAAI,CAAoB,YAAY,EAAE;QAC7D,IAAI,EAAE;YACJ,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,QAAQ,EAAE,OAAO,CAAC,OAAO;YACzB,QAAQ,EAAE,OAAO,CAAC,QAAQ;SAC3B;KACF,CAAC,CAAC;IAEH,MAAM,UAAU,GAAG,GAAG,CAAC,IAAI,EAAE,IAAI,IAAI,EAAE,CAAC;IAExC,OAAO;QACL,KAAK,EAAE,GAAG,CAAC,KAAK,IAAI,OAAO,CAAC,KAAK;QACjC,OAAO,EAAE,UAAU;aAChB,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,IAAI,CAAC,GAAG,KAAK,QAAQ,IAAI,IAAI,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC;aACrE,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;YACd,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,UAAU;YACvC,GAAG,EAAE,IAAI,CAAC,GAAa;YACvB,OAAO,EAAE,IAAI,CAAC,WAAW;SAC1B,CAAC,CAAC;KACN,CAAC;AACJ,CAAC"}
@@ -0,0 +1,2 @@
1
import type { ApiTransport, WhoAmIResponse } from '../types/api';
/** Retrieve the authenticated account identity via GET /v1/whoami. */
export declare function fetchWhoAmI(client: ApiTransport): Promise<WhoAmIResponse>;
@@ -0,0 +1,7 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.fetchWhoAmI = fetchWhoAmI;
4
/** Retrieve the authenticated account identity via GET /v1/whoami. */
async function fetchWhoAmI(client) {
    const endpoint = '/v1/whoami';
    return client.get(endpoint);
}
7
+ //# sourceMappingURL=whoami.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"whoami.js","sourceRoot":"","sources":["../../src/api/whoami.ts"],"names":[],"mappings":";;AAEA,kCAEC;AAFM,KAAK,UAAU,WAAW,CAAC,MAAoB;IACpD,OAAO,MAAM,CAAC,GAAG,CAAiB,YAAY,CAAC,CAAC;AAClD,CAAC"}
@@ -0,0 +1,3 @@
1
import type { Command } from 'commander';
import type { CliContext } from '../types/cli';
/** Register the `config` command group (get/set/keys) on the CLI program. */
export declare function registerConfigCommand(program: Command, context: CliContext): void;
@@ -0,0 +1,64 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.registerConfigCommand = registerConfigCommand;
4
+ const config_1 = require("../core/config");
5
+ const errors_1 = require("../core/errors");
6
+ const output_1 = require("../core/output");
7
+ const text_1 = require("../formatters/text");
8
// Keys the `config get`/`config set` subcommands accept.
const SUPPORTED_CONFIG_KEYS = ['api-key', 'api-base-url', 'default-format', 'output-dir', 'timeout-ms', 'debug'];
/**
 * Register the `config` command group (get/set/keys) on the CLI program.
 * Every subcommand supports `--json` for machine-readable output.
 */
function registerConfigCommand(program, context) {
    const keyList = SUPPORTED_CONFIG_KEYS.join(', ');
    // Shared guard: reject unknown config keys with an actionable error.
    const assertKnownKey = (key) => {
        if (!(0, config_1.isConfigKey)(key)) {
            throw new errors_1.ValidationError(`Unsupported config key: ${key}`, `Use one of: ${keyList}.`);
        }
    };
    const config = program.command('config').description('Read and update local CLI config');
    config
        .command('get')
        .description('Get a config value by key')
        .argument('<key>', `Config key (${keyList})`)
        .option('--json', 'Output result as JSON')
        .action(async (key, options) => {
        assertKnownKey(key);
        const value = await (0, config_1.getConfigValue)(key, context.homeDir);
        await (0, output_1.renderOutput)({
            ctx: { stdout: context.stdout },
            data: { key, value },
            json: options.json,
            renderText: () => (0, text_1.formatConfigGet)(key, value)
        });
    });
    config
        .command('set')
        .description('Set a config value by key')
        .argument('<key>', `Config key (${keyList})`)
        .argument('<value>', 'Config value')
        .option('--json', 'Output result as JSON')
        .action(async (key, value, options) => {
        assertKnownKey(key);
        const result = await (0, config_1.setConfigValue)(key, value, context.homeDir);
        await (0, output_1.renderOutput)({
            ctx: { stdout: context.stdout },
            data: {
                key,
                field: result.field,
                value: result.value,
                configPath: result.configPath
            },
            json: options.json,
            renderText: () => (0, text_1.formatConfigSet)(key, result.value, result.configPath)
        });
    });
    config
        .command('keys')
        .description('List all supported config keys')
        .option('--json', 'Output result as JSON')
        .action(async (options) => {
        await (0, output_1.renderOutput)({
            ctx: { stdout: context.stdout },
            data: { keys: SUPPORTED_CONFIG_KEYS },
            json: options.json,
            renderText: () => SUPPORTED_CONFIG_KEYS.join('\n')
        });
    });
}
64
+ //# sourceMappingURL=config.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/commands/config.ts"],"names":[],"mappings":";;AAcA,sDAmEC;AA/ED,2CAA6E;AAC7E,2CAAiD;AACjD,2CAA8C;AAC9C,6CAAsE;AAOtE,MAAM,qBAAqB,GAAG,CAAC,SAAS,EAAE,cAAc,EAAE,gBAAgB,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,CAAC,CAAC;AAEjH,SAAgB,qBAAqB,CAAC,OAAgB,EAAE,OAAmB;IACzE,MAAM,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,WAAW,CAAC,kCAAkC,CAAC,CAAC;IAEzF,MAAM;SACH,OAAO,CAAC,KAAK,CAAC;SACd,WAAW,CAAC,2BAA2B,CAAC;SACxC,QAAQ,CAAC,OAAO,EAAE,eAAe,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC;SACrE,MAAM,CAAC,QAAQ,EAAE,uBAAuB,CAAC;SACzC,MAAM,CAAC,KAAK,EAAE,GAAW,EAAE,OAA6B,EAAE,EAAE;QAC3D,IAAI,CAAC,IAAA,oBAAW,EAAC,GAAG,CAAC,EAAE,CAAC;YACtB,MAAM,IAAI,wBAAe,CACvB,2BAA2B,GAAG,EAAE,EAChC,eAAe,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CACnD,CAAC;QACJ,CAAC;QAED,MAAM,KAAK,GAAG,MAAM,IAAA,uBAAc,EAAC,GAAG,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;QAEzD,MAAM,IAAA,qBAAY,EAAC;YACjB,GAAG,EAAE,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE;YAC/B,IAAI,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE;YACpB,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,UAAU,EAAE,GAAG,EAAE,CAAC,IAAA,sBAAe,EAAC,GAAG,EAAE,KAAK,CAAC;SAC9C,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEL,MAAM;SACH,OAAO,CAAC,KAAK,CAAC;SACd,WAAW,CAAC,2BAA2B,CAAC;SACxC,QAAQ,CAAC,OAAO,EAAE,eAAe,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC;SACrE,QAAQ,CAAC,SAAS,EAAE,cAAc,CAAC;SACnC,MAAM,CAAC,QAAQ,EAAE,uBAAuB,CAAC;SACzC,MAAM,CAAC,KAAK,EAAE,GAAW,EAAE,KAAa,EAAE,OAA6B,EAAE,EAAE;QAC1E,IAAI,CAAC,IAAA,oBAAW,EAAC,GAAG,CAAC,EAAE,CAAC;YACtB,MAAM,IAAI,wBAAe,CACvB,2BAA2B,GAAG,EAAE,EAChC,eAAe,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CACnD,CAAC;QACJ,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,IAAA,uBAAc,EAAC,GAAG,EAAE,KAAK,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;QAEjE,MAAM,IAAA,qBAAY,EAAC;YACjB,GAAG,EAAE,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE;YAC/B,IAAI,EAAE;gBACJ,GAAG;gBACH,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,UAAU,EAAE,MAAM,CAAC,UAAU;aAC9B;YACD,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,UAAU,EAAE,GAAG,EAAE,CAAC,IAAA,sBAAe,EAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,UAAU,CAAC;SACxE,CAAC,CAAC;IACL,C
AAC,CAAC,CAAC;IAEL,MAAM;SACH,OAAO,CAAC,MAAM,CAAC;SACf,WAAW,CAAC,gCAAgC,CAAC;SAC7C,MAAM,CAAC,QAAQ,EAAE,uBAAuB,CAAC;SACzC,MAAM,CAAC,KAAK,EAAE,OAA6B,EAAE,EAAE;QAC9C,MAAM,IAAA,qBAAY,EAAC;YACjB,GAAG,EAAE,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE;YAC/B,IAAI,EAAE,EAAE,IAAI,EAAE,qBAAqB,EAAE;YACrC,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,UAAU,EAAE,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC;SACnD,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACP,CAAC"}
@@ -0,0 +1,3 @@
1
import type { Command } from 'commander';
import type { CliContext } from '../types/cli';
/** Register the `crawl` command on the CLI program. */
export declare function registerCrawlCommand(program: Command, context: CliContext): void;