openclaw-crawleo-skill 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/client.js ADDED
@@ -0,0 +1,155 @@
1
+ import { createEndpointMethods } from './endpoints.js';
2
+ import { CRAWLEO_BASE_URL, getEndpointByPath } from './contract.js';
3
+ import { CrawleoError, CRAWLEO_ERROR_CODES, errorCodeForStatus, redactSecret } from './errors.js';
4
+
5
/**
 * Build a frozen Crawleo REST client.
 *
 * Configuration falls back to the environment (CRAWLEO_API_KEY) and the
 * global fetch when not supplied explicitly.
 *
 * @param {object} [options]
 * @param {string} [options.apiKey] - API key; defaults to process.env.CRAWLEO_API_KEY.
 * @param {Function} [options.fetch] - fetch-compatible implementation; defaults to globalThis.fetch.
 * @param {string} [options.baseUrl] - API origin; defaults to CRAWLEO_BASE_URL.
 * @returns {Readonly<object>} Frozen client exposing `request` plus one method per endpoint.
 */
export function createCrawleoClient(options = {}) {
  // Resolve configuration once; `??` keeps explicit falsy-but-present values.
  const key = options.apiKey ?? process.env.CRAWLEO_API_KEY;
  const doFetch = options.fetch ?? globalThis.fetch;
  const root = normalizeBaseUrl(options.baseUrl ?? CRAWLEO_BASE_URL);

  const core = {
    baseUrl: root,
    request: (endpointPath, params = {}) =>
      requestCrawleo({ apiKey: key, fetchImpl: doFetch, baseUrl: root, endpointPath, params })
  };

  // Merge the generic request surface with per-endpoint helpers and freeze it.
  return Object.freeze(Object.assign({}, core, createEndpointMethods(core)));
}
22
+
23
/**
 * Execute one live Crawleo REST request and return the parsed JSON body.
 *
 * Throws CrawleoError for every failure mode: unknown endpoint path, missing
 * API key, missing fetch implementation, transport failure, HTTP error
 * status, or malformed JSON in the response. The API key is always listed as
 * a secret so diagnostics are redacted.
 *
 * @param {object} opts
 * @param {string} opts.apiKey - Crawleo API key sent as the x-api-key header.
 * @param {Function} opts.fetchImpl - fetch-compatible implementation.
 * @param {string} [opts.baseUrl] - API origin; defaults to CRAWLEO_BASE_URL.
 * @param {string} opts.endpointPath - Documented path such as '/search'.
 * @param {object} [opts.params] - Query parameters.
 * @returns {Promise<unknown>} Parsed JSON response body (null when empty).
 */
export async function requestCrawleo({ apiKey, fetchImpl, baseUrl = CRAWLEO_BASE_URL, endpointPath, params = {} }) {
  const endpoint = getEndpointByPath(endpointPath);

  // Reject anything outside the documented contract before touching the key.
  if (endpoint === undefined) {
    throw new CrawleoError(`Unknown Crawleo endpoint: ${endpointPath}`, {
      code: CRAWLEO_ERROR_CODES.VALIDATION,
      endpoint: endpointPath,
      field: 'endpointPath'
    });
  }

  if (!apiKey) {
    throw new CrawleoError('CRAWLEO_API_KEY is required for live Crawleo REST calls.', {
      code: CRAWLEO_ERROR_CODES.MISSING_API_KEY,
      endpoint: endpointPath
    });
  }

  if (typeof fetchImpl !== 'function') {
    throw new CrawleoError('A fetch implementation is required for Crawleo REST calls.', {
      code: CRAWLEO_ERROR_CODES.MISSING_FETCH,
      endpoint: endpointPath,
      secrets: [apiKey]
    });
  }

  const url = buildCrawleoUrl(baseUrl, endpoint.path, params);
  const requestInit = {
    method: endpoint.method,
    headers: {
      'x-api-key': apiKey,
      accept: 'application/json'
    }
  };

  let response;
  try {
    response = await fetchImpl(url, requestInit);
  } catch (error) {
    // Network-level failure: no HTTP response was ever received.
    throw new CrawleoError('Crawleo request failed before receiving a response.', {
      code: CRAWLEO_ERROR_CODES.TRANSPORT,
      endpoint: endpoint.path,
      details: { cause: error instanceof Error ? error.message : String(error) },
      secrets: [apiKey]
    });
  }

  return parseCrawleoResponse(response, { endpointPath: endpoint.path, apiKey });
}
70
+
71
/**
 * Build the absolute URL for a Crawleo endpoint.
 *
 * Fix: the previous `new URL(endpointPath, base + '/')` resolution discarded
 * any path prefix on the base URL (e.g. 'https://proxy.example/v1' + '/search'
 * became 'https://proxy.example/search') because an absolute path replaces
 * the base's path per WHATWG URL resolution. The endpoint path is now
 * appended to the trimmed base instead; results for the default base are
 * unchanged.
 *
 * @param {string|URL} baseUrl - API origin, optionally with a path prefix.
 * @param {string} endpointPath - Endpoint path such as '/crawl'.
 * @param {object} [params] - Query parameters; null/undefined entries are
 *   skipped and arrays are serialized comma-separated.
 * @returns {URL}
 */
export function buildCrawleoUrl(baseUrl, endpointPath, params = {}) {
  const root = String(baseUrl).replace(/\/+$/, '');
  const rawPath = String(endpointPath);
  const path = rawPath.startsWith('/') ? rawPath : `/${rawPath}`;
  const url = new URL(`${root}${path}`);

  for (const [key, value] of Object.entries(params)) {
    if (value === undefined || value === null) continue; // omit empty params
    const encodedValue = Array.isArray(value) ? value.join(',') : String(value);
    url.searchParams.set(key, encodedValue);
  }

  return url;
}
82
+
83
/**
 * Turn a fetch Response (or a minimal stub providing status/headers/text)
 * into parsed JSON, translating HTTP failures and malformed bodies into
 * CrawleoError instances with redacted diagnostics.
 */
async function parseCrawleoResponse(response, { endpointPath, apiKey }) {
  const status = response.status;
  const ok = response.ok ?? (status >= 200 && status < 300);
  const rawBody = await readResponseBody(response);
  const contentType = typeof response.headers?.get === 'function'
    ? response.headers.get('content-type') || ''
    : '';

  // Attempt JSON parsing when the content type or body shape suggests JSON.
  const trimmed = rawBody.trim();
  const looksLikeJson = contentType.includes('application/json') || trimmed.startsWith('{') || trimmed.startsWith('[');

  let parsedBody = null;
  let malformedJson = false;
  if (rawBody && looksLikeJson) {
    try {
      parsedBody = JSON.parse(rawBody);
    } catch {
      malformedJson = true;
    }
  }

  if (!ok) {
    // HTTP-level failure takes precedence over any JSON parse problem.
    throw new CrawleoError(`Crawleo request failed with HTTP ${status}.`, {
      code: errorCodeForStatus(status),
      endpoint: endpointPath,
      status,
      details: summarizeResponseBody(parsedBody, rawBody, contentType, apiKey),
      secrets: [apiKey]
    });
  }

  if (malformedJson) {
    throw new CrawleoError('Crawleo returned malformed JSON.', {
      code: CRAWLEO_ERROR_CODES.RESPONSE_MALFORMED_JSON,
      endpoint: endpointPath,
      status,
      details: {
        contentType,
        bodyPreview: redactSecret(rawBody.slice(0, 500), [apiKey])
      },
      secrets: [apiKey]
    });
  }

  return parsedBody;
}
126
+
127
// Extract the response body as text, tolerating minimal fetch-like stubs:
// prefer .text(), fall back to re-serializing .json(), else empty string.
async function readResponseBody(response) {
  if (typeof response.text === 'function') {
    return response.text();
  }
  if (typeof response.json === 'function') {
    const parsed = await response.json();
    return JSON.stringify(parsed);
  }
  return '';
}
132
+
133
// Condense an error-response body into a small diagnostic object, redacting
// the API key from any raw-text preview. Prefers the well-known error fields
// when the body is an object; otherwise falls back to the whole body.
function summarizeResponseBody(body, rawBody, contentType, apiKey) {
  if (body == null) {
    if (!rawBody) return null;
    return {
      contentType,
      bodyPreview: redactSecret(rawBody.slice(0, 500), [apiKey])
    };
  }
  if (typeof body !== 'object') {
    return { body };
  }

  const interestingKeys = ['error', 'message', 'code', 'details'];
  const summary = {};
  for (const key of interestingKeys) {
    if (Object.hasOwn(body, key)) summary[key] = body[key];
  }
  return Object.keys(summary).length === 0 ? { body } : summary;
}
152
+
153
// Trim every trailing slash so URL joining stays predictable.
function normalizeBaseUrl(baseUrl) {
  let text = String(baseUrl);
  while (text.endsWith('/')) {
    text = text.slice(0, -1);
  }
  return text;
}
@@ -0,0 +1,50 @@
1
// Production origin for the Crawleo REST API.
export const CRAWLEO_BASE_URL = 'https://api.crawleo.dev';

/** Freeze one endpoint descriptor: id, path, method, validation metadata. */
const endpoint = ({ id, path, method = 'GET', requiredQuery, enumQuery = {} }) =>
  Object.freeze({ id, path, method, requiredQuery, enumQuery: Object.freeze(enumQuery) });

// Declarative contract for every documented Crawleo endpoint. Wrappers use
// requiredQuery/enumQuery for client-side validation before any request.
export const CRAWLEO_ENDPOINTS = Object.freeze({
  search: endpoint({
    id: 'search',
    path: '/search',
    requiredQuery: ['query'],
    enumQuery: { device: ['desktop', 'mobile', 'tablet'] }
  }),
  googleSearch: endpoint({
    id: 'google_search',
    path: '/google-search',
    requiredQuery: ['q'],
    enumQuery: {
      tbs: ['qdr:h', 'qdr:d', 'qdr:w', 'qdr:m', 'qdr:y'],
      type: ['search', 'news', 'images', 'places', 'shopping']
    }
  }),
  googleMaps: endpoint({
    id: 'google_maps',
    path: '/google-maps',
    requiredQuery: ['q']
  }),
  crawl: endpoint({
    id: 'crawl',
    path: '/crawl',
    requiredQuery: ['urls']
  }),
  headfulBrowser: endpoint({
    id: 'headful_browser',
    path: '/headful-browser',
    requiredQuery: ['urls'],
    enumQuery: { output_format: ['markdown', 'enhanced_html', 'raw_html', 'page_text'] }
  })
});

// Lookup table keyed by REST path, e.g. '/search' -> search descriptor.
export const CRAWLEO_ENDPOINTS_BY_PATH = Object.freeze(
  Object.fromEntries(Object.values(CRAWLEO_ENDPOINTS).map((descriptor) => [descriptor.path, descriptor]))
);

/** Resolve an endpoint descriptor by REST path; undefined when unknown. */
export function getEndpointByPath(path) {
  return CRAWLEO_ENDPOINTS_BY_PATH[path];
}
@@ -0,0 +1,78 @@
1
+ import { CRAWLEO_ENDPOINTS } from './contract.js';
2
+ import { CrawleoError, CRAWLEO_ERROR_CODES } from './errors.js';
3
+
4
// Thin per-endpoint wrappers: each validates parameters against the endpoint
// contract and then issues the request through the supplied client.

/** Web search (GET /search). */
export const search = (client, params = {}) =>
  callEndpoint(client, CRAWLEO_ENDPOINTS.search, params);

/** Google SERP search (GET /google-search). */
export const googleSearch = (client, params = {}) =>
  callEndpoint(client, CRAWLEO_ENDPOINTS.googleSearch, params);

/** Google Maps lookup (GET /google-maps). */
export const googleMaps = (client, params = {}) =>
  callEndpoint(client, CRAWLEO_ENDPOINTS.googleMaps, params);

/** Page crawl (GET /crawl); `urls` arrays are serialized comma-separated. */
export const crawl = (client, params = {}) =>
  callEndpoint(client, CRAWLEO_ENDPOINTS.crawl, normalizeUrlListParams(params));

/** Headful browser rendering (GET /headful-browser); `urls` arrays likewise. */
export const headfulBrowser = (client, params = {}) =>
  callEndpoint(client, CRAWLEO_ENDPOINTS.headfulBrowser, normalizeUrlListParams(params));
23
+
24
/**
 * Validate params against an endpoint descriptor from the contract.
 *
 * @throws {CrawleoError} VALIDATION when a required field is absent/empty or
 *   an enum-constrained field holds an undocumented value.
 */
export function validateEndpointParams(endpoint, params = {}) {
  // "Missing" covers undefined, null, empty string, and empty arrays.
  const isMissing = (value) =>
    value === undefined || value === null || value === '' || (Array.isArray(value) && value.length === 0);

  for (const field of endpoint.requiredQuery) {
    if (isMissing(params[field])) {
      throw new CrawleoError(`Missing required Crawleo parameter: ${field}`, {
        code: CRAWLEO_ERROR_CODES.VALIDATION,
        endpoint: endpoint.path,
        field
      });
    }
  }

  for (const [field, allowedValues] of Object.entries(endpoint.enumQuery)) {
    const value = params[field];
    if (value === undefined || value === null) continue; // enum fields are optional
    if (allowedValues.includes(value)) continue;
    throw new CrawleoError(`Invalid Crawleo parameter ${field}: expected one of ${allowedValues.join(', ')}`, {
      code: CRAWLEO_ERROR_CODES.VALIDATION,
      endpoint: endpoint.path,
      field,
      details: { allowedValues, received: value }
    });
  }
}
48
+
49
/** Bind every endpoint wrapper to a client, returning a frozen method map. */
export function createEndpointMethods(client) {
  const bound = {
    search: (params) => search(client, params),
    googleSearch: (params) => googleSearch(client, params),
    googleMaps: (params) => googleMaps(client, params),
    crawl: (params) => crawl(client, params),
    headfulBrowser: (params) => headfulBrowser(client, params)
  };
  return Object.freeze(bound);
}
58
+
59
// Shared guard + dispatch for every wrapper: require a usable client, run
// contract validation, then delegate to client.request.
function callEndpoint(client, endpoint, params) {
  const hasRequest = typeof client?.request === 'function';
  if (!hasRequest) {
    throw new CrawleoError('A Crawleo client with a request method is required.', {
      code: CRAWLEO_ERROR_CODES.VALIDATION,
      endpoint: endpoint.path,
      field: 'client'
    });
  }

  validateEndpointParams(endpoint, params);
  return client.request(endpoint.path, params);
}
71
+
72
// Serialize a `urls` array into the comma-separated string form the REST API
// expects; all other params (and already-string urls) pass through untouched.
function normalizeUrlListParams(params) {
  if (!Object.hasOwn(params, 'urls')) return params;
  const { urls } = params;
  return {
    ...params,
    urls: Array.isArray(urls) ? urls.join(',') : urls
  };
}
package/src/errors.js ADDED
@@ -0,0 +1,89 @@
1
// Stable machine-readable error codes attached to every CrawleoError.
export const CRAWLEO_ERROR_CODES = Object.freeze({
  MISSING_API_KEY: 'CRAWLEO_CONFIG_MISSING_API_KEY',
  MISSING_FETCH: 'CRAWLEO_CONFIG_MISSING_FETCH',
  VALIDATION: 'CRAWLEO_VALIDATION_ERROR',
  HTTP_BAD_REQUEST: 'CRAWLEO_HTTP_BAD_REQUEST',
  HTTP_AUTH: 'CRAWLEO_HTTP_AUTH',
  HTTP_PAYMENT_REQUIRED: 'CRAWLEO_HTTP_PAYMENT_REQUIRED',
  HTTP_FORBIDDEN: 'CRAWLEO_HTTP_FORBIDDEN',
  HTTP_RATE_LIMIT: 'CRAWLEO_HTTP_RATE_LIMIT',
  HTTP_UPSTREAM: 'CRAWLEO_HTTP_UPSTREAM',
  RESPONSE_MALFORMED_JSON: 'CRAWLEO_RESPONSE_MALFORMED_JSON',
  TRANSPORT: 'CRAWLEO_TRANSPORT_ERROR'
});

// Header/assignment shapes that may leak credentials into messages or logs.
const SECRET_PATTERNS = [
  /x-api-key\s*[:=]\s*[^\s,}]+/gi,
  /authorization\s*[:=]\s*bearer\s+[^\s,}]+/gi,
  /api[_-]?key\s*[:=]\s*[^\s,}]+/gi
];

/**
 * Replace known secret values and credential-looking patterns with
 * '[REDACTED]'. Null/undefined input is returned unchanged.
 *
 * @param {unknown} value - Text (or value coerced to text) to scrub.
 * @param {Array<string>} [explicitSecrets] - Literal secrets to remove first.
 * @returns {unknown}
 */
export function redactSecret(value, explicitSecrets = []) {
  if (value == null) return value;

  let text = String(value);
  // Literal secrets first, so patterns never see the real value.
  for (const secret of explicitSecrets) {
    if (!secret) continue;
    text = text.replaceAll(String(secret), '[REDACTED]');
  }

  for (const pattern of SECRET_PATTERNS) {
    text = text.replace(pattern, (match) => {
      const separator = match.includes('=') ? '=' : ':';
      const [label] = match.split(separator);
      return `${label}${separator} [REDACTED]`;
    });
  }

  return text;
}

/**
 * Structured error covering every failure mode of the Crawleo skill.
 * The message and nested details are redacted at construction time, so the
 * error is safe to serialize or log.
 */
export class CrawleoError extends Error {
  constructor(message, options = {}) {
    super(redactSecret(message, options.secrets));
    this.name = 'CrawleoError';
    this.code = options.code || CRAWLEO_ERROR_CODES.HTTP_UPSTREAM;
    this.endpoint = options.endpoint;
    this.status = options.status;
    this.field = options.field;
    this.details = redactDetails(options.details, options.secrets);

    if (options.cause) {
      this.cause = options.cause;
    }
  }

  /** Plain-object form picked up by JSON.stringify. */
  toJSON() {
    const { name, code, message, endpoint, status, field, details } = this;
    return { name, code, message, endpoint, status, field, details };
  }
}

// Recursively apply redaction to strings nested inside details payloads.
function redactDetails(details, secrets = []) {
  if (details == null) return details;
  if (typeof details === 'string') return redactSecret(details, secrets);
  if (Array.isArray(details)) return details.map((item) => redactDetails(item, secrets));
  if (typeof details !== 'object') return details;
  const scrubbed = Object.entries(details).map(([key, value]) => [key, redactDetails(value, secrets)]);
  return Object.fromEntries(scrubbed);
}

/** Map an HTTP status code to the matching CRAWLEO_ERROR_CODES entry. */
export function errorCodeForStatus(status) {
  switch (status) {
    case 400: return CRAWLEO_ERROR_CODES.HTTP_BAD_REQUEST;
    case 401: return CRAWLEO_ERROR_CODES.HTTP_AUTH;
    case 402: return CRAWLEO_ERROR_CODES.HTTP_PAYMENT_REQUIRED;
    case 403: return CRAWLEO_ERROR_CODES.HTTP_FORBIDDEN;
    case 429: return CRAWLEO_ERROR_CODES.HTTP_RATE_LIMIT;
    default: return CRAWLEO_ERROR_CODES.HTTP_UPSTREAM;
  }
}
package/src/index.js ADDED
@@ -0,0 +1,37 @@
1
+ export { createCrawleoClient, requestCrawleo, buildCrawleoUrl } from './client.js';
2
+ export { search, googleSearch, googleMaps, crawl, headfulBrowser, validateEndpointParams } from './endpoints.js';
3
+ export { CRAWLEO_BASE_URL, CRAWLEO_ENDPOINTS, CRAWLEO_ENDPOINTS_BY_PATH, getEndpointByPath } from './contract.js';
4
+ export { CrawleoError, CRAWLEO_ERROR_CODES, redactSecret } from './errors.js';
5
+
6
// Repository-relative paths to the skill's contract and manifest assets.
export const CRAWLEO_CONTRACT_PATH = 'contracts/crawleo-endpoints.json';
export const CRAWLEO_SKILL_MANIFEST_PATH = 'skill.json';
export const CRAWLEO_SKILL_INSTRUCTIONS_PATH = 'SKILL.md';

// REST paths implemented by this skill, in documentation order.
export const CRAWLEO_REST_ENDPOINTS = Object.freeze([
  '/search',
  '/google-search',
  '/google-maps',
  '/crawl',
  '/headful-browser'
]);

// MCP tool names that mirror the REST endpoints above.
// NOTE(review): names presumably track an external MCP manifest — confirm.
export const CRAWLEO_MCP_TOOLS = Object.freeze([
  'search_web',
  'google_search',
  'google_maps',
  'crawl_web',
  'headful_browser'
]);

/**
 * Report a frozen snapshot of what this package currently implements:
 * asset paths, covered REST endpoints, mirrored MCP tool names, and the
 * default live-call policy.
 * @returns {Readonly<object>}
 */
export function getScaffoldStatus() {
  return Object.freeze({
    packageName: 'openclaw-crawleo-skill',
    implementationStatus: 'rest-wrappers-implemented',
    contractPath: CRAWLEO_CONTRACT_PATH,
    skillManifestPath: CRAWLEO_SKILL_MANIFEST_PATH,
    skillInstructionsPath: CRAWLEO_SKILL_INSTRUCTIONS_PATH,
    restEndpoints: CRAWLEO_REST_ENDPOINTS,
    mcpTools: CRAWLEO_MCP_TOOLS,
    liveCrawleoCallsEnabledByDefault: false
  });
}
@@ -0,0 +1,104 @@
1
// Tests for the client layer: URL building, header injection, and the
// structured/redacted error paths. All network traffic goes through an
// injected stub fetch — no live Crawleo calls are made.
import assert from 'node:assert/strict';
import test from 'node:test';

import { CrawleoError, CRAWLEO_ERROR_CODES, buildCrawleoUrl, createCrawleoClient } from '../src/index.js';

// Minimal fetch-Response stub: ok/status/headers plus an async text() body.
function jsonResponse(body, init = {}) {
  return {
    ok: init.ok ?? true,
    status: init.status ?? 200,
    headers: new Map([['content-type', 'application/json']]),
    async text() {
      return JSON.stringify(body);
    }
  };
}

test('buildCrawleoUrl constructs Crawleo URLs and comma-encodes array values', () => {
  const url = buildCrawleoUrl('https://api.crawleo.dev/', '/crawl', {
    urls: ['https://example.com/a', 'https://example.com/b'],
    markdown: true,
    empty: null
  });

  // Arrays join with ',', null params are dropped, values are URL-encoded.
  assert.equal(url.toString(), 'https://api.crawleo.dev/crawl?urls=https%3A%2F%2Fexample.com%2Fa%2Chttps%3A%2F%2Fexample.com%2Fb&markdown=true');
});

test('client request injects x-api-key and returns parsed JSON through an injected fetch', async () => {
  const calls = [];
  const client = createCrawleoClient({
    apiKey: 'secret-test-key',
    fetch: async (url, init) => {
      calls.push({ url, init });
      return jsonResponse({ ok: true, query: 'ai agents' });
    }
  });

  const result = await client.request('/search', { query: 'ai agents', max_pages: 1 });

  assert.deepEqual(result, { ok: true, query: 'ai agents' });
  assert.equal(calls.length, 1);
  assert.equal(calls[0].url.toString(), 'https://api.crawleo.dev/search?query=ai+agents&max_pages=1');
  assert.equal(calls[0].init.method, 'GET');
  assert.equal(calls[0].init.headers['x-api-key'], 'secret-test-key');
  assert.equal(calls[0].init.headers.accept, 'application/json');
});

test('client request reports missing API key as a structured CrawleoError', async () => {
  // Empty-string apiKey bypasses the env fallback yet still counts as missing.
  const client = createCrawleoClient({ apiKey: '', fetch: async () => jsonResponse({}) });

  await assert.rejects(
    () => client.request('/search', { query: 'ai agents' }),
    (error) => {
      assert.ok(error instanceof CrawleoError);
      assert.equal(error.code, CRAWLEO_ERROR_CODES.MISSING_API_KEY);
      assert.equal(error.endpoint, '/search');
      assert.deepEqual(error.toJSON(), {
        name: 'CrawleoError',
        code: CRAWLEO_ERROR_CODES.MISSING_API_KEY,
        message: 'CRAWLEO_API_KEY is required for live Crawleo REST calls.',
        endpoint: '/search',
        status: undefined,
        field: undefined,
        details: undefined
      });
      return true;
    }
  );
});

test('transport failures are redacted before serialization', async () => {
  const apiKey = 'secret-transport-key';
  const client = createCrawleoClient({
    apiKey,
    // Simulate a network failure whose message leaks the key.
    fetch: async () => {
      throw new Error(`network failed with apiKey=${apiKey}`);
    }
  });

  await assert.rejects(
    () => client.request('/search', { query: 'ai agents' }),
    (error) => {
      const serialized = JSON.stringify(error);
      assert.ok(error instanceof CrawleoError);
      assert.equal(error.code, CRAWLEO_ERROR_CODES.TRANSPORT);
      assert.equal(error.endpoint, '/search');
      // The key must never survive into serialized output.
      assert.equal(serialized.includes(apiKey), false);
      assert.match(serialized, /\[REDACTED\]/);
      return true;
    }
  );
});

test('unknown endpoint paths are validation errors', async () => {
  const client = createCrawleoClient({ apiKey: 'secret-test-key', fetch: async () => jsonResponse({}) });

  await assert.rejects(
    () => client.request('/not-crawleo', {}),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.VALIDATION);
      assert.equal(error.field, 'endpointPath');
      return true;
    }
  );
});
@@ -0,0 +1,130 @@
1
// Tests for the endpoint wrappers: dispatch to the right REST path, urls
// normalization, required/enum parameter validation, and client guarding.
import assert from 'node:assert/strict';
import test from 'node:test';

import {
  CRAWLEO_ERROR_CODES,
  crawl,
  createCrawleoClient,
  googleMaps,
  googleSearch,
  headfulBrowser,
  search
} from '../src/index.js';

// Build a client wired to a recording stub fetch that returns JSON echoing
// the requested pathname; `calls` captures every (url, init) pair.
function makeClient() {
  const calls = [];
  const client = createCrawleoClient({
    apiKey: 'secret-wrapper-key',
    fetch: async (url, init) => {
      calls.push({ url, init });
      return {
        ok: true,
        status: 200,
        headers: new Map([['content-type', 'application/json']]),
        async text() {
          return JSON.stringify({ ok: true, path: new URL(url).pathname });
        }
      };
    }
  });
  return { client, calls };
}

test('client exposes methods for every documented Crawleo endpoint', async () => {
  const { client, calls } = makeClient();

  await client.search({ query: 'ai agents', device: 'desktop' });
  await client.googleSearch({ q: 'best CRM', type: 'news', tbs: 'qdr:d' });
  await client.googleMaps({ q: 'restaurants in Paris', hl: 'fr' });
  await client.crawl({ urls: ['https://example.com/a', 'https://example.com/b'], markdown: true });
  await client.headfulBrowser({ urls: 'https://example.com', output_format: 'markdown' });

  assert.deepEqual(
    calls.map((call) => new URL(call.url).pathname),
    ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser']
  );
  // Array `urls` are serialized comma-separated before hitting the wire.
  assert.equal(new URL(calls[3].url).searchParams.get('urls'), 'https://example.com/a,https://example.com/b');
});

test('top-level endpoint wrappers accept a client object without accepting secrets', async () => {
  const { client, calls } = makeClient();

  await search(client, { query: 'ai agents' });
  await googleSearch(client, { q: 'ai agents' });
  await googleMaps(client, { q: 'coffee near Paris' });
  await crawl(client, { urls: 'https://example.com' });
  await headfulBrowser(client, { urls: ['https://example.com'], output_format: 'page_text' });

  assert.deepEqual(
    calls.map((call) => new URL(call.url).pathname),
    ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser']
  );
});

test('wrappers reject missing required parameters with endpoint and field diagnostics', () => {
  const { client } = makeClient();

  assert.throws(
    () => client.googleSearch({ type: 'news' }),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.VALIDATION);
      assert.equal(error.endpoint, '/google-search');
      assert.equal(error.field, 'q');
      return true;
    }
  );
});

test('wrappers validate only documented enum parameters', () => {
  const { client } = makeClient();

  assert.throws(
    () => client.search({ query: 'ai agents', device: 'watch' }),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.VALIDATION);
      assert.equal(error.endpoint, '/search');
      assert.equal(error.field, 'device');
      assert.deepEqual(error.details.allowedValues, ['desktop', 'mobile', 'tablet']);
      return true;
    }
  );

  assert.throws(
    () => client.googleSearch({ q: 'ai agents', type: 'videos' }),
    (error) => {
      assert.equal(error.endpoint, '/google-search');
      assert.equal(error.field, 'type');
      return true;
    }
  );

  assert.throws(
    () => client.googleSearch({ q: 'ai agents', tbs: 'qdr:decade' }),
    (error) => {
      assert.equal(error.endpoint, '/google-search');
      assert.equal(error.field, 'tbs');
      return true;
    }
  );

  assert.throws(
    () => client.headfulBrowser({ urls: 'https://example.com', output_format: 'pdf' }),
    (error) => {
      assert.equal(error.endpoint, '/headful-browser');
      assert.equal(error.field, 'output_format');
      return true;
    }
  );
});

test('wrappers reject an invalid client object before making a request', () => {
  // A bare object lacks .request, so validation fails before any fetch.
  assert.throws(
    () => search({}, { query: 'ai agents' }),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.VALIDATION);
      assert.equal(error.endpoint, '/search');
      assert.equal(error.field, 'client');
      return true;
    }
  );
});