openclaw-crawleo-skill 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,151 @@
1
+ import assert from 'node:assert/strict';
2
+ import test from 'node:test';
3
+
4
+ import {
5
+ CRAWLEO_ERROR_CODES,
6
+ CrawleoError,
7
+ createCrawleoClient,
8
+ redactSecret,
9
+ requestCrawleo
10
+ } from '../src/index.js';
11
+
12
+ const FIXTURE_SECRET = 'fixture-secret-key-never-real';
13
+
14
// Build a minimal fetch-Response-like stub for offline tests.
// `body` may be a string (returned verbatim from text()) or any
// JSON-serializable value (stringified when text() is awaited).
function response({ status, body, contentType = 'application/json' }) {
  const payload = typeof body === 'string' ? body : JSON.stringify(body);
  const headers = new Map([['content-type', contentType]]);
  return {
    ok: status >= 200 && status < 300,
    status,
    headers,
    text: async () => payload
  };
}
24
+
25
// Drive a client.search call that is expected to reject, and hand the
// thrown error back to the caller. Throws if the call unexpectedly
// succeeds so tests cannot pass vacuously.
async function getErrorFromClient(fetchImpl, apiKey = FIXTURE_SECRET) {
  const client = createCrawleoClient({ apiKey, fetch: fetchImpl });

  let captured;
  try {
    await client.search({ query: 'fixture query' });
  } catch (error) {
    captured = error;
  }

  if (captured === undefined) {
    throw new Error('Expected Crawleo client call to fail');
  }
  return captured;
}
36
+
37
// Table-driven check: each documented HTTP failure status must surface as a
// CrawleoError carrying its stable code plus endpoint/status/body diagnostics.
test('HTTP error fixture table maps status codes to stable Crawleo error codes', async () => {
  const fixtures = [
    { status: 400, expectedCode: CRAWLEO_ERROR_CODES.HTTP_BAD_REQUEST, message: 'Bad request fixture' },
    { status: 401, expectedCode: CRAWLEO_ERROR_CODES.HTTP_AUTH, message: 'Invalid API key fixture' },
    { status: 402, expectedCode: CRAWLEO_ERROR_CODES.HTTP_PAYMENT_REQUIRED, message: 'Insufficient credits fixture' },
    { status: 403, expectedCode: CRAWLEO_ERROR_CODES.HTTP_FORBIDDEN, message: 'Inactive subscription fixture' },
    { status: 429, expectedCode: CRAWLEO_ERROR_CODES.HTTP_RATE_LIMIT, message: 'Rate limit fixture' },
    { status: 500, expectedCode: CRAWLEO_ERROR_CODES.HTTP_UPSTREAM, message: 'Internal server fixture' }
  ];

  for (const { status, expectedCode, message } of fixtures) {
    const fixtureBody = {
      error: message,
      code: `fixture_${status}`,
      details: { endpoint: '/search', status }
    };
    const error = await getErrorFromClient(async () => response({ status, body: fixtureBody }));

    assert.ok(error instanceof CrawleoError);
    assert.equal(error.code, expectedCode);
    assert.equal(error.endpoint, '/search');
    assert.equal(error.status, status);
    assert.equal(error.details.error, message);
    assert.equal(error.details.code, `fixture_${status}`);
  }
});
65
+
66
// Configuration failures must be diagnosable before any network work happens:
// an empty API key and an absent fetch implementation each get a distinct code.
test('missing API key and missing fetch produce distinct configuration diagnostics', async () => {
  const okFetch = async () => response({ status: 200, body: {} });

  await assert.rejects(
    () => requestCrawleo({ apiKey: '', fetchImpl: okFetch, endpointPath: '/search', params: { query: 'fixture' } }),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.MISSING_API_KEY);
      assert.equal(error.endpoint, '/search');
      assert.equal(error.status, undefined);
      return true;
    }
  );

  await assert.rejects(
    () => requestCrawleo({ apiKey: FIXTURE_SECRET, fetchImpl: undefined, endpointPath: '/search', params: { query: 'fixture' } }),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.MISSING_FETCH);
      assert.equal(error.endpoint, '/search');
      // The configured key must never leak through serialized diagnostics.
      assert.equal(JSON.stringify(error).includes(FIXTURE_SECRET), false);
      return true;
    }
  );
});
87
+
88
// A 2xx response whose JSON body cannot be parsed should become a
// RESPONSE_MALFORMED_JSON diagnostic carrying a redacted body preview.
test('malformed successful JSON response carries redacted bounded body preview', async () => {
  const truncatedJson = `{ "message": "token ${FIXTURE_SECRET}"`;
  const error = await getErrorFromClient(async () =>
    response({ status: 200, body: truncatedJson, contentType: 'application/json' })
  );

  assert.equal(error.code, CRAWLEO_ERROR_CODES.RESPONSE_MALFORMED_JSON);
  assert.equal(error.status, 200);
  assert.equal(error.details.contentType, 'application/json');
  assert.equal(error.details.bodyPreview.includes(FIXTURE_SECRET), false);
  assert.match(error.details.bodyPreview, /\[REDACTED\]/);
});
101
+
102
// Plain-text error bodies must keep their HTTP classification (not be
// misfiled as malformed JSON) while still redacting embedded secrets.
test('text HTTP error bodies are summarized and redacted without becoming malformed JSON errors', async () => {
  const plainTextBody = `upstream failure x-api-key: ${FIXTURE_SECRET}`;
  const error = await getErrorFromClient(async () =>
    response({ status: 500, body: plainTextBody, contentType: 'text/plain' })
  );

  assert.equal(error.code, CRAWLEO_ERROR_CODES.HTTP_UPSTREAM);
  assert.equal(error.details.contentType, 'text/plain');
  assert.equal(error.details.bodyPreview.includes(FIXTURE_SECRET), false);
  assert.match(error.details.bodyPreview, /x-api-key: \[REDACTED\]/);
});
114
+
115
// A fetch that throws (no HTTP response at all) must classify as TRANSPORT,
// with the thrown message redacted before it reaches diagnostics.
test('transport error fixture preserves transport classification and redacts thrown messages', async () => {
  const failingFetch = async () => {
    throw new Error(`socket closed with Authorization: Bearer ${FIXTURE_SECRET}`);
  };
  const error = await getErrorFromClient(failingFetch);

  assert.equal(error.code, CRAWLEO_ERROR_CODES.TRANSPORT);
  assert.equal(error.status, undefined);
  assert.equal(error.details.cause.includes(FIXTURE_SECRET), false);
  assert.match(error.details.cause, /Authorization: \[REDACTED\]/i);
});
125
+
126
// Redaction must apply to the message, explicitly listed secrets, and every
// nesting level of details — including objects inside arrays.
test('CrawleoError serialization redacts nested details and explicit secret values', () => {
  const details = {
    header: `x-api-key=${FIXTURE_SECRET}`,
    nested: [{ authorization: `Authorization: Bearer ${FIXTURE_SECRET}` }]
  };
  const error = new CrawleoError(`top-level message ${FIXTURE_SECRET}`, {
    code: CRAWLEO_ERROR_CODES.HTTP_AUTH,
    endpoint: '/search',
    status: 401,
    secrets: [FIXTURE_SECRET],
    details
  });

  assert.equal(JSON.stringify(error).includes(FIXTURE_SECRET), false);
  assert.match(error.message, /\[REDACTED\]/);
  assert.match(error.details.header, /x-api-key= \[REDACTED\]/);
  assert.match(error.details.nested[0].authorization, /Authorization: \[REDACTED\]/i);
});
145
+
146
// redactSecret must recognize common credential spellings even when the
// secret value itself was never registered with the error.
test('redactSecret handles common API key spellings without explicit secret input', () => {
  const cases = [
    ['apiKey=abc123', 'apiKey= [REDACTED]'],
    ['api-key: abc123', 'api-key: [REDACTED]'],
    ['x-api-key: abc123', 'x-api-key: [REDACTED]'],
    ['Authorization: Bearer abc123', 'Authorization: [REDACTED]']
  ];

  for (const [input, expected] of cases) {
    assert.equal(redactSecret(input), expected);
  }
});
@@ -0,0 +1,116 @@
1
+ import assert from 'node:assert/strict';
2
+ import test from 'node:test';
3
+
4
+ import { CRAWLEO_ERROR_CODES, CrawleoError, createCrawleoClient } from '../src/index.js';
5
+
6
// Minimal stand-in for a fetch Response. String bodies pass through
// verbatim; anything else is JSON-serialized when text() is awaited.
function response({ status, body, contentType = 'application/json' }) {
  return {
    ok: status >= 200 && status < 300,
    status,
    headers: new Map([['content-type', contentType]]),
    text: async () => (typeof body === 'string' ? body : JSON.stringify(body))
  };
}
16
+
17
// Issue one search call against a stubbed client that always answers with
// the given status/body, and return the resulting error together with the
// api key used (so leak checks can reference it). Fails loudly if the call
// unexpectedly succeeds.
async function captureError(status, body, contentType, apiKey = `secret-key-${status}`) {
  const stubFetch = async () => response({ status, body, contentType });
  const client = createCrawleoClient({ apiKey, fetch: stubFetch });

  try {
    await client.search({ query: 'ai agents' });
  } catch (error) {
    return { error, apiKey };
  }

  throw new Error('Expected request to fail');
}
31
+
32
// Every documented HTTP failure class gets its own stable error code.
test('HTTP 400 bad requests map to a stable Crawleo error code', async () => {
  const { error } = await captureError(400, { error: 'Missing required parameter q' });

  assert.ok(error instanceof CrawleoError);
  assert.equal(error.code, CRAWLEO_ERROR_CODES.HTTP_BAD_REQUEST);
  assert.equal(error.endpoint, '/search');
  assert.equal(error.status, 400);
  assert.deepEqual(error.details, { error: 'Missing required parameter q' });
});

test('HTTP 401 auth failures are normalized without leaking the API key', async () => {
  const apiKey = 'secret-key-401';
  const body = { error: `Invalid x-api-key: ${apiKey}`, code: 'invalid_key' };
  const { error } = await captureError(401, body, undefined, apiKey);
  const serialized = JSON.stringify(error);

  assert.equal(error.code, CRAWLEO_ERROR_CODES.HTTP_AUTH);
  assert.equal(serialized.includes(apiKey), false);
  assert.match(serialized, /\[REDACTED\]/);
});

test('HTTP 402 quota failures and HTTP 429 rate limits get distinct codes', async () => {
  const quota = await captureError(402, { error: 'Insufficient credits' });
  const limited = await captureError(429, { error: 'Rate limit exceeded' });

  assert.equal(quota.error.code, CRAWLEO_ERROR_CODES.HTTP_PAYMENT_REQUIRED);
  assert.equal(limited.error.code, CRAWLEO_ERROR_CODES.HTTP_RATE_LIMIT);
});

test('HTTP 403 forbidden and 5xx upstream failures get stable codes', async () => {
  const forbidden = await captureError(403, { error: 'Inactive account or expired subscription' });
  const upstream = await captureError(500, { error: 'Internal server error' });

  assert.equal(forbidden.error.code, CRAWLEO_ERROR_CODES.HTTP_FORBIDDEN);
  assert.equal(upstream.error.code, CRAWLEO_ERROR_CODES.HTTP_UPSTREAM);
});
69
+
70
// Non-JSON error bodies and malformed 2xx JSON each keep their own
// classification, and neither may leak the configured api key.
test('non-JSON HTTP errors preserve a bounded redacted body preview', async () => {
  const apiKey = 'secret-key-500-text';
  const textBody = `server failed with api_key=${apiKey}`;
  const { error } = await captureError(500, textBody, 'text/plain', apiKey);

  assert.equal(error.code, CRAWLEO_ERROR_CODES.HTTP_UPSTREAM);
  assert.equal(error.details.contentType, 'text/plain');
  assert.equal(error.details.bodyPreview.includes(apiKey), false);
  assert.match(error.details.bodyPreview, /\[REDACTED\]/);
});

test('successful malformed JSON responses are response diagnostics, not HTTP errors', async () => {
  const apiKey = 'secret-malformed-key';
  const truncatedFetch = async () =>
    response({ status: 200, body: `{ "token": "${apiKey}"`, contentType: 'application/json' });
  const client = createCrawleoClient({ apiKey, fetch: truncatedFetch });

  await assert.rejects(
    () => client.search({ query: 'ai agents' }),
    (error) => {
      assert.equal(error.code, CRAWLEO_ERROR_CODES.RESPONSE_MALFORMED_JSON);
      assert.equal(error.status, 200);
      assert.equal(JSON.stringify(error).includes(apiKey), false);
      return true;
    }
  );
});
97
+
98
// A throwing fetch is a transport failure, never an HTTP one: no status,
// TRANSPORT code, and the key redacted out of the serialized error.
test('transport failures remain distinct from HTTP failures and are redacted', async () => {
  const apiKey = 'secret-network-key';
  const failingFetch = async () => {
    throw new Error(`socket closed for ${apiKey}`);
  };
  const client = createCrawleoClient({ apiKey, fetch: failingFetch });

  const isRedactedTransportError = (error) => {
    assert.equal(error.code, CRAWLEO_ERROR_CODES.TRANSPORT);
    assert.equal(error.status, undefined);
    assert.equal(JSON.stringify(error).includes(apiKey), false);
    return true;
  };

  await assert.rejects(() => client.search({ query: 'ai agents' }), isRedactedTransportError);
});
@@ -0,0 +1,28 @@
1
+ import assert from 'node:assert/strict';
2
+ import test from 'node:test';
3
+
4
+ import { CrawleoError, createCrawleoClient } from '../src/index.js';
5
+
6
// Live-network smoke test, opt-in only: both the enable flag and a real API
// key must be present, otherwise the test is skipped with an explanation.
const liveEnabled = process.env.CRAWLEO_ENABLE_LIVE_TESTS === '1';
const apiKey = process.env.CRAWLEO_API_KEY;
const skipReason = 'Set CRAWLEO_ENABLE_LIVE_TESTS=1 and CRAWLEO_API_KEY to run live Crawleo tests.';
const skip = liveEnabled && apiKey ? false : skipReason;

test('live Crawleo /search smoke test is explicitly gated', { skip }, async () => {
  const client = createCrawleoClient({ apiKey });

  try {
    const result = await client.search({ query: 'Crawleo web intelligence', max_pages: 1 });

    assert.equal(typeof result, 'object');
    assert.notEqual(result, null);
  } catch (error) {
    // Surface structured Crawleo diagnostics; rethrow anything else as-is.
    if (error instanceof CrawleoError) {
      throw new Error(`Live Crawleo smoke test failed: ${JSON.stringify(error.toJSON())}`);
    }

    throw error;
  }
});
@@ -0,0 +1,47 @@
1
+ import assert from 'node:assert/strict';
2
+ import test from 'node:test';
3
+
4
+ import {
5
+ CRAWLEO_CONTRACT_PATH,
6
+ CRAWLEO_MCP_TOOLS,
7
+ CRAWLEO_REST_ENDPOINTS,
8
+ CRAWLEO_SKILL_INSTRUCTIONS_PATH,
9
+ CRAWLEO_SKILL_MANIFEST_PATH,
10
+ createCrawleoClient,
11
+ crawl,
12
+ getScaffoldStatus,
13
+ googleMaps,
14
+ googleSearch,
15
+ headfulBrowser,
16
+ search
17
+ } from '../src/index.js';
18
+
19
// Shared expected values so the endpoint/tool lists are asserted identically
// in both the export test and the scaffold-status test.
const EXPECTED_REST_ENDPOINTS = ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser'];
const EXPECTED_MCP_TOOLS = ['search_web', 'google_search', 'google_maps', 'crawl_web', 'headful_browser'];

test('scaffold exports the Crawleo contract location and documented capability names', () => {
  assert.equal(CRAWLEO_CONTRACT_PATH, 'contracts/crawleo-endpoints.json');
  assert.equal(CRAWLEO_SKILL_MANIFEST_PATH, 'skill.json');
  assert.equal(CRAWLEO_SKILL_INSTRUCTIONS_PATH, 'SKILL.md');
  assert.deepEqual(CRAWLEO_REST_ENDPOINTS, EXPECTED_REST_ENDPOINTS);
  assert.deepEqual(CRAWLEO_MCP_TOOLS, EXPECTED_MCP_TOOLS);
});

test('scaffold status is explicit that runtime wrappers are implemented and live calls are opt-in', () => {
  assert.deepEqual(getScaffoldStatus(), {
    packageName: 'openclaw-crawleo-skill',
    implementationStatus: 'rest-wrappers-implemented',
    contractPath: 'contracts/crawleo-endpoints.json',
    skillManifestPath: 'skill.json',
    skillInstructionsPath: 'SKILL.md',
    restEndpoints: EXPECTED_REST_ENDPOINTS,
    mcpTools: EXPECTED_MCP_TOOLS,
    liveCrawleoCallsEnabledByDefault: false
  });
});

test('public API exports client factory and all endpoint wrapper functions', () => {
  const publicFunctions = [createCrawleoClient, search, googleSearch, googleMaps, crawl, headfulBrowser];

  for (const fn of publicFunctions) {
    assert.equal(typeof fn, 'function');
  }
});
@@ -0,0 +1,227 @@
1
+ import assert from 'node:assert/strict';
2
+ import test from 'node:test';
3
+
4
+ import { CRAWLEO_ERROR_CODES, createCrawleoClient } from '../src/index.js';
5
+
6
// Build a client whose fetch records every request and answers with a JSON
// echo of the requested path and query string, so wrapper URL construction
// can be verified entirely offline. Returns { client, calls }.
function createRecordingClient() {
  const calls = [];

  const recordingFetch = async (url, init) => {
    const requestUrl = new URL(url);
    calls.push({ url: requestUrl, init });

    const echo = {
      ok: true,
      path: requestUrl.pathname,
      query: Object.fromEntries(requestUrl.searchParams.entries())
    };

    return {
      ok: true,
      status: 200,
      headers: new Map([['content-type', 'application/json']]),
      text: async () => JSON.stringify(echo)
    };
  };

  const client = createCrawleoClient({ apiKey: 'offline-wrapper-fixture-key', fetch: recordingFetch });

  return { client, calls };
}
30
+
31
// Run one wrapper call against a fresh recording client and verify exactly
// one GET request was issued to the expected path with the documented
// headers and query parameters. Returns the recorded URL for inspection.
async function assertSingleRequest(wrapperCall, { path, query }) {
  const { client, calls } = createRecordingClient();
  const result = await wrapperCall(client);

  assert.equal(calls.length, 1);
  const [{ url, init }] = calls;

  assert.equal(url.origin, 'https://api.crawleo.dev');
  assert.equal(url.pathname, path);
  assert.equal(init.method, 'GET');
  assert.equal(init.headers['x-api-key'], 'offline-wrapper-fixture-key');
  assert.equal(init.headers.accept, 'application/json');

  for (const [key, value] of Object.entries(query)) {
    assert.equal(url.searchParams.get(key), String(value));
  }

  assert.equal(result.path, path);
  return url;
}
49
+
50
// Assert that fn throws a VALIDATION-coded error naming the expected
// endpoint and offending field.
function assertValidationError(fn, { endpoint, field }) {
  const isExpectedValidationError = (error) => {
    assert.equal(error.code, CRAWLEO_ERROR_CODES.VALIDATION);
    assert.equal(error.endpoint, endpoint);
    assert.equal(error.field, field);
    return true;
  };

  assert.throws(fn, isExpectedValidationError);
}
61
+
62
// Each GET wrapper must place every documented parameter on the query
// string, with numbers and booleans serialized to their string forms.
test('search wrapper constructs /search URL with documented query params', async () => {
  const wrapperCall = (client) => client.search({
    query: 'machine learning',
    max_pages: 2,
    device: 'mobile',
    markdown: true,
    auto_crawling: false
  });

  await assertSingleRequest(wrapperCall, {
    path: '/search',
    query: {
      query: 'machine learning',
      max_pages: '2',
      device: 'mobile',
      markdown: 'true',
      auto_crawling: 'false'
    }
  });
});

test('googleSearch wrapper constructs /google-search URL with documented query params', async () => {
  const wrapperCall = (client) => client.googleSearch({
    q: 'best CRM software',
    gl: 'us',
    hl: 'en',
    type: 'shopping',
    tbs: 'qdr:w',
    num: 10,
    page: 2
  });

  await assertSingleRequest(wrapperCall, {
    path: '/google-search',
    query: {
      q: 'best CRM software',
      gl: 'us',
      hl: 'en',
      type: 'shopping',
      tbs: 'qdr:w',
      num: '10',
      page: '2'
    }
  });
});

test('googleMaps wrapper constructs /google-maps URL with documented query params', async () => {
  const wrapperCall = (client) => client.googleMaps({
    q: 'restaurants in Paris',
    hl: 'fr',
    ll: '@48.8566,2.3522,15z',
    placeId: 'ChIJLU7jZClu5kcR4PcOOO6p3I0'
  });

  await assertSingleRequest(wrapperCall, {
    path: '/google-maps',
    query: {
      q: 'restaurants in Paris',
      hl: 'fr',
      ll: '@48.8566,2.3522,15z',
      placeId: 'ChIJLU7jZClu5kcR4PcOOO6p3I0'
    }
  });
});
129
+
130
// Multi-URL endpoints accept either one string or an array; arrays are
// joined with commas into a single query value.
test('crawl wrapper serializes string and array urls as documented comma-separated query values', async () => {
  await assertSingleRequest(
    (client) => client.crawl({ urls: 'https://example.com/a', markdown: true }),
    {
      path: '/crawl',
      query: { urls: 'https://example.com/a', markdown: 'true' }
    }
  );

  await assertSingleRequest(
    (client) => client.crawl({ urls: ['https://example.com/a', 'https://example.com/b'], render_js: true, screenshot: true }),
    {
      path: '/crawl',
      query: {
        urls: 'https://example.com/a,https://example.com/b',
        render_js: 'true',
        screenshot: 'true'
      }
    }
  );
});

test('headfulBrowser wrapper serializes urls and output options for /headful-browser', async () => {
  const wrapperCall = (client) => client.headfulBrowser({
    urls: ['https://example.com/a', 'https://example.com/b'],
    country: 'gb',
    output_format: 'page_text',
    screenshot: false
  });

  await assertSingleRequest(wrapperCall, {
    path: '/headful-browser',
    query: {
      urls: 'https://example.com/a,https://example.com/b',
      country: 'gb',
      output_format: 'page_text',
      screenshot: 'false'
    }
  });
});
174
+
175
// Missing and empty values for every documented required field must each
// raise a validation error naming the endpoint and the field.
test('required parameter validation identifies every documented required field', () => {
  const { client } = createRecordingClient();

  const cases = [
    [() => client.search({}), '/search', 'query'],
    [() => client.search({ query: '' }), '/search', 'query'],
    [() => client.googleSearch({}), '/google-search', 'q'],
    [() => client.googleSearch({ q: '' }), '/google-search', 'q'],
    [() => client.googleMaps({}), '/google-maps', 'q'],
    [() => client.googleMaps({ q: '' }), '/google-maps', 'q'],
    [() => client.crawl({}), '/crawl', 'urls'],
    [() => client.crawl({ urls: [] }), '/crawl', 'urls'],
    [() => client.headfulBrowser({}), '/headful-browser', 'urls'],
    [() => client.headfulBrowser({ urls: [] }), '/headful-browser', 'urls']
  ];

  for (const [fn, endpoint, field] of cases) {
    assertValidationError(fn, { endpoint, field });
  }
});
189
+
190
// Enum-typed parameters: every documented value passes through to the query
// string, and anything outside the documented set is rejected by field name.
test('documented enum validators accept all allowed values', async () => {
  const DEVICES = ['desktop', 'mobile', 'tablet'];
  const SEARCH_TYPES = ['search', 'news', 'images', 'places', 'shopping'];
  const TBS_WINDOWS = ['qdr:h', 'qdr:d', 'qdr:w', 'qdr:m', 'qdr:y'];
  const OUTPUT_FORMATS = ['markdown', 'enhanced_html', 'raw_html', 'page_text'];

  for (const device of DEVICES) {
    await assertSingleRequest((client) => client.search({ query: 'ai agents', device }), {
      path: '/search',
      query: { query: 'ai agents', device }
    });
  }

  for (const type of SEARCH_TYPES) {
    await assertSingleRequest((client) => client.googleSearch({ q: 'ai agents', type }), {
      path: '/google-search',
      query: { q: 'ai agents', type }
    });
  }

  for (const tbs of TBS_WINDOWS) {
    await assertSingleRequest((client) => client.googleSearch({ q: 'ai agents', tbs }), {
      path: '/google-search',
      query: { q: 'ai agents', tbs }
    });
  }

  for (const output_format of OUTPUT_FORMATS) {
    await assertSingleRequest((client) => client.headfulBrowser({ urls: 'https://example.com', output_format }), {
      path: '/headful-browser',
      query: { urls: 'https://example.com', output_format }
    });
  }
});

test('documented enum validators reject invalid values with field diagnostics', () => {
  const { client } = createRecordingClient();

  assertValidationError(() => client.search({ query: 'ai agents', device: 'watch' }), { endpoint: '/search', field: 'device' });
  assertValidationError(() => client.googleSearch({ q: 'ai agents', type: 'videos' }), { endpoint: '/google-search', field: 'type' });
  assertValidationError(() => client.googleSearch({ q: 'ai agents', tbs: 'qdr:decade' }), { endpoint: '/google-search', field: 'tbs' });
  assertValidationError(() => client.headfulBrowser({ urls: 'https://example.com', output_format: 'pdf' }), { endpoint: '/headful-browser', field: 'output_format' });
});