openclaw-crawleo-skill 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +236 -0
- package/SKILL.md +70 -0
- package/contracts/coverage-checklist.md +22 -0
- package/contracts/crawleo-endpoint-evidence.md +137 -0
- package/contracts/crawleo-endpoints.json +237 -0
- package/contracts/crawleo-endpoints.md +268 -0
- package/contracts/final-assembly-report.md +84 -0
- package/examples/README.md +37 -0
- package/examples/live-usage-template.js +34 -0
- package/examples/offline-fake-fetch.js +41 -0
- package/package.json +42 -0
- package/scripts/verify-contracts.js +97 -0
- package/scripts/verify-final.js +162 -0
- package/scripts/verify-scaffold.js +166 -0
- package/skill.json +47 -0
- package/src/client.js +155 -0
- package/src/contract.js +50 -0
- package/src/endpoints.js +78 -0
- package/src/errors.js +89 -0
- package/src/index.js +37 -0
- package/test/client.test.js +104 -0
- package/test/endpoints.test.js +130 -0
- package/test/error-fixtures.test.js +151 -0
- package/test/errors.test.js +116 -0
- package/test/live.test.js +28 -0
- package/test/scaffold.test.js +47 -0
- package/test/wrapper-fixtures.test.js +227 -0
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
#!/usr/bin/env node

// Live usage template for the Crawleo skill.
//
// This script is safe to run unconditionally: it exits 0 without making any
// network call unless the user has explicitly opted in by setting
// CRAWLEO_ENABLE_LIVE_EXAMPLE=1 AND providing CRAWLEO_API_KEY.
// The API key value itself is never printed.

import { createCrawleoClient, CrawleoError } from '../src/index.js';

const enableLiveExample = process.env.CRAWLEO_ENABLE_LIVE_EXAMPLE === '1';
const apiKey = process.env.CRAWLEO_API_KEY;

if (!enableLiveExample || !apiKey) {
  console.log('Skipping live Crawleo example: set CRAWLEO_ENABLE_LIVE_EXAMPLE=1 and CRAWLEO_API_KEY to run it.');
  process.exit(0);
}

const client = createCrawleoClient({ apiKey });

try {
  // One cheap call: a single-page /search request.
  const result = await client.search({
    query: 'Crawleo web intelligence',
    max_pages: 1
  });

  // Report only the shape of the response (its top-level keys), not the
  // full payload, so the example output stays small and leaks nothing.
  console.log(JSON.stringify({
    ok: true,
    endpoint: '/search',
    topLevelFields: Object.keys(result || {})
  }, null, 2));
} catch (error) {
  if (error instanceof CrawleoError) {
    // Structured Crawleo errors have a stable JSON form.
    console.error(JSON.stringify(error.toJSON(), null, 2));
    process.exit(1);
  }

  // Fix: previously only the generic message was printed, swallowing all
  // diagnostic detail of unexpected failures. Log the error itself too.
  console.error('Unexpected live Crawleo example failure.');
  console.error(error);
  process.exit(1);
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
#!/usr/bin/env node

// Offline smoke test for the Crawleo client wrappers — makes no network
// calls. A fake fetch implementation records each outgoing request and
// answers with a canned JSON body; afterwards we verify that every wrapper
// method hit its expected REST path, in order.

import { createCrawleoClient } from '../src/index.js';

const recordedCalls = [];

// Build a minimal Response-like object (ok/status/headers/text) that echoes
// the request path and query string back as JSON.
function makeFakeResponse(requestUrl) {
  return {
    ok: true,
    status: 200,
    headers: new Map([['content-type', 'application/json']]),
    async text() {
      return JSON.stringify({
        ok: true,
        path: requestUrl.pathname,
        query: Object.fromEntries(requestUrl.searchParams.entries())
      });
    }
  };
}

const client = createCrawleoClient({
  apiKey: 'offline-example-key',
  fetch: async (url, init) => {
    const requestUrl = new URL(url);
    recordedCalls.push({ path: requestUrl.pathname, method: init.method });
    return makeFakeResponse(requestUrl);
  }
});

// Exercise each wrapper once with representative parameters.
await client.search({ query: 'ai agents', max_pages: 1, markdown: true });
await client.googleSearch({ q: 'best CRM software', type: 'news', tbs: 'qdr:d' });
await client.googleMaps({ q: 'restaurants in Paris', hl: 'fr' });
await client.crawl({ urls: ['https://example.com'], markdown: true });
await client.headfulBrowser({ urls: 'https://example.com', output_format: 'markdown' });

const expectedPaths = ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser'];
const actualPaths = recordedCalls.map((call) => call.path);

// Order matters: the wrappers above must have produced exactly these paths.
if (JSON.stringify(actualPaths) !== JSON.stringify(expectedPaths)) {
  throw new Error(`Unexpected wrapper paths: ${actualPaths.join(', ')}`);
}

console.log(`Offline Crawleo wrapper example passed for ${recordedCalls.length} endpoints.`);
|
package/package.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "openclaw-crawleo-skill",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Self-contained OpenClaw skill package for Crawleo web search and crawling capabilities.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"private": false,
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"engines": {
|
|
9
|
+
"node": ">=18"
|
|
10
|
+
},
|
|
11
|
+
"exports": {
|
|
12
|
+
".": "./src/index.js"
|
|
13
|
+
},
|
|
14
|
+
"files": [
|
|
15
|
+
"LICENSE",
|
|
16
|
+
"SKILL.md",
|
|
17
|
+
"skill.json",
|
|
18
|
+
"contracts/",
|
|
19
|
+
"examples/",
|
|
20
|
+
"scripts/",
|
|
21
|
+
"src/",
|
|
22
|
+
"test/",
|
|
23
|
+
"README.md"
|
|
24
|
+
],
|
|
25
|
+
"scripts": {
|
|
26
|
+
"test": "node --test",
|
|
27
|
+
"test:live": "node --test test/live.test.js",
|
|
28
|
+
"verify:contracts": "node scripts/verify-contracts.js",
|
|
29
|
+
"verify:examples": "node examples/offline-fake-fetch.js && node examples/live-usage-template.js",
|
|
30
|
+
"verify:final": "node scripts/verify-final.js",
|
|
31
|
+
"verify:scaffold": "node scripts/verify-scaffold.js"
|
|
32
|
+
},
|
|
33
|
+
"keywords": [
|
|
34
|
+
"openclaw",
|
|
35
|
+
"crawleo",
|
|
36
|
+
"skill",
|
|
37
|
+
"web-search",
|
|
38
|
+
"web-crawling"
|
|
39
|
+
],
|
|
40
|
+
"dependencies": {},
|
|
41
|
+
"devDependencies": {}
|
|
42
|
+
}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
#!/usr/bin/env node

// Verifies the Crawleo endpoint-contract deliverables.
//
// contracts/crawleo-endpoints.json must describe each required REST endpoint
// exactly once (with all required fields and a few semantic invariants), and
// contracts/crawleo-endpoints.md must mention every endpoint and MCP tool.
// Paths are relative to the current working directory — run from the
// package root. Exits non-zero after reporting ALL failures, not just the
// first one.

import fs from 'node:fs';
import path from 'node:path';

const contractPath = path.join('contracts', 'crawleo-endpoints.json');
const markdownPath = path.join('contracts', 'crawleo-endpoints.md');

// Canonical inventory: the five REST endpoints and five MCP tool names that
// the contract must cover.
const requiredEndpoints = ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser'];
const requiredTools = ['search_web', 'google_search', 'google_maps', 'crawl_web', 'headful_browser'];
// Every endpoint entry in the JSON contract must carry all of these fields.
const requiredEndpointFields = [
  'id',
  'name',
  'method',
  'path',
  'url',
  'mcp_tool',
  'description',
  'sources',
  'cost',
  'parameters',
  'examples',
  'response_shape',
  'errors',
  'ambiguities'
];

// Record a failure but keep going, so a single run reports every problem.
function fail(message) {
  console.error(`FAIL: ${message}`);
  process.exitCode = 1;
}

function assert(condition, message) {
  if (!condition) fail(message);
}

// Tally occurrences of each value — used to detect duplicate endpoint paths.
function countValues(values) {
  return values.reduce((counts, value) => {
    counts[value] = (counts[value] || 0) + 1;
    return counts;
  }, {});
}

assert(fs.existsSync(contractPath), `${contractPath} does not exist`);
assert(fs.existsSync(markdownPath), `${markdownPath} does not exist`);

let contract;
try {
  contract = JSON.parse(fs.readFileSync(contractPath, 'utf8'));
} catch (error) {
  fail(`${contractPath} is not valid JSON: ${error.message}`);
  // Fall back to an empty inventory so the remaining checks can still run
  // (they will fail and be reported individually).
  contract = { endpoints: [] };
}

const markdown = fs.existsSync(markdownPath) ? fs.readFileSync(markdownPath, 'utf8') : '';
const endpoints = Array.isArray(contract.endpoints) ? contract.endpoints : [];
const endpointPaths = endpoints.map((endpoint) => endpoint.path);
const endpointPathCounts = countValues(endpointPaths);

// Each required endpoint must appear exactly once in the JSON contract and
// be mentioned in the markdown companion document.
// NOTE(review): includes() is a substring check, so e.g. '/search' is also
// satisfied by an occurrence of '/google-search' in the markdown.
for (const endpointPath of requiredEndpoints) {
  assert(endpointPathCounts[endpointPath] === 1, `expected exactly one contract for ${endpointPath}, found ${endpointPathCounts[endpointPath] || 0}`);
  assert(markdown.includes(endpointPath), `${markdownPath} does not mention ${endpointPath}`);
}

// MCP tool names may live either on each endpoint entry (mcp_tool) or in the
// top-level mcp.tools array; accept either location.
const toolNames = endpoints.map((endpoint) => endpoint.mcp_tool).filter(Boolean);
const mcpTools = contract.mcp && Array.isArray(contract.mcp.tools) ? contract.mcp.tools.map((tool) => tool.name) : [];
const allTools = [...toolNames, ...mcpTools];

for (const toolName of requiredTools) {
  assert(allTools.includes(toolName), `missing MCP tool ${toolName}`);
  assert(markdown.includes(toolName), `${markdownPath} does not mention MCP tool ${toolName}`);
}

// Per-endpoint structural checks: all required fields present, plus semantic
// invariants (GET method, Crawleo API base URL, x-api-key header parameter,
// non-empty sources/parameters/examples/response shape/ambiguity notes).
for (const endpoint of endpoints) {
  for (const field of requiredEndpointFields) {
    assert(Object.prototype.hasOwnProperty.call(endpoint, field), `${endpoint.path || endpoint.id || 'unknown endpoint'} missing field ${field}`);
  }

  assert(endpoint.method === 'GET', `${endpoint.path} method must be GET per Crawleo docs`);
  assert(typeof endpoint.url === 'string' && endpoint.url.startsWith('https://api.crawleo.dev/'), `${endpoint.path} url must use Crawleo API base URL`);
  assert(Array.isArray(endpoint.sources) && endpoint.sources.length > 0, `${endpoint.path} must include source links`);
  assert(Array.isArray(endpoint.parameters) && endpoint.parameters.length > 0, `${endpoint.path} must include parameters`);
  assert(endpoint.parameters.some((parameter) => parameter.name === 'x-api-key' && parameter.in === 'header'), `${endpoint.path} must include x-api-key header parameter`);
  assert(Array.isArray(endpoint.examples) && endpoint.examples.length > 0, `${endpoint.path} must include examples`);
  assert(endpoint.response_shape && Array.isArray(endpoint.response_shape.top_level_fields) && endpoint.response_shape.top_level_fields.length > 0, `${endpoint.path} must include response top-level fields`);
  assert(Array.isArray(endpoint.ambiguities) && endpoint.ambiguities.length > 0, `${endpoint.path} must include ambiguity notes, even if only to say no ambiguity is known`);
}

// Both deliverables must preserve the canonical ambiguity phrase verbatim.
assert(markdown.includes('not specified in Crawleo docs'), `${markdownPath} must preserve required ambiguity phrase`);
assert(JSON.stringify(contract).includes('not specified in Crawleo docs'), `${contractPath} must preserve required ambiguity phrase`);

// fail() set process.exitCode on any failure; exit explicitly so the summary
// line below is only printed on full success.
if (process.exitCode) {
  console.error('Crawleo contract verification failed.');
  process.exit(process.exitCode);
}

console.log(`Crawleo contract verification passed: ${requiredEndpoints.length} endpoints and ${requiredTools.length} MCP tools covered.`);
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
#!/usr/bin/env node

// Final-assembly verifier for the Crawleo skill package.
//
// Collects ALL failures before exiting: required files exist and are
// non-empty, package.json/skill.json/contract JSON parse and carry the
// expected metadata, every endpoint / MCP tool / wrapper method is mentioned
// in the relevant deliverables, live-test env vars are documented, and no
// competitor brand names leak into the deliverables.
// Paths are relative to the CWD — run from the package root.

import fs from 'node:fs';

// Every file the published package must ship, each non-empty.
const requiredFiles = [
  'README.md',
  'LICENSE',
  'SKILL.md',
  'skill.json',
  'package.json',
  'contracts/crawleo-endpoints.json',
  'contracts/crawleo-endpoints.md',
  'contracts/coverage-checklist.md',
  'contracts/final-assembly-report.md',
  'examples/README.md',
  'examples/offline-fake-fetch.js',
  'examples/live-usage-template.js',
  'src/index.js',
  'src/client.js',
  'src/contract.js',
  'src/endpoints.js',
  'src/errors.js',
  'test/client.test.js',
  'test/endpoints.test.js',
  'test/error-fixtures.test.js',
  'test/errors.test.js',
  'test/live.test.js',
  'test/scaffold.test.js',
  'test/wrapper-fixtures.test.js',
  'scripts/verify-contracts.js',
  'scripts/verify-final.js',
  'scripts/verify-scaffold.js'
];

// Canonical inventory that must appear across the deliverables.
const requiredEndpoints = ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser'];
const requiredTools = ['search_web', 'google_search', 'google_maps', 'crawl_web', 'headful_browser'];
const requiredWrappers = ['client.search', 'client.googleSearch', 'client.googleMaps', 'client.crawl', 'client.headfulBrowser'];
const requiredScripts = ['test', 'test:live', 'verify:contracts', 'verify:examples', 'verify:scaffold', 'verify:final'];
// Human-facing deliverables that must stay free of competitor branding.
const deliverableFilesForBranding = [
  'README.md',
  'SKILL.md',
  'skill.json',
  'package.json',
  'contracts/crawleo-endpoints.md',
  'contracts/coverage-checklist.md',
  'contracts/final-assembly-report.md',
  'examples/README.md',
  'examples/offline-fake-fetch.js',
  'examples/live-usage-template.js',
  'src/index.js',
  'src/client.js',
  'src/contract.js',
  'src/endpoints.js',
  'src/errors.js'
];
// Both cases listed explicitly because the check below is case-sensitive.
const disallowedBrandTerms = ['Firecrawl', 'firecrawl', 'Apify', 'apify', 'Tavily', 'tavily', 'SerpAPI', 'serpapi', 'Browserbase', 'browserbase'];

const failures = [];

// Accumulate rather than throw, so one run reports every problem.
function check(condition, message) {
  if (!condition) failures.push(message);
}

function readText(path) {
  return fs.readFileSync(path, 'utf8');
}

for (const file of requiredFiles) {
  check(fs.existsSync(file), `${file} is missing`);
  if (fs.existsSync(file)) check(fs.statSync(file).size > 0, `${file} is empty`);
}

// Parse the three JSON deliverables; on failure record it and keep the
// empty-object default so downstream checks still run (and report).
let packageJson = {};
let skillJson = {};
let contract = {};
try {
  packageJson = JSON.parse(readText('package.json'));
} catch (error) {
  failures.push(`package.json is invalid JSON: ${error.message}`);
}
try {
  skillJson = JSON.parse(readText('skill.json'));
} catch (error) {
  failures.push(`skill.json is invalid JSON: ${error.message}`);
}
try {
  contract = JSON.parse(readText('contracts/crawleo-endpoints.json'));
} catch (error) {
  failures.push(`contracts/crawleo-endpoints.json is invalid JSON: ${error.message}`);
}

// Snapshot the text of every existing required file for mention checks.
const textFiles = Object.fromEntries(
  requiredFiles.filter((file) => fs.existsSync(file)).map((file) => [file, readText(file)])
);
const combined = Object.values(textFiles).join('\n');

// Publishing metadata invariants.
check(packageJson.name === 'openclaw-crawleo-skill', 'package name must be openclaw-crawleo-skill');
check(packageJson.type === 'module', 'package type must be module');
check(packageJson.private === false, 'package must be publishable with private=false');
check(packageJson.license === 'MIT', 'package license must be MIT');
check(Array.isArray(packageJson.files) && packageJson.files.includes('LICENSE'), 'package files must include LICENSE');
check(Array.isArray(packageJson.files) && packageJson.files.includes('scripts/'), 'package files must include scripts/ for verifier scripts');
check(skillJson.name === 'crawleo', 'skill.json name must be crawleo');
check(skillJson.brand === 'Crawleo', 'skill.json brand must be Crawleo');
check(skillJson.contract === './contracts/crawleo-endpoints.json', 'skill.json must point to contract JSON');
check(skillJson.instructions === './SKILL.md', 'skill.json must point to SKILL.md');

for (const scriptName of requiredScripts) {
  check(Boolean(packageJson.scripts?.[scriptName]), `package.json missing ${scriptName} script`);
}

// Tool names may live on endpoint entries (mcp_tool) or in mcp.tools.
const contractEndpoints = Array.isArray(contract.endpoints) ? contract.endpoints : [];
const contractPaths = contractEndpoints.map((endpoint) => endpoint.path);
const contractTools = contractEndpoints.map((endpoint) => endpoint.mcp_tool).filter(Boolean);
const mcpTools = Array.isArray(contract.mcp?.tools) ? contract.mcp.tools.map((tool) => tool.name) : [];

// Each endpoint: exactly one contract entry, and mentioned (substring match)
// in every listed deliverable.
for (const endpoint of requiredEndpoints) {
  check(contractPaths.filter((path) => path === endpoint).length === 1, `contract must contain exactly one ${endpoint}`);
  for (const file of ['README.md', 'SKILL.md', 'contracts/crawleo-endpoints.md', 'contracts/coverage-checklist.md', 'contracts/final-assembly-report.md', 'examples/offline-fake-fetch.js']) {
    check(textFiles[file]?.includes(endpoint), `${file} must mention ${endpoint}`);
  }
}

for (const tool of requiredTools) {
  check(contractTools.includes(tool) || mcpTools.includes(tool), `contract must include MCP tool ${tool}`);
  for (const file of ['README.md', 'SKILL.md', 'contracts/crawleo-endpoints.md', 'contracts/coverage-checklist.md', 'contracts/final-assembly-report.md', 'skill.json']) {
    check(textFiles[file]?.includes(tool), `${file} must mention ${tool}`);
  }
}

for (const wrapper of requiredWrappers) {
  for (const file of ['README.md', 'contracts/coverage-checklist.md', 'test/wrapper-fixtures.test.js']) {
    check(textFiles[file]?.includes(wrapper), `${file} must mention ${wrapper}`);
  }
}

// Live-call gating env vars must be documented (or enforced) everywhere.
for (const file of ['README.md', 'SKILL.md', 'contracts/coverage-checklist.md', 'contracts/final-assembly-report.md', 'test/live.test.js']) {
  check(textFiles[file]?.includes('CRAWLEO_ENABLE_LIVE_TESTS'), `${file} must document or enforce CRAWLEO_ENABLE_LIVE_TESTS`);
  check(textFiles[file]?.includes('CRAWLEO_API_KEY'), `${file} must document or enforce CRAWLEO_API_KEY`);
}

check(textFiles['README.md']?.includes('contracts/final-assembly-report.md'), 'README.md must point to final assembly report');
check(textFiles['README.md']?.includes('npm run test:live'), 'README.md must document npm run test:live');
check(textFiles['SKILL.md']?.includes('npm run test:live'), 'SKILL.md must document npm run test:live');
check(textFiles['contracts/coverage-checklist.md']?.includes('npm run test:live'), 'coverage checklist must document npm run test:live');
check(combined.includes('not specified in Crawleo docs'), 'deliverables must preserve the Crawleo ambiguity phrase');
check(combined.includes('https://api.crawleo.dev/mcp'), 'deliverables must mention the optional Crawleo MCP endpoint');

// Branding sweep: no competitor names in the human-facing deliverables.
for (const file of deliverableFilesForBranding) {
  const text = textFiles[file] || '';
  for (const term of disallowedBrandTerms) {
    check(!text.includes(term), `${file} contains non-Crawleo brand/reference ${term}`);
  }
}

if (failures.length > 0) {
  console.error('Crawleo final assembly verification failed:');
  for (const failure of failures) console.error(`- ${failure}`);
  process.exit(1);
}

console.log(`Crawleo final assembly verification passed: ${requiredFiles.length} files, ${requiredEndpoints.length} endpoints, ${requiredTools.length} MCP tools, ${requiredWrappers.length} wrapper methods, ${requiredScripts.length} scripts.`);
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
#!/usr/bin/env node

// Scaffold verifier for the Crawleo skill package.
//
// Checks that every scaffold file exists and is non-empty, that
// package.json/skill.json/contract JSON parse and pin the expected scripts
// and pointers, that the docs mention every endpoint/tool/wrapper, and that
// src/index.js actually exports the public API (verified by importing it).
// Uses process.exitCode so ALL failures are reported before exiting.
// Paths are relative to the CWD — run from the package root.

import fs from 'node:fs';

// Every file the scaffold slice must provide, each non-empty.
const requiredFiles = [
  'package.json',
  'README.md',
  'SKILL.md',
  'skill.json',
  'src/index.js',
  'src/client.js',
  'src/contract.js',
  'src/endpoints.js',
  'src/errors.js',
  'test/scaffold.test.js',
  'test/client.test.js',
  'test/endpoints.test.js',
  'test/errors.test.js',
  'test/error-fixtures.test.js',
  'test/live.test.js',
  'test/wrapper-fixtures.test.js',
  'examples/README.md',
  'examples/offline-fake-fetch.js',
  'examples/live-usage-template.js',
  'contracts/crawleo-endpoints.json',
  'contracts/crawleo-endpoints.md',
  'contracts/coverage-checklist.md',
  'scripts/verify-contracts.js',
  'scripts/verify-scaffold.js'
];

// Canonical inventory that the docs, contract, and source must all mention.
const requiredEndpoints = ['/search', '/google-search', '/google-maps', '/crawl', '/headful-browser'];
const requiredTools = ['search_web', 'google_search', 'google_maps', 'crawl_web', 'headful_browser'];
// Names that src/index.js must export (factory, low-level helpers, error
// type/codes, and the five endpoint wrapper functions).
const requiredExports = [
  'createCrawleoClient',
  'requestCrawleo',
  'buildCrawleoUrl',
  'CrawleoError',
  'CRAWLEO_ERROR_CODES',
  'search',
  'googleSearch',
  'googleMaps',
  'crawl',
  'headfulBrowser'
];
const requiredWrapperMentions = ['client.search', 'client.googleSearch', 'client.googleMaps', 'client.crawl', 'client.headfulBrowser'];

// Record a failure but keep checking, so one run reports every problem.
function fail(message) {
  console.error(`FAIL: ${message}`);
  process.exitCode = 1;
}

function assert(condition, message) {
  if (!condition) fail(message);
}

for (const file of requiredFiles) {
  assert(fs.existsSync(file), `${file} is missing`);
  if (fs.existsSync(file)) {
    assert(fs.statSync(file).size > 0, `${file} is empty`);
  }
}

// Parse the three JSON deliverables; on failure the empty-object defaults
// let the remaining checks run (and report) instead of crashing.
let packageJson = {};
let skillJson = {};
let contract = {};

try {
  packageJson = JSON.parse(fs.readFileSync('package.json', 'utf8'));
} catch (error) {
  fail(`package.json is invalid JSON: ${error.message}`);
}

try {
  skillJson = JSON.parse(fs.readFileSync('skill.json', 'utf8'));
} catch (error) {
  fail(`skill.json is invalid JSON: ${error.message}`);
}

try {
  contract = JSON.parse(fs.readFileSync('contracts/crawleo-endpoints.json', 'utf8'));
} catch (error) {
  fail(`contracts/crawleo-endpoints.json is invalid JSON: ${error.message}`);
}

// Package metadata and npm scripts pinned to exact expected values.
assert(packageJson.name === 'openclaw-crawleo-skill', 'package name must be openclaw-crawleo-skill');
assert(packageJson.type === 'module', 'package must use ESM modules');
assert(packageJson.scripts && packageJson.scripts.test === 'node --test', 'package must expose node --test script');
assert(packageJson.scripts && packageJson.scripts['test:live'] === 'node --test test/live.test.js', 'package must expose test:live script');
assert(packageJson.scripts && packageJson.scripts['verify:contracts'] === 'node scripts/verify-contracts.js', 'package must expose verify:contracts script');
assert(packageJson.scripts && packageJson.scripts['verify:examples'] === 'node examples/offline-fake-fetch.js && node examples/live-usage-template.js', 'package must expose verify:examples script');
assert(packageJson.scripts && packageJson.scripts['verify:scaffold'] === 'node scripts/verify-scaffold.js', 'package must expose verify:scaffold script');

// skill.json pointers into the package.
assert(skillJson.contract === './contracts/crawleo-endpoints.json', 'skill.json must point at the contract inventory');
assert(skillJson.instructions === './SKILL.md', 'skill.json must point at SKILL.md');
assert(skillJson.entrypoint === './src/index.js', 'skill.json must point at src/index.js');

// Load the text deliverables once for the substring mention checks below.
const readme = fs.readFileSync('README.md', 'utf8');
const skillText = fs.readFileSync('SKILL.md', 'utf8');
const coverageText = fs.readFileSync('contracts/coverage-checklist.md', 'utf8');
const sourceText = fs.readFileSync('src/index.js', 'utf8');
const contractText = JSON.stringify(contract);
const skillJsonText = JSON.stringify(skillJson);

// NOTE: includes() is a substring check, so '/search' is also satisfied by
// an occurrence of '/google-search'.
for (const endpoint of requiredEndpoints) {
  assert(readme.includes(endpoint), `README.md must mention ${endpoint}`);
  assert(skillText.includes(endpoint), `SKILL.md must mention ${endpoint}`);
  assert(coverageText.includes(endpoint), `coverage checklist must mention ${endpoint}`);
  assert(sourceText.includes(endpoint), `src/index.js must mention ${endpoint}`);
  assert(contractText.includes(endpoint), `contract must mention ${endpoint}`);
  assert(skillJsonText.includes(endpoint), `skill.json must mention ${endpoint}`);
}

for (const tool of requiredTools) {
  assert(readme.includes(tool), `README.md must mention ${tool}`);
  assert(skillText.includes(tool), `SKILL.md must mention ${tool}`);
  assert(coverageText.includes(tool), `coverage checklist must mention ${tool}`);
  assert(sourceText.includes(tool), `src/index.js must mention ${tool}`);
  assert(contractText.includes(tool), `contract must mention ${tool}`);
  assert(skillJsonText.includes(tool), `skill.json must mention ${tool}`);
}

// Documentation requirements: live-test gating, offline posture, pointers.
assert(readme.includes('CRAWLEO_API_KEY'), 'README.md must document CRAWLEO_API_KEY');
assert(readme.includes('CRAWLEO_ENABLE_LIVE_TESTS'), 'README.md must document CRAWLEO_ENABLE_LIVE_TESTS');
assert(readme.includes('npm run test:live'), 'README.md must document test:live');
assert(skillText.includes('CRAWLEO_ENABLE_LIVE_TESTS'), 'SKILL.md must document CRAWLEO_ENABLE_LIVE_TESTS');
assert(skillText.includes('npm run test:live'), 'SKILL.md must document test:live');
assert(coverageText.includes('npm run test:live'), 'coverage checklist must document test:live');
assert(readme.includes('offline'), 'README.md must document offline verification posture');
assert(readme.includes('createCrawleoClient'), 'README.md must document createCrawleoClient');
assert(readme.includes('client.search'), 'README.md must document runtime wrapper methods');
assert(readme.includes('contracts/crawleo-endpoints.json'), 'README.md must point to contract JSON');
assert(readme.includes('contracts/coverage-checklist.md'), 'README.md must point to coverage checklist');
assert(skillText.includes('contracts/coverage-checklist.md'), 'SKILL.md must point to coverage checklist');
assert(readme.includes('https://api.crawleo.dev/mcp'), 'README.md must mention optional Crawleo MCP endpoint');
assert(skillText.includes('not specified in Crawleo docs'), 'SKILL.md must include ambiguity policy');
assert(skillText.includes('createCrawleoClient'), 'SKILL.md must document the client factory');

// Import the public entry point for real (relative to this script's
// location, not the CWD) to verify the exported surface.
let publicApi = {};
try {
  publicApi = await import(new URL('../src/index.js', import.meta.url));
} catch (error) {
  fail(`src/index.js could not be imported: ${error.message}`);
}

for (const exportName of requiredExports) {
  assert(typeof publicApi[exportName] !== 'undefined', `src/index.js must export ${exportName}`);
}

// Wrapper exports must be callable, not just defined.
for (const wrapperName of ['search', 'googleSearch', 'googleMaps', 'crawl', 'headfulBrowser']) {
  assert(typeof publicApi[wrapperName] === 'function', `${wrapperName} export must be a function`);
}

assert(typeof publicApi.createCrawleoClient === 'function', 'createCrawleoClient export must be a function');

for (const wrapperMention of requiredWrapperMentions) {
  assert(readme.includes(wrapperMention), `README.md must mention ${wrapperMention}`);
  assert(coverageText.includes(wrapperMention), `coverage checklist must mention ${wrapperMention}`);
}

// fail() set process.exitCode on any failure; exit explicitly so the summary
// line below is only printed on full success.
if (process.exitCode) {
  console.error('Crawleo scaffold verification failed.');
  process.exit(process.exitCode);
}

console.log(`Crawleo scaffold verification passed: ${requiredFiles.length} files, ${requiredEndpoints.length} endpoints, ${requiredTools.length} MCP tools, ${requiredExports.length} public exports.`);
|
package/skill.json
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "crawleo",
|
|
3
|
+
"displayName": "Crawleo OpenClaw Skill",
|
|
4
|
+
"version": "0.1.0",
|
|
5
|
+
"description": "OpenClaw skill package for Crawleo web search, Google Search, Google Maps, crawling, and headful browser crawling.",
|
|
6
|
+
"brand": "Crawleo",
|
|
7
|
+
"entrypoint": "./src/index.js",
|
|
8
|
+
"instructions": "./SKILL.md",
|
|
9
|
+
"contract": "./contracts/crawleo-endpoints.json",
|
|
10
|
+
"environment": [
|
|
11
|
+
{
|
|
12
|
+
"name": "CRAWLEO_API_KEY",
|
|
13
|
+
"requiredForLiveCalls": true,
|
|
14
|
+
"secret": true,
|
|
15
|
+
"description": "Crawleo API key sent with the x-api-key header. Never log or echo the value."
|
|
16
|
+
}
|
|
17
|
+
],
|
|
18
|
+
"capabilities": {
|
|
19
|
+
"restEndpoints": [
|
|
20
|
+
{ "path": "/search", "name": "Bing Search API", "mcpTool": "search_web" },
|
|
21
|
+
{ "path": "/google-search", "name": "Google Search API", "mcpTool": "google_search" },
|
|
22
|
+
{ "path": "/google-maps", "name": "Google Maps API", "mcpTool": "google_maps" },
|
|
23
|
+
{ "path": "/crawl", "name": "Crawler API", "mcpTool": "crawl_web" },
|
|
24
|
+
{ "path": "/headful-browser", "name": "Headful Browser API", "mcpTool": "headful_browser" }
|
|
25
|
+
],
|
|
26
|
+
"mcpTools": [
|
|
27
|
+
"search_web",
|
|
28
|
+
"google_search",
|
|
29
|
+
"google_maps",
|
|
30
|
+
"crawl_web",
|
|
31
|
+
"headful_browser"
|
|
32
|
+
]
|
|
33
|
+
},
|
|
34
|
+
"verification": {
|
|
35
|
+
"offlineByDefault": true,
|
|
36
|
+
"defaultCommands": [
|
|
37
|
+
"npm test",
|
|
38
|
+
"npm run verify:contracts",
|
|
39
|
+
"npm run verify:scaffold"
|
|
40
|
+
],
|
|
41
|
+
"liveCallsRequire": [
|
|
42
|
+
"CRAWLEO_API_KEY",
|
|
43
|
+
"explicit live-test enablement"
|
|
44
|
+
]
|
|
45
|
+
},
|
|
46
|
+
"status": "scaffold-only; REST wrappers are implemented in a later slice"
|
|
47
|
+
}
|