crawlio-browser 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +10 -0
- package/LICENSE +21 -0
- package/README.md +508 -0
- package/bin/crawlio-browser.js +5 -0
- package/dist/mcp-server/chunk-JSBRDJBE.js +30 -0
- package/dist/mcp-server/index.d.ts +2 -0
- package/dist/mcp-server/index.js +5878 -0
- package/dist/mcp-server/init-TRQTWLAB.js +492 -0
- package/package.json +65 -0
- package/skills/browser-automation/SKILL.md +245 -0
- package/skills/browser-automation/reference.md +259 -0
|
@@ -0,0 +1,492 @@
|
|
|
1
|
+
import "./chunk-JSBRDJBE.js";
|
|
2
|
+
|
|
3
|
+
// src/mcp-server/init.ts
|
|
4
|
+
import { execFileSync, spawn } from "child_process";
|
|
5
|
+
import { existsSync, mkdirSync, writeFileSync, readFileSync, readdirSync, copyFileSync } from "fs";
|
|
6
|
+
import { join, resolve, dirname, sep, basename } from "path";
|
|
7
|
+
import { homedir, platform } from "os";
|
|
8
|
+
import { createServer as createNetServer } from "net";
|
|
9
|
+
import { createInterface } from "readline";
|
|
10
|
+
import { fileURLToPath } from "url";
|
|
11
|
+
// Fixed local portal endpoints: the portal server binds loopback port 3001.
var PORTAL_URL = "http://127.0.0.1:3001";
var HEALTH_URL = `${PORTAL_URL}/health`;
var MCP_URL = `${PORTAL_URL}/mcp`;
// User home directory; root for all config, plugin, and skill paths below.
var HOME = homedir();
// Minimal ANSI styling helpers for terminal output (SGR codes, reset with \x1B[0m).
var bold = (s) => `\x1B[1m${s}\x1B[0m`;
var green = (s) => `\x1B[32m${s}\x1B[0m`;
var cyan = (s) => `\x1B[36m${s}\x1B[0m`;
var dim = (s) => `\x1B[2m${s}\x1B[0m`;
var yellow = (s) => `\x1B[33m${s}\x1B[0m`;
|
|
20
|
+
// Parse CLI flags for the init command.
// Recognized: --portal, --full, --dry-run, --plugin, --yes/-y, and a
// repeatable "-a <agent>" pair. A trailing "-a" with no value, and any
// unknown flags, are silently ignored.
function parseFlags(argv) {
  const opts = {
    portal: false,
    full: false,
    dryRun: false,
    plugin: false,
    agents: [],
    yes: false
  };
  let i = 0;
  while (i < argv.length) {
    switch (argv[i]) {
      case "--portal":
        opts.portal = true;
        break;
      case "--full":
        opts.full = true;
        break;
      case "--dry-run":
        opts.dryRun = true;
        break;
      case "--plugin":
        opts.plugin = true;
        break;
      case "--yes":
      case "-y":
        opts.yes = true;
        break;
      case "-a":
        // consume the following token as the agent name, if present
        if (i + 1 < argv.length) {
          i += 1;
          opts.agents.push(argv[i]);
        }
        break;
    }
    i += 1;
  }
  return opts;
}
|
|
42
|
+
// Build the argv passed to `npx` to run the add-mcp client-configuration tool.
// Portal mode registers the HTTP URL; stdio mode registers the package name
// (optionally with --full). Any requested agents are appended as "-a <agent>".
function buildAddMcpArgs(options) {
  let target;
  if (options.portal) {
    target = MCP_URL;
  } else if (options.full) {
    target = "crawlio-browser --full";
  } else {
    target = "crawlio-browser";
  }
  const args = ["-y", "add-mcp", target, "--name", "crawlio-browser", "--global", "--yes"];
  for (const agent of options.agents) {
    args.push("-a", agent);
  }
  return args;
}
|
|
57
|
+
// MCP config entry for stdio transport: launch the package via npx,
// adding --full when options.full is set. `options` may be omitted.
function buildStdioEntry(options) {
  const launchArgs = ["-y", "crawlio-browser"];
  if (options?.full) {
    launchArgs.push("--full");
  }
  return { command: "npx", args: launchArgs };
}
|
|
62
|
+
// MCP config entry for HTTP transport, pointing at the local portal's
// /mcp endpoint.
function buildPortalEntry() {
  const entry = {
    type: "http",
    url: MCP_URL
  };
  return entry;
}
|
|
65
|
+
// True when the given MCP config object already contains a "crawlio-browser"
// server entry. Tolerates configs whose mcpServers key is missing or not an
// object (returns false instead of letting the `in` operator throw).
function isAlreadyConfigured(config) {
  const servers = config?.mcpServers;
  if (!servers || typeof servers !== "object") return false;
  return "crawlio-browser" in servers;
}
|
|
68
|
+
// Ask a yes/no question on the terminal and resolve to the user's answer.
// Non-interactive sessions (stdin or stdout not a TTY) resolve immediately
// to defaultYes. An empty answer takes the default; otherwise only
// "y"/"yes" (case-insensitive) counts as yes.
async function confirm(question, defaultYes = true) {
  if (!process.stdin.isTTY || !process.stdout.isTTY) return defaultYes;
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  // Hint shows which answer is the default: [Y/n] vs [y/N].
  const hint = defaultYes ? "[Y/n]" : "[y/N]";
  return new Promise((resolve2) => {
    rl.question(` ${question} ${dim(hint)} `, (answer) => {
      rl.close();
      const a = answer.trim().toLowerCase();
      resolve2(a === "" ? defaultYes : a === "y" || a === "yes");
    });
  });
}
|
|
80
|
+
// Look for an existing .mcp.json — current working directory first, then
// $HOME. Returns { path, config } for the first readable, parseable file
// that carries an "mcpServers" key, or null when none qualifies.
function findMcpConfig() {
  const candidatePaths = [
    join(process.cwd(), ".mcp.json"),
    join(HOME, ".mcp.json")
  ];
  for (const candidate of candidatePaths) {
    if (!existsSync(candidate)) continue;
    let parsed;
    try {
      parsed = JSON.parse(readFileSync(candidate, "utf-8"));
    } catch {
      continue; // unreadable or malformed JSON — try the next candidate
    }
    const isConfig = parsed !== null && typeof parsed === "object" && "mcpServers" in parsed;
    if (isConfig) {
      return { path: candidate, config: parsed };
    }
  }
  return null;
}
|
|
98
|
+
// Probe the portal's /health endpoint once. Resolves to { ok: false } on
// any network error or non-2xx status; on success also reports the
// endpoint's toolCount and bridgeConnected fields.
async function healthCheck() {
  try {
    const response = await fetch(HEALTH_URL);
    if (!response.ok) return { ok: false };
    const body = await response.json();
    return {
      ok: true,
      toolCount: body.toolCount,
      bridgeConnected: body.bridgeConnected
    };
  } catch {
    return { ok: false }; // server not running / connection refused
  }
}
|
|
108
|
+
// Poll healthCheck up to `retries` times, sleeping `delayMs` ms between
// attempts. Returns the first ok result, or { ok: false } after exhausting
// all retries.
async function waitForHealth(retries, delayMs) {
  let attempt = 0;
  while (attempt < retries) {
    const status = await healthCheck();
    if (status.ok) return status;
    await new Promise((wake) => setTimeout(wake, delayMs));
    attempt += 1;
  }
  return { ok: false };
}
|
|
116
|
+
// Absolute path of the MCP server entry point (index.js), resolved
// relative to this module's own location.
function getServerEntryPath() {
  const moduleDir = dirname(fileURLToPath(import.meta.url));
  return resolve(moduleDir, "index.js");
}
|
|
119
|
+
// Locate a `node` binary on PATH using which/where; fall back to the
// currently running executable when lookup fails or the reported path
// does not exist.
function resolveNodePath() {
  const locator = platform() === "win32" ? "where" : "which";
  try {
    const output = execFileSync(locator, ["node"], { encoding: "utf-8", timeout: 5e3 });
    // `where` can print several matches, one per line — take the first.
    const candidate = output.trim().split("\n")[0].trim();
    if (candidate && existsSync(candidate)) return candidate;
  } catch {
    // locator unavailable or node not on PATH — use the fallback below
  }
  return process.execPath;
}
|
|
129
|
+
// Check whether `port` can be bound on 127.0.0.1 by briefly listening
// with a throwaway server. Resolves true when binding succeeds (the
// probe is closed again immediately), false when it errors out.
function isPortFree(port) {
  return new Promise((settle) => {
    const probe = createNetServer();
    probe.once("error", () => settle(false));
    probe.once("listening", () => {
      probe.close(() => settle(true));
    });
    probe.listen(port, "127.0.0.1");
  });
}
|
|
139
|
+
// Render a launchd property list that starts the portal server at login
// and keeps it alive (RunAtLoad + KeepAlive), logging to ~/Library/Logs/Crawlio.
// nodePath/serverPath are filesystem paths supplied by the caller.
function generatePlist(nodePath, serverPath) {
  // Paths are interpolated into XML text nodes; escape &, <, > so a path
  // containing those characters cannot corrupt the plist document.
  const esc = (s) => String(s)
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;");
  const logDir = join(HOME, "Library/Logs/Crawlio");
  return `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.crawlio.browser</string>
<key>ProgramArguments</key>
<array>
<string>${esc(nodePath)}</string>
<string>${esc(serverPath)}</string>
<string>--portal</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>${esc(logDir)}/server.log</string>
<key>StandardErrorPath</key>
<string>${esc(logDir)}/server.err</string>
<key>EnvironmentVariables</key>
<dict>
<key>PATH</key>
<string>/usr/local/bin:/opt/homebrew/bin:/usr/bin:/bin</string>
</dict>
</dict>
</plist>`;
}
|
|
169
|
+
// Ensure the portal server is running on PORTAL_URL, starting it if needed.
// Flow: skip if already healthy; in dry-run mode only print what would happen.
// On macOS, install a launchd agent (plist) for auto-start and fall back to a
// detached spawn if launchctl fails or the server never becomes healthy; on
// other platforms, spawn detached directly.
async function ensurePortalRunning(dryRun) {
  console.log(` Starting portal server...`);
  const health = await healthCheck();
  if (health.ok) {
    console.log(` ${green("+")} Server already running on ${PORTAL_URL}`);
    return;
  }
  const serverPath = getServerEntryPath();
  const nodePath = resolveNodePath();
  if (dryRun) {
    // Dry run: report the resolved paths and intended actions, then stop.
    console.log(` ${dim("[dry-run]")} Node path: ${nodePath}`);
    console.log(` ${dim("[dry-run]")} Server entry: ${serverPath}`);
    if (platform() === "darwin") {
      const plistPath = join(HOME, "Library/LaunchAgents/com.crawlio.browser.plist");
      console.log(` ${dim("[dry-run]")} Would write plist to: ${plistPath}`);
      console.log(` ${dim("[dry-run]")} Would run: launchctl load ${plistPath}`);
    } else {
      console.log(` ${dim("[dry-run]")} Would spawn detached: ${nodePath} ${serverPath} --portal`);
    }
    return;
  }
  if (platform() === "darwin") {
    const plistDir = join(HOME, "Library/LaunchAgents");
    const plistPath = join(plistDir, "com.crawlio.browser.plist");
    const logDir = join(HOME, "Library/Logs/Crawlio");
    mkdirSync(logDir, { recursive: true });
    mkdirSync(plistDir, { recursive: true });
    // Unload any previous agent first so the rewritten plist takes effect;
    // failure here (e.g. agent was never loaded) is expected and ignored.
    try {
      execFileSync("launchctl", ["unload", plistPath], { stdio: "ignore" });
    } catch {
    }
    writeFileSync(plistPath, generatePlist(nodePath, serverPath));
    try {
      execFileSync("launchctl", ["load", plistPath]);
    } catch {
      // launchctl unavailable or load rejected — run the server directly.
      return startDetachedServer(serverPath, nodePath);
    }
    // Give launchd up to ~5s (5 x 1s) to bring the server up.
    const result = await waitForHealth(5, 1e3);
    if (result.ok) {
      console.log(` ${green("+")} Server running on ${PORTAL_URL}`);
      console.log(` ${green("+")} Auto-start on login configured (launchd)`);
      return;
    }
    // Not healthy: distinguish "port taken by someone else" from
    // "our server failed to come up".
    const portFree = await isPortFree(3001);
    if (!portFree) {
      console.log(` ${yellow("!")} Port 3001 is already in use by another process`);
      console.log(` ${dim(" Try: npx crawlio-browser --portal --port 3002")}`);
      return;
    }
    console.log(` ${yellow("!")} launchd loaded but server not responding, falling back...`);
    return startDetachedServer(serverPath, nodePath);
  }
  // Non-macOS platforms: no launchd — spawn a detached background process.
  return startDetachedServer(serverPath, nodePath);
}
|
|
223
|
+
// Spawn the portal server as a detached background process and record its
// PID in ~/.crawlio/server.pid. Waits briefly for the health endpoint and
// reports the outcome; distinguishes a busy port from a failed startup.
async function startDetachedServer(serverPath, nodePath) {
  const crawlioDir = join(HOME, ".crawlio");
  mkdirSync(crawlioDir, { recursive: true });
  const pidFile = join(crawlioDir, "server.pid");
  // detached + unref lets this init process exit while the server keeps running.
  const child = spawn(nodePath, [serverPath, "--portal"], {
    detached: true,
    stdio: "ignore"
  });
  child.unref();
  if (child.pid) {
    writeFileSync(pidFile, String(child.pid));
  }
  // Poll up to ~5s (5 x 1s) for the server to come up.
  const result = await waitForHealth(5, 1e3);
  if (result.ok) {
    console.log(` ${green("+")} Server running on ${PORTAL_URL}`);
    console.log(` ${dim(" PID saved to ~/.crawlio/server.pid")}`);
  } else {
    const portFree = await isPortFree(3001);
    if (!portFree) {
      console.log(` ${yellow("!")} Port 3001 is already in use by another process`);
      console.log(` ${dim(" Try: npx crawlio-browser --portal --port 3002")}`);
    } else {
      console.log(` ${yellow("!")} Server started but health check failed \u2014 check logs`);
    }
  }
}
|
|
249
|
+
// Copy the bundled browser-automation skill (SKILL.md + reference.md) into
// ~/.claude/skills/browser-automation. Skips quietly when ~/.claude does not
// exist, and warns when the packaged skill source cannot be located.
function installBrowserSkill(dryRun) {
  console.log("");
  console.log(` Installing browser-automation skill...`);
  const claudeDir = join(HOME, ".claude");
  if (!existsSync(claudeDir)) {
    console.log(` ${dim(" i ~/.claude not found \u2014 skipping skill install")}`);
    return;
  }
  const destDir = join(claudeDir, "skills", "browser-automation");
  // Skill files ship two directories above this module:
  // <pkg>/dist/mcp-server/ -> <pkg>/skills/browser-automation/
  const moduleDir = dirname(fileURLToPath(import.meta.url));
  const skillSrcDir = resolve(moduleDir, "..", "..", "skills", "browser-automation");
  if (!existsSync(join(skillSrcDir, "SKILL.md"))) {
    console.log(` ${yellow("!")} Skill source not found at ${dim(skillSrcDir)}`);
    return;
  }
  if (dryRun) {
    console.log(` ${dim("[dry-run]")} Would copy SKILL.md + reference.md to ${destDir}`);
    return;
  }
  mkdirSync(destDir, { recursive: true });
  for (const file of ["SKILL.md", "reference.md"]) {
    const src = join(skillSrcDir, file);
    // reference.md is optional — copy only what exists.
    if (existsSync(src)) {
      copyFileSync(src, join(destDir, file));
    }
  }
  console.log(` ${green("+")} Browser automation skill installed to ${dim(destDir)}`);
}
|
|
277
|
+
// Install (or update) the crawlio-plugin repo under ~/.crawlio/plugins via
// git clone/pull, then copy its skills/*.md files into ~/.claude/skills,
// prefixing each filename with "crawlio-". Git and copy failures are
// reported (or silently tolerated for best-effort steps) without aborting init.
function installPlugin(dryRun) {
  console.log("");
  console.log(` Installing crawlio-plugin skills...`);
  const pluginsDir = join(HOME, ".crawlio", "plugins");
  const pluginDir = join(pluginsDir, "crawlio-plugin");
  if (dryRun) {
    console.log(` ${dim("[dry-run]")} Plugin target: ${pluginDir}`);
    console.log(` ${dim("[dry-run]")} Would clone: https://github.com/Crawlio-app/crawlio-plugin.git`);
    return;
  }
  mkdirSync(pluginsDir, { recursive: true });
  if (existsSync(join(pluginDir, "package.json"))) {
    // Already cloned: try a fast-forward update; failure (offline, diverged)
    // is non-fatal and silently ignored.
    console.log(` ${green("+")} Plugin already installed at ${dim(pluginDir)}`);
    try {
      execFileSync("git", ["-C", pluginDir, "pull", "--ff-only"], { stdio: "ignore", timeout: 15e3 });
      console.log(` ${green("+")} Updated to latest`);
    } catch {
    }
  } else {
    try {
      execFileSync("git", ["clone", "https://github.com/Crawlio-app/crawlio-plugin.git", pluginDir], {
        timeout: 3e4,
        stdio: "pipe"
      });
      console.log(` ${green("+")} Cloned to ${dim(pluginDir)}`);
    } catch (error) {
      const msg = error instanceof Error ? error.message : String(error);
      console.log(` ${yellow("!")} Clone failed: ${msg.slice(0, 150)}`);
      return;
    }
  }
  // Mirror the plugin's markdown skills into ~/.claude/skills (if Claude
  // config exists), namespaced with a "crawlio-" filename prefix.
  const claudeSkillsDir = join(HOME, ".claude", "skills");
  if (existsSync(join(HOME, ".claude"))) {
    mkdirSync(claudeSkillsDir, { recursive: true });
    try {
      const allFiles = readdirSync(pluginDir, { recursive: true, encoding: "utf-8" });
      // Keep only .md files somewhere under a skills/ directory.
      const skillFiles = allFiles.filter(
        (f) => f.includes(`skills${sep}`) && f.endsWith(".md")
      ).map((f) => join(pluginDir, f));
      for (const skillFile of skillFiles) {
        const name = basename(skillFile);
        const dest = join(claudeSkillsDir, `crawlio-${name}`);
        writeFileSync(dest, readFileSync(skillFile, "utf-8"));
      }
      if (skillFiles.length > 0) {
        console.log(` ${green("+")} ${skillFiles.length} skills copied to ${dim(claudeSkillsDir)}`);
      }
    } catch {
      // Best-effort copy — a read/write failure here should not fail the install.
    }
  }
}
|
|
328
|
+
// Portal-mode flow: start (or reuse) the local portal server, then point
// detected MCP clients at its HTTP endpoint via add-mcp.
async function portalFlow(options) {
  await ensurePortalRunning(options.dryRun);
  console.log("");
  console.log(` Configuring MCP clients (portal mode)...`);
  runAddMcp(options);
}
|
|
334
|
+
// Add a "crawlio-browser" entry to an existing .mcp.json ("found" comes from
// findMcpConfig). Skips when already configured; asks for confirmation unless
// --yes was given; in dry-run mode only prints the entry that would be written.
async function configureMetaMcp(found, options) {
  console.log(` Found MCP config at ${cyan(found.path)}`);
  if (isAlreadyConfigured(found.config)) {
    console.log(` ${green("+")} crawlio-browser already configured \u2014 skipping`);
    return;
  }
  if (!options.yes) {
    const proceed = await confirm("Add crawlio-browser to this config?");
    if (!proceed) {
      console.log(` ${dim("Skipped")}`);
      return;
    }
  }
  // Portal mode writes an HTTP entry; otherwise a stdio (npx) entry.
  const entry = options.portal ? buildPortalEntry() : buildStdioEntry({ full: options.full });
  if (options.dryRun) {
    console.log(` ${dim("[dry-run]")} Would add to ${found.path}:`);
    console.log(` ${dim("[dry-run]")} "crawlio-browser": ${JSON.stringify(entry)}`);
    return;
  }
  found.config.mcpServers["crawlio-browser"] = entry;
  // Rewrite the whole config file, pretty-printed with a trailing newline.
  writeFileSync(found.path, JSON.stringify(found.config, null, 2) + "\n");
  console.log(` ${green("+")} Added crawlio-browser to ${found.path}`);
}
|
|
357
|
+
// Fallback when no .mcp.json exists: configure detected MCP clients in
// stdio mode via the add-mcp tool.
function configureStdioClients(options) {
  console.log("");
  console.log(` Configuring MCP clients (stdio mode)...`);
  runAddMcp(options);
}
|
|
362
|
+
// Run `npx add-mcp` synchronously to register crawlio-browser with any MCP
// clients the tool detects, echoing its output. On failure, prints a
// cause-specific hint (missing npx, timeout, or generic error) plus a manual
// command the user can run instead.
function runAddMcp(options) {
  // Windows needs the .cmd shim when invoking npx via execFileSync.
  const npxBin = platform() === "win32" ? "npx.cmd" : "npx";
  const args = buildAddMcpArgs(options);
  if (options.dryRun) {
    console.log(` ${dim("[dry-run]")} Would run: ${npxBin} ${args.join(" ")}`);
    return;
  }
  try {
    const output = execFileSync(npxBin, args, {
      encoding: "utf-8",
      timeout: 6e4,
      // npm_config_yes suppresses npx's install prompt for add-mcp itself.
      env: { ...process.env, npm_config_yes: "true" }
    });
    // Echo each non-blank output line with a "+" prefix.
    const lines = output.split("\n").filter((l) => l.trim());
    let configuredCount = 0;
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed) {
        console.log(` ${green("+")} ${trimmed}`);
        configuredCount++;
      }
    }
    if (configuredCount === 0) {
      console.log(` ${dim(" add-mcp ran but no clients detected")}`);
    }
  } catch (error) {
    const errObj = error;
    const code = errObj?.code;
    const msg = error instanceof Error ? error.message : String(error);
    if (code === "ENOENT") {
      console.log(` ${yellow("!")} ${npxBin} not found in PATH \u2014 install Node.js 18+ and retry`);
    } else if (code === "ETIMEDOUT" || msg.includes("ETIMEDOUT") || msg.includes("timed out")) {
      console.log(` ${yellow("!")} add-mcp timed out \u2014 check network and retry`);
    } else {
      console.log(` ${yellow("!")} add-mcp failed: ${msg.slice(0, 200)}`);
    }
    // Always leave the user a copy-pasteable manual fallback.
    const target = options.portal ? MCP_URL : "crawlio-browser";
    console.log(` ${dim(` Manual: npx add-mcp ${target} --name crawlio-browser --global`)}`);
  }
}
|
|
402
|
+
// Print the init command's title banner.
function printBanner() {
  console.log("");
  console.log(` ${bold("Crawlio Browser Init")}`);
  console.log(` ${dim("Browser automation via MCP")}`);
  console.log("");
}
|
|
408
|
+
// Print the Chrome extension install link.
function printExtensionLink() {
  console.log("");
  console.log(` ${bold("Chrome Extension:")}`);
  console.log(` Install from ${cyan("https://crawlio.app/agent")}`);
}
|
|
413
|
+
// Print the end-of-init status summary: portal/bridge health (portal mode)
// or code-mode tool info, plugin install state, and next-step instructions.
async function printSummary(options) {
  console.log("");
  console.log(` ${bold("Status:")}`);
  if (options.portal) {
    // Re-probe health so the summary reflects the server's current state.
    const health = await healthCheck();
    if (health.ok) {
      console.log(` ${green("+")} Portal running on ${cyan(PORTAL_URL)} (${health.toolCount ?? "?"} tools)`);
    } else {
      console.log(` ${yellow("!")} Portal not responding \u2014 start with: npx crawlio-browser --portal`);
    }
    if (health.bridgeConnected) {
      console.log(` ${green("+")} Chrome extension connected`);
    } else {
      console.log(` ${dim(" i Chrome extension not connected yet")}`);
    }
  } else {
    console.log(` ${green("+")} Code mode configured (3 tools: search, execute, connect_tab)`);
    console.log(` ${dim(" i 125 commands searchable via the search tool")}`);
  }
  if (options.plugin) {
    // Verify the plugin directory actually exists (install may have failed).
    const pluginDir = join(HOME, ".crawlio", "plugins", "crawlio-plugin");
    if (existsSync(pluginDir)) {
      console.log(` ${green("+")} Plugin installed`);
    } else {
      console.log(` ${yellow("!")} Plugin not installed`);
    }
  } else {
    console.log(` ${dim(" i Plugin skipped (use --plugin to install)")}`);
  }
  console.log("");
  console.log(` ${bold("Next steps:")}`);
  console.log(` 1. Install the Chrome extension ${dim("-> https://crawlio.app/agent")}`);
  console.log(` 2. Open any configured MCP client and use crawlio-browser tools`);
  if (!options.portal) {
    console.log(` ${dim(" Tip: use --portal for multi-client sharing or ChatGPT Desktop")}`);
  }
  console.log("");
}
|
|
451
|
+
// Entry point for `init`: parse flags, enforce Node >= 18 (exits with code 1
// otherwise), then run portal-mode setup or stdio-mode client configuration,
// install the bundled skill, optionally install the plugin, and print a
// summary (suppressed in dry-run mode).
async function runInit(argv) {
  const options = parseFlags(argv);
  printBanner();
  if (options.dryRun) {
    console.log(` ${bold(yellow("[DRY RUN]"))} \u2014 showing what init would do without executing
`);
  }
  // Major version of the running Node, e.g. "18.19.0" -> 18.
  const nodeVersion = parseInt(process.versions.node.split(".")[0], 10);
  if (nodeVersion < 18) {
    console.log(` ${yellow("!")} Node.js ${process.versions.node} detected \u2014 18+ required`);
    console.log(` ${dim("Install via: https://nodejs.org")}`);
    process.exit(1);
  }
  if (options.portal) {
    await portalFlow(options);
  } else {
    // Prefer patching an existing .mcp.json; otherwise use add-mcp detection.
    const mcpConfig = findMcpConfig();
    if (mcpConfig) {
      await configureMetaMcp(mcpConfig, options);
    } else {
      configureStdioClients(options);
    }
  }
  installBrowserSkill(options.dryRun);
  if (options.plugin) {
    installPlugin(options.dryRun);
  }
  if (!options.dryRun) {
    printExtensionLink();
    await printSummary(options);
  }
}
|
|
483
|
+
export {
|
|
484
|
+
buildAddMcpArgs,
|
|
485
|
+
buildPortalEntry,
|
|
486
|
+
buildStdioEntry,
|
|
487
|
+
findMcpConfig,
|
|
488
|
+
installBrowserSkill,
|
|
489
|
+
isAlreadyConfigured,
|
|
490
|
+
parseFlags,
|
|
491
|
+
runInit
|
|
492
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "crawlio-browser",
|
|
3
|
+
"version": "1.3.0",
|
|
4
|
+
"description": "MCP server with 87 CDP-backed tools for browser automation — screenshots, DOM, network capture, framework detection, cookies, storage, performance metrics via Chrome",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/mcp-server/index.js",
|
|
7
|
+
"bin": {
|
|
8
|
+
"crawlio-browser": "bin/crawlio-browser.js"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"bin/crawlio-browser.js",
|
|
12
|
+
"dist/mcp-server/",
|
|
13
|
+
"skills/",
|
|
14
|
+
".claude-plugin/",
|
|
15
|
+
"README.md"
|
|
16
|
+
],
|
|
17
|
+
"scripts": {
|
|
18
|
+
"build": "npm run build:server && npm run build:extension",
|
|
19
|
+
"build:server": "node -e \"require('fs').rmSync('dist/mcp-server',{recursive:true,force:true})\" && tsup src/mcp-server/index.ts --format esm --dts --outDir dist/mcp-server",
|
|
20
|
+
"prepublishOnly": "npm run typecheck && npm run test && npm run build:server",
|
|
21
|
+
"build:extension": "rm -rf dist/extension && tsup src/extension/background.ts src/extension/popup.ts src/extension/welcome.ts --format iife --outDir dist/extension --define.__DEV__=false --minify && cd dist/extension && mv background.global.js background.js && mv popup.global.js popup.js && mv welcome.global.js welcome.js && cd ../.. && cp src/extension/manifest.prod.json dist/extension/manifest.json && cp src/extension/popup.html src/extension/popup.css src/extension/welcome.html src/extension/welcome.css src/extension/icon16.png src/extension/icon32.png src/extension/icon48.png src/extension/icon128.png dist/extension/",
|
|
22
|
+
"build:dev": "rm -rf dist/extension-dev && tsup src/extension/background.ts src/extension/popup.ts src/extension/welcome.ts --format iife --outDir dist/extension-dev --define.__DEV__=true && cd dist/extension-dev && mv background.global.js background.js && mv popup.global.js popup.js && mv welcome.global.js welcome.js && cd ../.. && cp src/extension/manifest.dev.json dist/extension-dev/manifest.json && cp src/extension/popup.html src/extension/popup.css src/extension/welcome.html src/extension/welcome.css src/extension/icon*.png dist/extension-dev/",
|
|
23
|
+
"test": "vitest run",
|
|
24
|
+
"test:watch": "vitest",
|
|
25
|
+
"test:coverage": "vitest run --coverage",
|
|
26
|
+
"typecheck": "tsc --noEmit && tsc --noEmit -p tsconfig.extension.json",
|
|
27
|
+
"typecheck:extension": "tsc --noEmit -p tsconfig.extension.json",
|
|
28
|
+
"dev": "tsup src/mcp-server/index.ts --format esm --watch",
|
|
29
|
+
"setup": "node dist/mcp-server/index.js init"
|
|
30
|
+
},
|
|
31
|
+
"keywords": [
|
|
32
|
+
"mcp",
|
|
33
|
+
"crawlio",
|
|
34
|
+
"browser-automation",
|
|
35
|
+
"chrome",
|
|
36
|
+
"model-context-protocol",
|
|
37
|
+
"ai",
|
|
38
|
+
"screenshots",
|
|
39
|
+
"dom",
|
|
40
|
+
"web-scraping"
|
|
41
|
+
],
|
|
42
|
+
"homepage": "https://crawlio.app/agent",
|
|
43
|
+
"repository": {
|
|
44
|
+
"type": "git",
|
|
45
|
+
"url": "https://github.com/Crawlio-app/crawlio-browser-mcp"
|
|
46
|
+
},
|
|
47
|
+
"license": "MIT",
|
|
48
|
+
"engines": {
|
|
49
|
+
"node": ">=18"
|
|
50
|
+
},
|
|
51
|
+
"dependencies": {
|
|
52
|
+
"@modelcontextprotocol/sdk": "^1.8.0",
|
|
53
|
+
"ws": "^8.18.1",
|
|
54
|
+
"zod": "^3.24.2"
|
|
55
|
+
},
|
|
56
|
+
"devDependencies": {
|
|
57
|
+
"@types/chrome": "^0.0.287",
|
|
58
|
+
"@types/ws": "^8.18.0",
|
|
59
|
+
"@vitest/coverage-v8": "^4.0.18",
|
|
60
|
+
"sharp": "^0.34.5",
|
|
61
|
+
"tsup": "^8.4.0",
|
|
62
|
+
"typescript": "^5.6.2",
|
|
63
|
+
"vitest": "^4.0.18"
|
|
64
|
+
}
|
|
65
|
+
}
|